summaryrefslogtreecommitdiffstats
path: root/services/sync
diff options
context:
space:
mode:
authorMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
committerMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
commit5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree10027f336435511475e392454359edea8e25895d /services/sync
parent49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
downloadUXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.gz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.lz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.xz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.zip
Add m-esr52 at 52.6.0
Diffstat (limited to 'services/sync')
-rw-r--r--services/sync/SyncComponents.manifest21
-rw-r--r--services/sync/Weave.js200
-rw-r--r--services/sync/locales/en-US/errors.properties27
-rw-r--r--services/sync/locales/en-US/sync.properties27
-rw-r--r--services/sync/locales/jar.mn10
-rw-r--r--services/sync/locales/l10n.ini9
-rw-r--r--services/sync/locales/moz.build7
-rw-r--r--services/sync/modules-testing/fakeservices.js131
-rw-r--r--services/sync/modules-testing/fxa_utils.js58
-rw-r--r--services/sync/modules-testing/rotaryengine.js124
-rw-r--r--services/sync/modules-testing/utils.js350
-rw-r--r--services/sync/modules/FxaMigrator.jsm99
-rw-r--r--services/sync/modules/SyncedTabs.jsm301
-rw-r--r--services/sync/modules/addonsreconciler.js676
-rw-r--r--services/sync/modules/addonutils.js506
-rw-r--r--services/sync/modules/bookmark_validator.js784
-rw-r--r--services/sync/modules/browserid_identity.js869
-rw-r--r--services/sync/modules/collection_validator.js204
-rw-r--r--services/sync/modules/constants.js198
-rw-r--r--services/sync/modules/engines.js1813
-rw-r--r--services/sync/modules/engines/addons.js813
-rw-r--r--services/sync/modules/engines/bookmarks.js1378
-rw-r--r--services/sync/modules/engines/clients.js782
-rw-r--r--services/sync/modules/engines/extension-storage.js277
-rw-r--r--services/sync/modules/engines/forms.js305
-rw-r--r--services/sync/modules/engines/history.js442
-rw-r--r--services/sync/modules/engines/passwords.js371
-rw-r--r--services/sync/modules/engines/prefs.js273
-rw-r--r--services/sync/modules/engines/tabs.js393
-rw-r--r--services/sync/modules/identity.js605
-rw-r--r--services/sync/modules/jpakeclient.js773
-rw-r--r--services/sync/modules/keys.js214
-rw-r--r--services/sync/modules/main.js30
-rw-r--r--services/sync/modules/policies.js983
-rw-r--r--services/sync/modules/record.js1039
-rw-r--r--services/sync/modules/resource.js669
-rw-r--r--services/sync/modules/rest.js90
-rw-r--r--services/sync/modules/service.js1756
-rw-r--r--services/sync/modules/stages/cluster.js113
-rw-r--r--services/sync/modules/stages/declined.js76
-rw-r--r--services/sync/modules/stages/enginesync.js449
-rw-r--r--services/sync/modules/status.js145
-rw-r--r--services/sync/modules/telemetry.js578
-rw-r--r--services/sync/modules/userapi.js224
-rw-r--r--services/sync/modules/util.js797
-rw-r--r--services/sync/moz.build78
-rw-r--r--services/sync/services-sync.js95
-rw-r--r--services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml27
-rw-r--r--services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml27
-rw-r--r--services/sync/tests/tps/addons/restartless.xpibin0 -> 485 bytes
-rw-r--r--services/sync/tests/tps/addons/unsigned.xpibin0 -> 452 bytes
-rw-r--r--services/sync/tests/tps/all_tests.json33
-rw-r--r--services/sync/tests/tps/mozmill_sanity.js30
-rw-r--r--services/sync/tests/tps/mozmill_sanity2.js15
-rw-r--r--services/sync/tests/tps/test_addon_nonrestartless_xpi.js105
-rw-r--r--services/sync/tests/tps/test_addon_reconciling.js54
-rw-r--r--services/sync/tests/tps/test_addon_restartless_xpi.js70
-rw-r--r--services/sync/tests/tps/test_addon_sanity.js30
-rw-r--r--services/sync/tests/tps/test_addon_wipe.js35
-rw-r--r--services/sync/tests/tps/test_bookmark_conflict.js143
-rw-r--r--services/sync/tests/tps/test_bookmarks_in_same_named_folder.js69
-rw-r--r--services/sync/tests/tps/test_bug501528.js79
-rw-r--r--services/sync/tests/tps/test_bug530717.js69
-rw-r--r--services/sync/tests/tps/test_bug531489.js62
-rw-r--r--services/sync/tests/tps/test_bug535326.js130
-rw-r--r--services/sync/tests/tps/test_bug538298.js93
-rw-r--r--services/sync/tests/tps/test_bug546807.js54
-rw-r--r--services/sync/tests/tps/test_bug556509.js45
-rw-r--r--services/sync/tests/tps/test_bug562515.js105
-rw-r--r--services/sync/tests/tps/test_bug563989.js107
-rw-r--r--services/sync/tests/tps/test_bug575423.js85
-rw-r--r--services/sync/tests/tps/test_client_wipe.js164
-rw-r--r--services/sync/tests/tps/test_formdata.js97
-rw-r--r--services/sync/tests/tps/test_history.js167
-rw-r--r--services/sync/tests/tps/test_history_collision.js125
-rw-r--r--services/sync/tests/tps/test_mozmill_sanity.js24
-rw-r--r--services/sync/tests/tps/test_passwords.js113
-rw-r--r--services/sync/tests/tps/test_prefs.js57
-rw-r--r--services/sync/tests/tps/test_privbrw_passwords.js104
-rw-r--r--services/sync/tests/tps/test_privbrw_tabs.js87
-rw-r--r--services/sync/tests/tps/test_special_tabs.js78
-rw-r--r--services/sync/tests/tps/test_sync.js424
-rw-r--r--services/sync/tests/tps/test_tabs.js59
-rw-r--r--services/sync/tests/unit/addon1-search.xml27
-rw-r--r--services/sync/tests/unit/bootstrap1-search.xml27
-rw-r--r--services/sync/tests/unit/fake_login_manager.js38
-rw-r--r--services/sync/tests/unit/head_appinfo.js57
-rw-r--r--services/sync/tests/unit/head_errorhandler_common.js112
-rw-r--r--services/sync/tests/unit/head_helpers.js446
-rw-r--r--services/sync/tests/unit/head_http_server.js1044
-rw-r--r--services/sync/tests/unit/missing-sourceuri.xml27
-rw-r--r--services/sync/tests/unit/missing-xpi-search.xml27
-rw-r--r--services/sync/tests/unit/places_v10_from_v11.sqlitebin0 -> 1081344 bytes
-rw-r--r--services/sync/tests/unit/prefs_test_prefs_store.js25
-rw-r--r--services/sync/tests/unit/rewrite-search.xml27
-rw-r--r--services/sync/tests/unit/sync_ping_schema.json198
-rw-r--r--services/sync/tests/unit/systemaddon-search.xml27
-rw-r--r--services/sync/tests/unit/test_addon_utils.js141
-rw-r--r--services/sync/tests/unit/test_addons_engine.js253
-rw-r--r--services/sync/tests/unit/test_addons_reconciler.js195
-rw-r--r--services/sync/tests/unit/test_addons_store.js539
-rw-r--r--services/sync/tests/unit/test_addons_tracker.js177
-rw-r--r--services/sync/tests/unit/test_bookmark_batch_fail.js23
-rw-r--r--services/sync/tests/unit/test_bookmark_duping.js644
-rw-r--r--services/sync/tests/unit/test_bookmark_engine.js665
-rw-r--r--services/sync/tests/unit/test_bookmark_invalid.js63
-rw-r--r--services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js99
-rw-r--r--services/sync/tests/unit/test_bookmark_livemarks.js134
-rw-r--r--services/sync/tests/unit/test_bookmark_order.js529
-rw-r--r--services/sync/tests/unit/test_bookmark_places_query_rewriting.js60
-rw-r--r--services/sync/tests/unit/test_bookmark_record.js48
-rw-r--r--services/sync/tests/unit/test_bookmark_smart_bookmarks.js235
-rw-r--r--services/sync/tests/unit/test_bookmark_store.js534
-rw-r--r--services/sync/tests/unit/test_bookmark_tracker.js1537
-rw-r--r--services/sync/tests/unit/test_bookmark_validator.js347
-rw-r--r--services/sync/tests/unit/test_browserid_identity.js890
-rw-r--r--services/sync/tests/unit/test_clients_engine.js1439
-rw-r--r--services/sync/tests/unit/test_clients_escape.js64
-rw-r--r--services/sync/tests/unit/test_collection_getBatched.js195
-rw-r--r--services/sync/tests/unit/test_collection_inc_get.js188
-rw-r--r--services/sync/tests/unit/test_collections_recovery.js85
-rw-r--r--services/sync/tests/unit/test_corrupt_keys.js233
-rw-r--r--services/sync/tests/unit/test_declined.js153
-rw-r--r--services/sync/tests/unit/test_engine.js219
-rw-r--r--services/sync/tests/unit/test_engine_abort.js69
-rw-r--r--services/sync/tests/unit/test_enginemanager.js114
-rw-r--r--services/sync/tests/unit/test_errorhandler_1.js913
-rw-r--r--services/sync/tests/unit/test_errorhandler_2.js1012
-rw-r--r--services/sync/tests/unit/test_errorhandler_eol.js137
-rw-r--r--services/sync/tests/unit/test_errorhandler_filelog.js370
-rw-r--r--services/sync/tests/unit/test_errorhandler_sync_checkServerError.js282
-rw-r--r--services/sync/tests/unit/test_extension_storage_crypto.js93
-rw-r--r--services/sync/tests/unit/test_extension_storage_engine.js62
-rw-r--r--services/sync/tests/unit/test_extension_storage_tracker.js38
-rw-r--r--services/sync/tests/unit/test_forms_store.js151
-rw-r--r--services/sync/tests/unit/test_forms_tracker.js72
-rw-r--r--services/sync/tests/unit/test_fxa_migration.js117
-rw-r--r--services/sync/tests/unit/test_fxa_node_reassignment.js368
-rw-r--r--services/sync/tests/unit/test_fxa_service_cluster.js68
-rw-r--r--services/sync/tests/unit/test_fxa_startOver.js63
-rw-r--r--services/sync/tests/unit/test_history_engine.js147
-rw-r--r--services/sync/tests/unit/test_history_store.js297
-rw-r--r--services/sync/tests/unit/test_history_tracker.js203
-rw-r--r--services/sync/tests/unit/test_hmac_error.js248
-rw-r--r--services/sync/tests/unit/test_httpd_sync_server.js285
-rw-r--r--services/sync/tests/unit/test_identity_manager.js284
-rw-r--r--services/sync/tests/unit/test_interval_triggers.js450
-rw-r--r--services/sync/tests/unit/test_jpakeclient.js562
-rw-r--r--services/sync/tests/unit/test_keys.js326
-rw-r--r--services/sync/tests/unit/test_load_modules.js55
-rw-r--r--services/sync/tests/unit/test_node_reassignment.js523
-rw-r--r--services/sync/tests/unit/test_password_store.js199
-rw-r--r--services/sync/tests/unit/test_password_tracker.js101
-rw-r--r--services/sync/tests/unit/test_password_validator.js158
-rw-r--r--services/sync/tests/unit/test_places_guid_downgrade.js215
-rw-r--r--services/sync/tests/unit/test_postqueue.js455
-rw-r--r--services/sync/tests/unit/test_prefs_store.js168
-rw-r--r--services/sync/tests/unit/test_prefs_tracker.js88
-rw-r--r--services/sync/tests/unit/test_records_crypto.js182
-rw-r--r--services/sync/tests/unit/test_records_wbo.js86
-rw-r--r--services/sync/tests/unit/test_resource.js502
-rw-r--r--services/sync/tests/unit/test_resource_async.js730
-rw-r--r--services/sync/tests/unit/test_resource_header.js65
-rw-r--r--services/sync/tests/unit/test_resource_ua.js100
-rw-r--r--services/sync/tests/unit/test_score_triggers.js149
-rw-r--r--services/sync/tests/unit/test_sendcredentials_controller.js102
-rw-r--r--services/sync/tests/unit/test_service_attributes.js118
-rw-r--r--services/sync/tests/unit/test_service_changePassword.js80
-rw-r--r--services/sync/tests/unit/test_service_checkAccount.js41
-rw-r--r--services/sync/tests/unit/test_service_cluster.js110
-rw-r--r--services/sync/tests/unit/test_service_createAccount.js75
-rw-r--r--services/sync/tests/unit/test_service_detect_upgrade.js297
-rw-r--r--services/sync/tests/unit/test_service_getStorageInfo.js94
-rw-r--r--services/sync/tests/unit/test_service_login.js245
-rw-r--r--services/sync/tests/unit/test_service_migratePrefs.js70
-rw-r--r--services/sync/tests/unit/test_service_passwordUTF8.js95
-rw-r--r--services/sync/tests/unit/test_service_persistLogin.js46
-rw-r--r--services/sync/tests/unit/test_service_set_serverURL.js13
-rw-r--r--services/sync/tests/unit/test_service_startOver.js101
-rw-r--r--services/sync/tests/unit/test_service_startup.js49
-rw-r--r--services/sync/tests/unit/test_service_sync_401.js84
-rw-r--r--services/sync/tests/unit/test_service_sync_locked.js37
-rw-r--r--services/sync/tests/unit/test_service_sync_remoteSetup.js237
-rw-r--r--services/sync/tests/unit/test_service_sync_specified.js160
-rw-r--r--services/sync/tests/unit/test_service_sync_updateEnabledEngines.js442
-rw-r--r--services/sync/tests/unit/test_service_verifyLogin.js122
-rw-r--r--services/sync/tests/unit/test_service_wipeClient.js112
-rw-r--r--services/sync/tests/unit/test_service_wipeServer.js242
-rw-r--r--services/sync/tests/unit/test_status.js91
-rw-r--r--services/sync/tests/unit/test_status_checkSetup.js45
-rw-r--r--services/sync/tests/unit/test_syncedtabs.js221
-rw-r--r--services/sync/tests/unit/test_syncengine.js204
-rw-r--r--services/sync/tests/unit/test_syncengine_sync.js1855
-rw-r--r--services/sync/tests/unit/test_syncscheduler.js1033
-rw-r--r--services/sync/tests/unit/test_syncstoragerequest.js220
-rw-r--r--services/sync/tests/unit/test_tab_engine.js141
-rw-r--r--services/sync/tests/unit/test_tab_store.js116
-rw-r--r--services/sync/tests/unit/test_tab_tracker.js127
-rw-r--r--services/sync/tests/unit/test_telemetry.js564
-rw-r--r--services/sync/tests/unit/test_tracker_addChanged.js59
-rw-r--r--services/sync/tests/unit/test_upgrade_old_sync_key.js49
-rw-r--r--services/sync/tests/unit/test_utils_catch.js94
-rw-r--r--services/sync/tests/unit/test_utils_deepEquals.js44
-rw-r--r--services/sync/tests/unit/test_utils_deferGetSet.js49
-rw-r--r--services/sync/tests/unit/test_utils_deriveKey.js66
-rw-r--r--services/sync/tests/unit/test_utils_getErrorString.js14
-rw-r--r--services/sync/tests/unit/test_utils_json.js114
-rw-r--r--services/sync/tests/unit/test_utils_keyEncoding.js15
-rw-r--r--services/sync/tests/unit/test_utils_lazyStrings.js14
-rw-r--r--services/sync/tests/unit/test_utils_lock.js79
-rw-r--r--services/sync/tests/unit/test_utils_makeGUID.js40
-rw-r--r--services/sync/tests/unit/test_utils_notify.js100
-rw-r--r--services/sync/tests/unit/test_utils_passphrase.js73
-rw-r--r--services/sync/tests/unit/test_warn_on_truncated_response.js95
-rw-r--r--services/sync/tests/unit/xpcshell.ini200
-rwxr-xr-xservices/sync/tps/extensions/mozmill/chrome.manifest2
-rwxr-xr-xservices/sync/tps/extensions/mozmill/install.rdf24
-rw-r--r--services/sync/tps/extensions/mozmill/resource/driver/controller.js1141
-rw-r--r--services/sync/tps/extensions/mozmill/resource/driver/elementslib.js537
-rw-r--r--services/sync/tps/extensions/mozmill/resource/driver/mozelement.js1163
-rw-r--r--services/sync/tps/extensions/mozmill/resource/driver/mozmill.js285
-rw-r--r--services/sync/tps/extensions/mozmill/resource/driver/msgbroker.js58
-rw-r--r--services/sync/tps/extensions/mozmill/resource/modules/assertions.js670
-rw-r--r--services/sync/tps/extensions/mozmill/resource/modules/driver.js290
-rw-r--r--services/sync/tps/extensions/mozmill/resource/modules/errors.js102
-rw-r--r--services/sync/tps/extensions/mozmill/resource/modules/frame.js788
-rw-r--r--services/sync/tps/extensions/mozmill/resource/modules/l10n.js71
-rw-r--r--services/sync/tps/extensions/mozmill/resource/modules/stack.js43
-rw-r--r--services/sync/tps/extensions/mozmill/resource/modules/windows.js292
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js823
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/arrays.js78
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/dom.js24
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/httpd.js5355
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/json2.js469
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/objects.js54
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/os.js57
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js370
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/strings.js17
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/utils.js455
-rw-r--r--services/sync/tps/extensions/mozmill/resource/stdlib/withs.js146
-rw-r--r--services/sync/tps/extensions/tps/chrome.manifest5
-rw-r--r--services/sync/tps/extensions/tps/components/tps-cmdline.js150
-rw-r--r--services/sync/tps/extensions/tps/install.rdf28
-rw-r--r--services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm121
-rw-r--r--services/sync/tps/extensions/tps/resource/auth/sync.jsm88
-rw-r--r--services/sync/tps/extensions/tps/resource/logger.jsm148
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/addons.jsm127
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm1001
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/forms.jsm219
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/history.jsm207
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/passwords.jsm163
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/prefs.jsm117
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/tabs.jsm67
-rw-r--r--services/sync/tps/extensions/tps/resource/modules/windows.jsm36
-rw-r--r--services/sync/tps/extensions/tps/resource/quit.js63
-rw-r--r--services/sync/tps/extensions/tps/resource/tps.jsm1340
256 files changed, 71294 insertions, 0 deletions
diff --git a/services/sync/SyncComponents.manifest b/services/sync/SyncComponents.manifest
new file mode 100644
index 000000000..6493bb224
--- /dev/null
+++ b/services/sync/SyncComponents.manifest
@@ -0,0 +1,21 @@
+# WeaveService has to restrict its registration for the app-startup category
+# to the specific list of apps that use it so it doesn't get loaded in xpcshell.
+# Thus we restrict it to these apps:
+#
+# b2g: {3c2e2abc-06d4-11e1-ac3b-374f68613e61}
+# browser: {ec8030f7-c20a-464f-9b0e-13a3a9e97384}
+# mobile/android: {aa3c5121-dab2-40e2-81ca-7ea25febc110}
+# mobile/xul: {a23983c0-fd0e-11dc-95ff-0800200c9a66}
+# suite (comm): {92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}
+# graphene: {d1bfe7d9-c01e-4237-998b-7b5f960a4314}
+
+# Weave.js
+component {74b89fb0-f200-4ae8-a3ec-dd164117f6de} Weave.js
+contract @mozilla.org/weave/service;1 {74b89fb0-f200-4ae8-a3ec-dd164117f6de}
+category app-startup WeaveService service,@mozilla.org/weave/service;1 application={3c2e2abc-06d4-11e1-ac3b-374f68613e61} application={ec8030f7-c20a-464f-9b0e-13a3a9e97384} application={aa3c5121-dab2-40e2-81ca-7ea25febc110} application={a23983c0-fd0e-11dc-95ff-0800200c9a66} application={92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a} application={99bceaaa-e3c6-48c1-b981-ef9b46b67d60} application={d1bfe7d9-c01e-4237-998b-7b5f960a4314}
+component {d28f8a0b-95da-48f4-b712-caf37097be41} Weave.js
+contract @mozilla.org/network/protocol/about;1?what=sync-log {d28f8a0b-95da-48f4-b712-caf37097be41}
+
+# Register resource aliases
+# (Note, for tests these are also set up in addResourceAlias)
+resource services-sync resource://gre/modules/services-sync/
diff --git a/services/sync/Weave.js b/services/sync/Weave.js
new file mode 100644
index 000000000..4d79144e3
--- /dev/null
+++ b/services/sync/Weave.js
@@ -0,0 +1,200 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+const Cc = Components.classes;
+const Ci = Components.interfaces;
+const Cu = Components.utils;
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/FileUtils.jsm");
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://services-sync/util.js");
+
+const SYNC_PREFS_BRANCH = "services.sync.";
+
+
+/**
+ * Sync's XPCOM service.
+ *
+ * It is named "Weave" for historical reasons.
+ *
+ * It's worth noting how Sync is lazily loaded. We register a timer that
+ * loads Sync a few seconds after app startup. This is so Sync does not
+ * adversely affect application start time.
+ *
+ * If Sync is not configured, no extra Sync code is loaded. If an
+ * external component (say the UI) needs to interact with Sync, it
+ * should use the promise-base function whenLoaded() - something like the
+ * following:
+ *
+ * // 1. Grab a handle to the Sync XPCOM service.
+ * let service = Cc["@mozilla.org/weave/service;1"]
+ * .getService(Components.interfaces.nsISupports)
+ * .wrappedJSObject;
+ *
+ * // 2. Use the .then method of the promise.
+ * service.whenLoaded().then(() => {
+ * // You are free to interact with "Weave." objects.
+ * return;
+ * });
+ *
+ * And that's it! However, if you really want to avoid promises and do it
+ * old-school, then
+ *
+ * // 1. Get a reference to the service as done in (1) above.
+ *
+ * // 2. Check if the service has been initialized.
+ * if (service.ready) {
+ * // You are free to interact with "Weave." objects.
+ * return;
+ * }
+ *
+ * // 3. Install "ready" listener.
+ * Services.obs.addObserver(function onReady() {
+ * Services.obs.removeObserver(onReady, "weave:service:ready");
+ *
+ * // You are free to interact with "Weave." objects.
+ * }, "weave:service:ready", false);
+ *
+ * // 4. Trigger loading of Sync.
+ * service.ensureLoaded();
+ */
+function WeaveService() {
+ this.wrappedJSObject = this;
+ this.ready = false;
+}
+WeaveService.prototype = {
+ classID: Components.ID("{74b89fb0-f200-4ae8-a3ec-dd164117f6de}"),
+
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsIObserver,
+ Ci.nsISupportsWeakReference]),
+
+ ensureLoaded: function () {
+ // If we are loaded and not using FxA, load the migration module.
+ if (!this.fxAccountsEnabled) {
+ Cu.import("resource://services-sync/FxaMigrator.jsm");
+ }
+
+ Components.utils.import("resource://services-sync/main.js");
+
+ // Side-effect of accessing the service is that it is instantiated.
+ Weave.Service;
+ },
+
+ whenLoaded: function() {
+ if (this.ready) {
+ return Promise.resolve();
+ }
+ let deferred = Promise.defer();
+
+ Services.obs.addObserver(function onReady() {
+ Services.obs.removeObserver(onReady, "weave:service:ready");
+ deferred.resolve();
+ }, "weave:service:ready", false);
+ this.ensureLoaded();
+ return deferred.promise;
+ },
+
+ /**
+ * Whether Firefox Accounts is enabled.
+ *
+ * @return bool
+ */
+ get fxAccountsEnabled() {
+ try {
+ // Old sync guarantees '@' will never appear in the username while FxA
+ // uses the FxA email address - so '@' is the flag we use.
+ let username = Services.prefs.getCharPref(SYNC_PREFS_BRANCH + "username");
+ return !username || username.includes('@');
+ } catch (_) {
+ return true; // No username == only allow FxA to be configured.
+ }
+ },
+
+ /**
+ * Whether Sync appears to be enabled.
+ *
+ * This returns true if all the Sync preferences for storing account
+ * and server configuration are populated.
+ *
+ * It does *not* perform a robust check to see if the client is working.
+ * For that, you'll want to check Weave.Status.checkSetup().
+ */
+ get enabled() {
+ let prefs = Services.prefs.getBranch(SYNC_PREFS_BRANCH);
+ return prefs.prefHasUserValue("username");
+ },
+
+ observe: function (subject, topic, data) {
+ switch (topic) {
+ case "app-startup":
+ let os = Cc["@mozilla.org/observer-service;1"].
+ getService(Ci.nsIObserverService);
+ os.addObserver(this, "final-ui-startup", true);
+ break;
+
+ case "final-ui-startup":
+ // Force Weave service to load if it hasn't triggered from overlays
+ this.timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ this.timer.initWithCallback({
+ notify: function() {
+ let isConfigured = false;
+ // We only load more if it looks like Sync is configured.
+ let prefs = Services.prefs.getBranch(SYNC_PREFS_BRANCH);
+ if (prefs.prefHasUserValue("username")) {
+ // We have a username. So, do a more thorough check. This will
+ // import a number of modules and thus increase memory
+ // accordingly. We could potentially copy code performed by
+ // this check into this file if our above code is yielding too
+ // many false positives.
+ Components.utils.import("resource://services-sync/main.js");
+ isConfigured = Weave.Status.checkSetup() != Weave.CLIENT_NOT_CONFIGURED;
+ }
+ let getHistogramById = Services.telemetry.getHistogramById;
+ getHistogramById("WEAVE_CONFIGURED").add(isConfigured);
+ if (isConfigured) {
+ getHistogramById("WEAVE_CONFIGURED_MASTER_PASSWORD").add(Utils.mpEnabled());
+ this.ensureLoaded();
+ }
+ }.bind(this)
+ }, 10000, Ci.nsITimer.TYPE_ONE_SHOT);
+ break;
+ }
+ }
+};
+
+function AboutWeaveLog() {}
+AboutWeaveLog.prototype = {
+ classID: Components.ID("{d28f8a0b-95da-48f4-b712-caf37097be41}"),
+
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsIAboutModule,
+ Ci.nsISupportsWeakReference]),
+
+ getURIFlags: function(aURI) {
+ return 0;
+ },
+
+ newChannel: function(aURI, aLoadInfo) {
+ let dir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
+ let uri = Services.io.newFileURI(dir);
+ let channel = Services.io.newChannelFromURIWithLoadInfo(uri, aLoadInfo);
+
+ channel.originalURI = aURI;
+
+ // Ensure that the about page has the same privileges as a regular directory
+ // view. That way links to files can be opened. make sure we use the correct
+ // origin attributes when creating the principal for accessing the
+ // about:sync-log data.
+ let ssm = Cc["@mozilla.org/scriptsecuritymanager;1"]
+ .getService(Ci.nsIScriptSecurityManager);
+ let principal = ssm.createCodebasePrincipal(uri, aLoadInfo.originAttributes);
+
+ channel.owner = principal;
+ return channel;
+ }
+};
+
+const components = [WeaveService, AboutWeaveLog];
+this.NSGetFactory = XPCOMUtils.generateNSGetFactory(components);
diff --git a/services/sync/locales/en-US/errors.properties b/services/sync/locales/en-US/errors.properties
new file mode 100644
index 000000000..3dfa074bc
--- /dev/null
+++ b/services/sync/locales/en-US/errors.properties
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+error.login.reason.network = Failed to connect to the server
+error.login.reason.recoverykey = Wrong Recovery Key
+error.login.reason.account = Incorrect account name or password
+error.login.reason.no_username = Missing account name
+error.login.reason.no_password2 = Missing password
+error.login.reason.no_recoverykey= No saved Recovery Key to use
+error.login.reason.server = Server incorrectly configured
+
+error.sync.failed_partial = One or more data types could not be synced
+# LOCALIZATION NOTE (error.sync.reason.serverMaintenance): We removed the extraneous period from this string
+error.sync.reason.serverMaintenance = Firefox Sync server maintenance is underway, syncing will resume automatically
+
+invalid-captcha = Incorrect words, try again
+weak-password = Use a stronger password
+
+# this is the fallback, if we hit an error we didn't bother to localize
+error.reason.unknown = Unknown error
+
+change.password.pwSameAsPassword = Password can’t match current password
+change.password.pwSameAsUsername = Password can’t match your user name
+change.password.pwSameAsEmail = Password can’t match your email address
+change.password.mismatch = The passwords entered do not match
+change.password.tooShort = The password entered is too short
diff --git a/services/sync/locales/en-US/sync.properties b/services/sync/locales/en-US/sync.properties
new file mode 100644
index 000000000..d00183118
--- /dev/null
+++ b/services/sync/locales/en-US/sync.properties
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# %1: the user name (Ed), %2: the app name (Firefox), %3: the operating system (Android)
+client.name2 = %1$S’s %2$S on %3$S
+
+# %S is the date and time at which the last sync successfully completed
+lastSync2.label = Last sync: %S
+
+# signInToSync.description is the tooltip for the Sync buttons when Sync is
+# not configured.
+signInToSync.description = Sign In To Sync
+
+error.sync.title = Error While Syncing
+error.sync.description = Sync encountered an error while syncing: %1$S. Sync will automatically retry this action.
+warning.sync.eol.label = Service Shutting Down
+# %1: the app name (Firefox)
+warning.sync.eol.description = Your Firefox Sync service is shutting down soon. Upgrade %1$S to keep syncing.
+error.sync.eol.label = Service Unavailable
+# %1: the app name (Firefox)
+error.sync.eol.description = Your Firefox Sync service is no longer available. You need to upgrade %1$S to keep syncing.
+sync.eol.learnMore.label = Learn more
+sync.eol.learnMore.accesskey = L
+
+syncnow.label = Sync Now
+syncing2.label = Syncing…
diff --git a/services/sync/locales/jar.mn b/services/sync/locales/jar.mn
new file mode 100644
index 000000000..ab9848f9d
--- /dev/null
+++ b/services/sync/locales/jar.mn
@@ -0,0 +1,10 @@
+#filter substitution
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+@AB_CD@.jar:
+% locale weave @AB_CD@ %locale/@AB_CD@/
+ locale/@AB_CD@/services/errors.properties (%errors.properties)
+ locale/@AB_CD@/services/sync.properties (%sync.properties)
diff --git a/services/sync/locales/l10n.ini b/services/sync/locales/l10n.ini
new file mode 100644
index 000000000..d9c1ef945
--- /dev/null
+++ b/services/sync/locales/l10n.ini
@@ -0,0 +1,9 @@
+; This Source Code Form is subject to the terms of the Mozilla Public
+; License, v. 2.0. If a copy of the MPL was not distributed with this
+; file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+[general]
+depth = ../../..
+
+[compare]
+dirs = services/sync
diff --git a/services/sync/locales/moz.build b/services/sync/locales/moz.build
new file mode 100644
index 000000000..aac3a838c
--- /dev/null
+++ b/services/sync/locales/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ['jar.mn']
diff --git a/services/sync/modules-testing/fakeservices.js b/services/sync/modules-testing/fakeservices.js
new file mode 100644
index 000000000..2895736df
--- /dev/null
+++ b/services/sync/modules-testing/fakeservices.js
@@ -0,0 +1,131 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "FakeCryptoService",
+ "FakeFilesystemService",
+ "FakeGUIDService",
+ "fakeSHA256HMAC",
+];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+
+var btoa = Cu.import("resource://gre/modules/Log.jsm").btoa;
+
+this.FakeFilesystemService = function FakeFilesystemService(contents) {
+ this.fakeContents = contents;
+ let self = this;
+
+ // Save away the unmocked versions of the functions we replace here for tests
+ // that really want the originals. As this may be called many times per test,
+ // we must be careful to not replace them with ones we previously replaced.
+ // (And WTF are we bothering with these mocks in the first place? Is the
+ // performance of the filesystem *really* such that it outweighs the downside
+ // of not running our real JSON functions in the tests? Eg, these mocks don't
+ // always throw exceptions when the real ones do. Anyway...)
+ for (let name of ["jsonSave", "jsonLoad", "jsonMove", "jsonRemove"]) {
+ let origName = "_real_" + name;
+ if (!Utils[origName]) {
+ Utils[origName] = Utils[name];
+ }
+ }
+
+ Utils.jsonSave = function jsonSave(filePath, that, obj, callback) {
+ let json = typeof obj == "function" ? obj.call(that) : obj;
+ self.fakeContents["weave/" + filePath + ".json"] = JSON.stringify(json);
+ callback.call(that);
+ };
+
+ Utils.jsonLoad = function jsonLoad(filePath, that, cb) {
+ let obj;
+ let json = self.fakeContents["weave/" + filePath + ".json"];
+ if (json) {
+ obj = JSON.parse(json);
+ }
+ cb.call(that, obj);
+ };
+
+ Utils.jsonMove = function jsonMove(aFrom, aTo, that) {
+ const fromPath = "weave/" + aFrom + ".json";
+ self.fakeContents["weave/" + aTo + ".json"] = self.fakeContents[fromPath];
+ delete self.fakeContents[fromPath];
+ return Promise.resolve();
+ };
+
+ Utils.jsonRemove = function jsonRemove(filePath, that) {
+ delete self.fakeContents["weave/" + filePath + ".json"];
+ return Promise.resolve();
+ };
+};
+
+this.fakeSHA256HMAC = function fakeSHA256HMAC(message) {
+ message = message.substr(0, 64);
+ while (message.length < 64) {
+ message += " ";
+ }
+ return message;
+}
+
+this.FakeGUIDService = function FakeGUIDService() {
+ let latestGUID = 0;
+
+ Utils.makeGUID = function makeGUID() {
+ // ensure that this always returns a unique 12 character string
+ let nextGUID = "fake-guid-" + String(latestGUID++).padStart(2, "0");
+ return nextGUID.slice(nextGUID.length-12, nextGUID.length);
+ };
+}
+
+/*
+ * Mock implementation of WeaveCrypto. It does not encrypt or
+ * decrypt, merely returning the input verbatim.
+ */
+this.FakeCryptoService = function FakeCryptoService() {
+ this.counter = 0;
+
+ delete Svc.Crypto; // get rid of the getter first
+ Svc.Crypto = this;
+
+ CryptoWrapper.prototype.ciphertextHMAC = function ciphertextHMAC(keyBundle) {
+ return fakeSHA256HMAC(this.ciphertext);
+ };
+}
+FakeCryptoService.prototype = {
+
+ encrypt: function encrypt(clearText, symmetricKey, iv) {
+ return clearText;
+ },
+
+ decrypt: function decrypt(cipherText, symmetricKey, iv) {
+ return cipherText;
+ },
+
+ generateRandomKey: function generateRandomKey() {
+ return btoa("fake-symmetric-key-" + this.counter++);
+ },
+
+ generateRandomIV: function generateRandomIV() {
+ // A base64-encoded IV is 24 characters long
+ return btoa("fake-fake-fake-random-iv");
+ },
+
+ expandData: function expandData(data, len) {
+ return data;
+ },
+
+ deriveKeyFromPassphrase: function deriveKeyFromPassphrase(passphrase,
+ salt, keyLength) {
+ return "some derived key string composed of bytes";
+ },
+
+ generateRandomBytes: function generateRandomBytes(byteCount) {
+ return "not-so-random-now-are-we-HA-HA-HA! >:)".slice(byteCount);
+ }
+};
+
diff --git a/services/sync/modules-testing/fxa_utils.js b/services/sync/modules-testing/fxa_utils.js
new file mode 100644
index 000000000..70aa17b03
--- /dev/null
+++ b/services/sync/modules-testing/fxa_utils.js
@@ -0,0 +1,58 @@
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "initializeIdentityWithTokenServerResponse",
+];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/main.js");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://services-common/tokenserverclient.js");
+Cu.import("resource://testing-common/services/common/logging.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+// Create a new browserid_identity object and initialize it with a
+// mocked TokenServerClient which always receives the specified response.
+this.initializeIdentityWithTokenServerResponse = function(response) {
+ // First create a mock "request" object that well' hack into the token server.
+ // A log for it
+ let requestLog = Log.repository.getLogger("testing.mock-rest");
+ if (!requestLog.appenders.length) { // might as well see what it says :)
+ requestLog.addAppender(new Log.DumpAppender());
+ requestLog.level = Log.Level.Trace;
+ }
+
+ // A mock request object.
+ function MockRESTRequest(url) {};
+ MockRESTRequest.prototype = {
+ _log: requestLog,
+ setHeader: function() {},
+ get: function(callback) {
+ this.response = response;
+ callback.call(this);
+ }
+ }
+ // The mocked TokenServer client which will get the response.
+ function MockTSC() { }
+ MockTSC.prototype = new TokenServerClient();
+ MockTSC.prototype.constructor = MockTSC;
+ MockTSC.prototype.newRESTRequest = function(url) {
+ return new MockRESTRequest(url);
+ }
+ // Arrange for the same observerPrefix as browserid_identity uses.
+ MockTSC.prototype.observerPrefix = "weave:service";
+
+ // tie it all together.
+ Weave.Status.__authManager = Weave.Service.identity = new BrowserIDManager();
+ Weave.Service._clusterManager = Weave.Service.identity.createClusterManager(Weave.Service);
+ let browseridManager = Weave.Service.identity;
+ // a sanity check
+ if (!(browseridManager instanceof BrowserIDManager)) {
+ throw new Error("sync isn't configured for browserid_identity");
+ }
+ let mockTSC = new MockTSC()
+ configureFxAccountIdentity(browseridManager);
+ browseridManager._tokenServerClient = mockTSC;
+}
diff --git a/services/sync/modules-testing/rotaryengine.js b/services/sync/modules-testing/rotaryengine.js
new file mode 100644
index 000000000..9d3bf723d
--- /dev/null
+++ b/services/sync/modules-testing/rotaryengine.js
@@ -0,0 +1,124 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "RotaryEngine",
+ "RotaryRecord",
+ "RotaryStore",
+ "RotaryTracker",
+];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+
+/*
+ * A fake engine implementation.
+ * This is used all over the place.
+ *
+ * Complete with record, store, and tracker implementations.
+ */
+
+this.RotaryRecord = function RotaryRecord(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+RotaryRecord.prototype = {
+ __proto__: CryptoWrapper.prototype
+};
+Utils.deferGetSet(RotaryRecord, "cleartext", ["denomination"]);
+
+this.RotaryStore = function RotaryStore(name, engine) {
+ Store.call(this, name, engine);
+ this.items = {};
+}
+RotaryStore.prototype = {
+ __proto__: Store.prototype,
+
+ create: function create(record) {
+ this.items[record.id] = record.denomination;
+ },
+
+ remove: function remove(record) {
+ delete this.items[record.id];
+ },
+
+ update: function update(record) {
+ this.items[record.id] = record.denomination;
+ },
+
+ itemExists: function itemExists(id) {
+ return (id in this.items);
+ },
+
+ createRecord: function createRecord(id, collection) {
+ let record = new RotaryRecord(collection, id);
+
+ if (!(id in this.items)) {
+ record.deleted = true;
+ return record;
+ }
+
+ record.denomination = this.items[id] || "Data for new record: " + id;
+ return record;
+ },
+
+ changeItemID: function changeItemID(oldID, newID) {
+ if (oldID in this.items) {
+ this.items[newID] = this.items[oldID];
+ }
+
+ delete this.items[oldID];
+ },
+
+ getAllIDs: function getAllIDs() {
+ let ids = {};
+ for (let id in this.items) {
+ ids[id] = true;
+ }
+ return ids;
+ },
+
+ wipe: function wipe() {
+ this.items = {};
+ }
+};
+
+this.RotaryTracker = function RotaryTracker(name, engine) {
+ Tracker.call(this, name, engine);
+}
+RotaryTracker.prototype = {
+ __proto__: Tracker.prototype
+};
+
+
+this.RotaryEngine = function RotaryEngine(service) {
+ SyncEngine.call(this, "Rotary", service);
+ // Ensure that the engine starts with a clean slate.
+ this.toFetch = [];
+ this.previousFailed = [];
+}
+RotaryEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _storeObj: RotaryStore,
+ _trackerObj: RotaryTracker,
+ _recordObj: RotaryRecord,
+
+ _findDupe: function _findDupe(item) {
+ // This is a semaphore used for testing proper reconciling on dupe
+ // detection.
+ if (item.id == "DUPE_INCOMING") {
+ return "DUPE_LOCAL";
+ }
+
+ for (let [id, value] of Object.entries(this._store.items)) {
+ if (item.denomination == value) {
+ return id;
+ }
+ }
+ }
+};
diff --git a/services/sync/modules-testing/utils.js b/services/sync/modules-testing/utils.js
new file mode 100644
index 000000000..261c2bb21
--- /dev/null
+++ b/services/sync/modules-testing/utils.js
@@ -0,0 +1,350 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "btoa", // It comes from a module import.
+ "encryptPayload",
+ "isConfiguredWithLegacyIdentity",
+ "ensureLegacyIdentityManager",
+ "setBasicCredentials",
+ "makeIdentityConfig",
+ "makeFxAccountsInternalMock",
+ "configureFxAccountIdentity",
+ "configureIdentity",
+ "SyncTestingInfrastructure",
+ "waitForZeroTimer",
+ "Promise", // from a module import
+ "add_identity_test",
+ "MockFxaStorageManager",
+ "AccountState", // from a module import
+ "sumHistogram",
+];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://testing-common/services/common/logging.js");
+Cu.import("resource://testing-common/services/sync/fakeservices.js");
+Cu.import("resource://gre/modules/FxAccounts.jsm");
+Cu.import("resource://gre/modules/FxAccountsClient.jsm");
+Cu.import("resource://gre/modules/FxAccountsCommon.js");
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+
+// and grab non-exported stuff via a backstage pass.
+const {AccountState} = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
+
+// A mock "storage manager" for FxAccounts that doesn't actually write anywhere.
+function MockFxaStorageManager() {
+}
+
+MockFxaStorageManager.prototype = {
+ promiseInitialized: Promise.resolve(),
+
+ initialize(accountData) {
+ this.accountData = accountData;
+ },
+
+ finalize() {
+ return Promise.resolve();
+ },
+
+ getAccountData() {
+ return Promise.resolve(this.accountData);
+ },
+
+ updateAccountData(updatedFields) {
+ for (let [name, value] of Object.entries(updatedFields)) {
+ if (value == null) {
+ delete this.accountData[name];
+ } else {
+ this.accountData[name] = value;
+ }
+ }
+ return Promise.resolve();
+ },
+
+ deleteAccountData() {
+ this.accountData = null;
+ return Promise.resolve();
+ }
+}
+
+/**
+ * First wait >100ms (nsITimers can take up to that much time to fire, so
+ * we can account for the timer in delayedAutoconnect) and then two event
+ * loop ticks (to account for the Utils.nextTick() in autoConnect).
+ */
+this.waitForZeroTimer = function waitForZeroTimer(callback) {
+ let ticks = 2;
+ function wait() {
+ if (ticks) {
+ ticks -= 1;
+ CommonUtils.nextTick(wait);
+ return;
+ }
+ callback();
+ }
+ CommonUtils.namedTimer(wait, 150, {}, "timer");
+}
+
+/**
+ * Return true if Sync is configured with the "legacy" identity provider.
+ */
+this.isConfiguredWithLegacyIdentity = function() {
+ let ns = {};
+ Cu.import("resource://services-sync/service.js", ns);
+
+ // We can't use instanceof as BrowserIDManager (the "other" identity) inherits
+ // from IdentityManager so that would return true - so check the prototype.
+ return Object.getPrototypeOf(ns.Service.identity) === IdentityManager.prototype;
+}
+
+/**
+ * Ensure Sync is configured with the "legacy" identity provider.
+ */
+this.ensureLegacyIdentityManager = function() {
+ let ns = {};
+ Cu.import("resource://services-sync/service.js", ns);
+
+ Status.__authManager = ns.Service.identity = new IdentityManager();
+ ns.Service._clusterManager = ns.Service.identity.createClusterManager(ns.Service);
+}
+
+this.setBasicCredentials =
+ function setBasicCredentials(username, password, syncKey) {
+ let ns = {};
+ Cu.import("resource://services-sync/service.js", ns);
+
+ let auth = ns.Service.identity;
+ auth.username = username;
+ auth.basicPassword = password;
+ auth.syncKey = syncKey;
+}
+
+// Return an identity configuration suitable for testing with our identity
+// providers. |overrides| can specify overrides for any default values.
+this.makeIdentityConfig = function(overrides) {
+ // first setup the defaults.
+ let result = {
+ // Username used in both fxaccount and sync identity configs.
+ username: "foo",
+ // fxaccount specific credentials.
+ fxaccount: {
+ user: {
+ assertion: 'assertion',
+ email: 'email',
+ kA: 'kA',
+ kB: 'kB',
+ sessionToken: 'sessionToken',
+ uid: "a".repeat(32),
+ verified: true,
+ },
+ token: {
+ endpoint: null,
+ duration: 300,
+ id: "id",
+ key: "key",
+ hashed_fxa_uid: "f".repeat(32), // used during telemetry validation
+ // uid will be set to the username.
+ }
+ },
+ sync: {
+ // username will come from the top-level username
+ password: "whatever",
+ syncKey: "abcdeabcdeabcdeabcdeabcdea",
+ }
+ };
+
+ // Now handle any specified overrides.
+ if (overrides) {
+ if (overrides.username) {
+ result.username = overrides.username;
+ }
+ if (overrides.sync) {
+ // TODO: allow just some attributes to be specified
+ result.sync = overrides.sync;
+ }
+ if (overrides.fxaccount) {
+ // TODO: allow just some attributes to be specified
+ result.fxaccount = overrides.fxaccount;
+ }
+ }
+ return result;
+}
+
+this.makeFxAccountsInternalMock = function(config) {
+ return {
+ newAccountState(credentials) {
+ // We only expect this to be called with null indicating the (mock)
+ // storage should be read.
+ if (credentials) {
+ throw new Error("Not expecting to have credentials passed");
+ }
+ let storageManager = new MockFxaStorageManager();
+ storageManager.initialize(config.fxaccount.user);
+ let accountState = new AccountState(storageManager);
+ return accountState;
+ },
+ _getAssertion(audience) {
+ return Promise.resolve("assertion");
+ },
+ };
+};
+
+// Configure an instance of an FxAccount identity provider with the specified
+// config (or the default config if not specified).
+this.configureFxAccountIdentity = function(authService,
+ config = makeIdentityConfig(),
+ fxaInternal = makeFxAccountsInternalMock(config)) {
+ // until we get better test infrastructure for bid_identity, we set the
+ // signedin user's "email" to the username, simply as many tests rely on this.
+ config.fxaccount.user.email = config.username;
+
+ let fxa = new FxAccounts(fxaInternal);
+
+ let MockFxAccountsClient = function() {
+ FxAccountsClient.apply(this);
+ };
+ MockFxAccountsClient.prototype = {
+ __proto__: FxAccountsClient.prototype,
+ accountStatus() {
+ return Promise.resolve(true);
+ }
+ };
+ let mockFxAClient = new MockFxAccountsClient();
+ fxa.internal._fxAccountsClient = mockFxAClient;
+
+ let mockTSC = { // TokenServerClient
+ getTokenFromBrowserIDAssertion: function(uri, assertion, cb) {
+ config.fxaccount.token.uid = config.username;
+ cb(null, config.fxaccount.token);
+ },
+ };
+ authService._fxaService = fxa;
+ authService._tokenServerClient = mockTSC;
+ // Set the "account" of the browserId manager to be the "email" of the
+ // logged in user of the mockFXA service.
+ authService._signedInUser = config.fxaccount.user;
+ authService._account = config.fxaccount.user.email;
+}
+
+this.configureIdentity = function(identityOverrides) {
+ let config = makeIdentityConfig(identityOverrides);
+ let ns = {};
+ Cu.import("resource://services-sync/service.js", ns);
+
+ if (ns.Service.identity instanceof BrowserIDManager) {
+ // do the FxAccounts thang...
+ configureFxAccountIdentity(ns.Service.identity, config);
+ return ns.Service.identity.initializeWithCurrentIdentity().then(() => {
+ // need to wait until this identity manager is readyToAuthenticate.
+ return ns.Service.identity.whenReadyToAuthenticate.promise;
+ });
+ }
+ // old style identity provider.
+ setBasicCredentials(config.username, config.sync.password, config.sync.syncKey);
+ let deferred = Promise.defer();
+ deferred.resolve();
+ return deferred.promise;
+}
+
+this.SyncTestingInfrastructure = function (server, username, password, syncKey) {
+ let ns = {};
+ Cu.import("resource://services-sync/service.js", ns);
+
+ ensureLegacyIdentityManager();
+ let config = makeIdentityConfig();
+ // XXX - hacks for the sync identity provider.
+ if (username)
+ config.username = username;
+ if (password)
+ config.sync.password = password;
+ if (syncKey)
+ config.sync.syncKey = syncKey;
+ let cb = Async.makeSpinningCallback();
+ configureIdentity(config).then(cb, cb);
+ cb.wait();
+
+ let i = server.identity;
+ let uri = i.primaryScheme + "://" + i.primaryHost + ":" +
+ i.primaryPort + "/";
+
+ ns.Service.serverURL = uri;
+ ns.Service.clusterURL = uri;
+
+ this.logStats = initTestLogging();
+ this.fakeFilesystem = new FakeFilesystemService({});
+ this.fakeGUIDService = new FakeGUIDService();
+ this.fakeCryptoService = new FakeCryptoService();
+}
+
+/**
+ * Turn WBO cleartext into fake "encrypted" payload as it goes over the wire.
+ */
+this.encryptPayload = function encryptPayload(cleartext) {
+ if (typeof cleartext == "object") {
+ cleartext = JSON.stringify(cleartext);
+ }
+
+ return {
+ ciphertext: cleartext, // ciphertext == cleartext with fake crypto
+ IV: "irrelevant",
+ hmac: fakeSHA256HMAC(cleartext, CryptoUtils.makeHMACKey("")),
+ };
+}
+
+// This helper can be used instead of 'add_test' or 'add_task' to run the
+// specified test function twice - once with the old-style sync identity
+// manager and once with the new-style BrowserID identity manager, to ensure
+// it works in both cases.
+//
+// * The test itself should be passed as 'test' - ie, test code will generally
+// pass |this|.
+// * The test function is a regular test function - although note that it must
+// be a generator - async operations should yield them, and run_next_test
+// mustn't be called.
+this.add_identity_test = function(test, testFunction) {
+ function note(what) {
+ let msg = "running test " + testFunction.name + " with " + what + " identity manager";
+ test.do_print(msg);
+ }
+ let ns = {};
+ Cu.import("resource://services-sync/service.js", ns);
+ // one task for the "old" identity manager.
+ test.add_task(function* () {
+ note("sync");
+ let oldIdentity = Status._authManager;
+ ensureLegacyIdentityManager();
+ yield testFunction();
+ Status.__authManager = ns.Service.identity = oldIdentity;
+ });
+ // another task for the FxAccounts identity manager.
+ test.add_task(function* () {
+ note("FxAccounts");
+ let oldIdentity = Status._authManager;
+ Status.__authManager = ns.Service.identity = new BrowserIDManager();
+ yield testFunction();
+ Status.__authManager = ns.Service.identity = oldIdentity;
+ });
+}
+
+this.sumHistogram = function(name, options = {}) {
+ let histogram = options.key ? Services.telemetry.getKeyedHistogramById(name) :
+ Services.telemetry.getHistogramById(name);
+ let snapshot = histogram.snapshot(options.key);
+ let sum = -Infinity;
+ if (snapshot) {
+ sum = snapshot.sum;
+ }
+ histogram.clear();
+ return sum;
+}
diff --git a/services/sync/modules/FxaMigrator.jsm b/services/sync/modules/FxaMigrator.jsm
new file mode 100644
index 000000000..735b60144
--- /dev/null
+++ b/services/sync/modules/FxaMigrator.jsm
@@ -0,0 +1,99 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict;"
+
+// Note that this module used to supervise the step-by-step migration from
+// a legacy Sync account to a FxA-based Sync account. In bug 1205928, this
+// changed to automatically disconnect the legacy Sync account.
+
+const {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+
+XPCOMUtils.defineLazyGetter(this, "WeaveService", function() {
+ return Cc["@mozilla.org/weave/service;1"]
+ .getService(Components.interfaces.nsISupports)
+ .wrappedJSObject;
+});
+
+XPCOMUtils.defineLazyModuleGetter(this, "Weave",
+ "resource://services-sync/main.js");
+
+// We send this notification when we perform the disconnection. The browser
+// window will show a one-off notification bar.
+const OBSERVER_STATE_CHANGE_TOPIC = "fxa-migration:state-changed";
+
+const OBSERVER_TOPICS = [
+ "xpcom-shutdown",
+ "weave:eol",
+];
+
+function Migrator() {
+ // Leave the log-level as Debug - Sync will setup log appenders such that
+ // these messages generally will not be seen unless other log related
+ // prefs are set.
+ this.log.level = Log.Level.Debug;
+
+ for (let topic of OBSERVER_TOPICS) {
+ Services.obs.addObserver(this, topic, false);
+ }
+}
+
+Migrator.prototype = {
+ log: Log.repository.getLogger("Sync.SyncMigration"),
+
+ finalize() {
+ for (let topic of OBSERVER_TOPICS) {
+ Services.obs.removeObserver(this, topic);
+ }
+ },
+
+ observe(subject, topic, data) {
+ this.log.debug("observed " + topic);
+ switch (topic) {
+ case "xpcom-shutdown":
+ this.finalize();
+ break;
+
+ default:
+ // this notification when configured with legacy Sync means we want to
+ // disconnect
+ if (!WeaveService.fxAccountsEnabled) {
+ this.log.info("Disconnecting from legacy Sync");
+ // Set up an observer for when the disconnection is complete.
+ let observe;
+ Services.obs.addObserver(observe = () => {
+ this.log.info("observed that startOver is complete");
+ Services.obs.removeObserver(observe, "weave:service:start-over:finish");
+ // Send the notification for the UI.
+ Services.obs.notifyObservers(null, OBSERVER_STATE_CHANGE_TOPIC, null);
+ }, "weave:service:start-over:finish", false);
+
+ // Do the disconnection.
+ Weave.Service.startOver();
+ }
+ }
+ },
+
+ get learnMoreLink() {
+ try {
+ var url = Services.prefs.getCharPref("app.support.baseURL");
+ } catch (err) {
+ return null;
+ }
+ url += "sync-upgrade";
+ let sb = Services.strings.createBundle("chrome://weave/locale/services/sync.properties");
+ return {
+ text: sb.GetStringFromName("sync.eol.learnMore.label"),
+ href: Services.urlFormatter.formatURL(url),
+ };
+ },
+};
+
+// We expose a singleton
+this.EXPORTED_SYMBOLS = ["fxaMigrator"];
+var fxaMigrator = new Migrator();
diff --git a/services/sync/modules/SyncedTabs.jsm b/services/sync/modules/SyncedTabs.jsm
new file mode 100644
index 000000000..1a69e3564
--- /dev/null
+++ b/services/sync/modules/SyncedTabs.jsm
@@ -0,0 +1,301 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = ["SyncedTabs"];
+
+
+const { classes: Cc, interfaces: Ci, results: Cr, utils: Cu } = Components;
+
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/PlacesUtils.jsm", this);
+Cu.import("resource://services-sync/main.js");
+Cu.import("resource://gre/modules/Preferences.jsm");
+
+// The Sync XPCOM service
+XPCOMUtils.defineLazyGetter(this, "weaveXPCService", function() {
+ return Cc["@mozilla.org/weave/service;1"]
+ .getService(Ci.nsISupports)
+ .wrappedJSObject;
+});
+
+// from MDN...
+function escapeRegExp(string) {
+ return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+}
+
+// A topic we fire whenever we have new tabs available. This might be due
+// to a request made by this module to refresh the tab list, or as the result
+// of a regularly scheduled sync. The intent is that consumers just listen
+// for this notification and update their UI in response.
+const TOPIC_TABS_CHANGED = "services.sync.tabs.changed";
+
+// The interval, in seconds, before which we consider the existing list
+// of tabs "fresh enough" and don't force a new sync.
+const TABS_FRESH_ENOUGH_INTERVAL = 30;
+
+let log = Log.repository.getLogger("Sync.RemoteTabs");
+// A new scope to do the logging thang...
+(function() {
+ let level = Preferences.get("services.sync.log.logger.tabs");
+ if (level) {
+ let appender = new Log.DumpAppender();
+ log.level = appender.level = Log.Level[level] || Log.Level.Debug;
+ log.addAppender(appender);
+ }
+})();
+
+
+// A private singleton that does the work.
+let SyncedTabsInternal = {
+ /* Make a "tab" record. Returns a promise */
+ _makeTab: Task.async(function* (client, tab, url, showRemoteIcons) {
+ let icon;
+ if (showRemoteIcons) {
+ icon = tab.icon;
+ }
+ if (!icon) {
+ try {
+ icon = (yield PlacesUtils.promiseFaviconLinkUrl(url)).spec;
+ } catch (ex) { /* no favicon avaiable */ }
+ }
+ if (!icon) {
+ icon = "";
+ }
+ return {
+ type: "tab",
+ title: tab.title || url,
+ url,
+ icon,
+ client: client.id,
+ lastUsed: tab.lastUsed,
+ };
+ }),
+
+ /* Make a "client" record. Returns a promise for consistency with _makeTab */
+ _makeClient: Task.async(function* (client) {
+ return {
+ id: client.id,
+ type: "client",
+ name: Weave.Service.clientsEngine.getClientName(client.id),
+ isMobile: Weave.Service.clientsEngine.isMobile(client.id),
+ lastModified: client.lastModified * 1000, // sec to ms
+ tabs: []
+ };
+ }),
+
+ _tabMatchesFilter(tab, filter) {
+ let reFilter = new RegExp(escapeRegExp(filter), "i");
+ return tab.url.match(reFilter) || tab.title.match(reFilter);
+ },
+
+ getTabClients: Task.async(function* (filter) {
+ log.info("Generating tab list with filter", filter);
+ let result = [];
+
+ // If Sync isn't ready, don't try and get anything.
+ if (!weaveXPCService.ready) {
+ log.debug("Sync isn't yet ready, so returning an empty tab list");
+ return result;
+ }
+
+ // A boolean that controls whether we should show the icon from the remote tab.
+ const showRemoteIcons = Preferences.get("services.sync.syncedTabs.showRemoteIcons", true);
+
+ let engine = Weave.Service.engineManager.get("tabs");
+
+ let seenURLs = new Set();
+ let parentIndex = 0;
+ let ntabs = 0;
+
+ for (let [guid, client] of Object.entries(engine.getAllClients())) {
+ if (!Weave.Service.clientsEngine.remoteClientExists(client.id)) {
+ continue;
+ }
+ let clientRepr = yield this._makeClient(client);
+ log.debug("Processing client", clientRepr);
+
+ for (let tab of client.tabs) {
+ let url = tab.urlHistory[0];
+ log.debug("remote tab", url);
+ // Note there are some issues with tracking "seen" tabs, including:
+ // * We really can't return the entire urlHistory record as we are
+ // only checking the first entry - others might be different.
+ // * We don't update the |lastUsed| timestamp to reflect the
+ // most-recently-seen time.
+ // In a followup we should consider simply dropping this |seenUrls|
+ // check and return duplicate records - it seems the user will be more
+ // confused by tabs not showing up on a device (because it was detected
+ // as a dupe so it only appears on a different device) than being
+ // confused by seeing the same tab on different clients.
+ if (!url || seenURLs.has(url)) {
+ continue;
+ }
+ let tabRepr = yield this._makeTab(client, tab, url, showRemoteIcons);
+ if (filter && !this._tabMatchesFilter(tabRepr, filter)) {
+ continue;
+ }
+ seenURLs.add(url);
+ clientRepr.tabs.push(tabRepr);
+ }
+ // We return all clients, even those without tabs - the consumer should
+ // filter it if they care.
+ ntabs += clientRepr.tabs.length;
+ result.push(clientRepr);
+ }
+ log.info(`Final tab list has ${result.length} clients with ${ntabs} tabs.`);
+ return result;
+ }),
+
+ syncTabs(force) {
+ if (!force) {
+ // Don't bother refetching tabs if we already did so recently
+ let lastFetch = Preferences.get("services.sync.lastTabFetch", 0);
+ let now = Math.floor(Date.now() / 1000);
+ if (now - lastFetch < TABS_FRESH_ENOUGH_INTERVAL) {
+ log.info("_refetchTabs was done recently, do not doing it again");
+ return Promise.resolve(false);
+ }
+ }
+
+ // If Sync isn't configured don't try and sync, else we will get reports
+ // of a login failure.
+ if (Weave.Status.checkSetup() == Weave.CLIENT_NOT_CONFIGURED) {
+ log.info("Sync client is not configured, so not attempting a tab sync");
+ return Promise.resolve(false);
+ }
+ // Ask Sync to just do the tabs engine if it can.
+ // Sync is currently synchronous, so do it after an event-loop spin to help
+ // keep the UI responsive.
+ return new Promise((resolve, reject) => {
+ Services.tm.currentThread.dispatch(() => {
+ try {
+ log.info("Doing a tab sync.");
+ Weave.Service.sync(["tabs"]);
+ resolve(true);
+ } catch (ex) {
+ log.error("Sync failed", ex);
+ reject(ex);
+ };
+ }, Ci.nsIThread.DISPATCH_NORMAL);
+ });
+ },
+
+ observe(subject, topic, data) {
+ log.trace(`observed topic=${topic}, data=${data}, subject=${subject}`);
+ switch (topic) {
+ case "weave:engine:sync:finish":
+ if (data != "tabs") {
+ return;
+ }
+ // The tabs engine just finished syncing
+ // Set our lastTabFetch pref here so it tracks both explicit sync calls
+ // and normally scheduled ones.
+ Preferences.set("services.sync.lastTabFetch", Math.floor(Date.now() / 1000));
+ Services.obs.notifyObservers(null, TOPIC_TABS_CHANGED, null);
+ break;
+ case "weave:service:start-over":
+ // start-over needs to notify so consumers find no tabs.
+ Preferences.reset("services.sync.lastTabFetch");
+ Services.obs.notifyObservers(null, TOPIC_TABS_CHANGED, null);
+ break;
+ case "nsPref:changed":
+ Services.obs.notifyObservers(null, TOPIC_TABS_CHANGED, null);
+ break;
+ default:
+ break;
+ }
+ },
+
+ // Returns true if Sync is configured to Sync tabs, false otherwise
+ get isConfiguredToSyncTabs() {
+ if (!weaveXPCService.ready) {
+ log.debug("Sync isn't yet ready; assuming tab engine is enabled");
+ return true;
+ }
+
+ let engine = Weave.Service.engineManager.get("tabs");
+ return engine && engine.enabled;
+ },
+
+ get hasSyncedThisSession() {
+ let engine = Weave.Service.engineManager.get("tabs");
+ return engine && engine.hasSyncedThisSession;
+ },
+};
+
+Services.obs.addObserver(SyncedTabsInternal, "weave:engine:sync:finish", false);
+Services.obs.addObserver(SyncedTabsInternal, "weave:service:start-over", false);
+// Observe the pref the indicates the state of the tabs engine has changed.
+// This will force consumers to re-evaluate the state of sync and update
+// accordingly.
+Services.prefs.addObserver("services.sync.engine.tabs", SyncedTabsInternal, false);
+
+// The public interface.
+this.SyncedTabs = {
+ // A mock-point for tests.
+ _internal: SyncedTabsInternal,
+
+ // We make the topic for the observer notification public.
+ TOPIC_TABS_CHANGED,
+
+ // Returns true if Sync is configured to Sync tabs, false otherwise
+ get isConfiguredToSyncTabs() {
+ return this._internal.isConfiguredToSyncTabs;
+ },
+
+ // Returns true if a tab sync has completed once this session. If this
+ // returns false, then getting back no clients/tabs possibly just means we
+ // are waiting for that first sync to complete.
+ get hasSyncedThisSession() {
+ return this._internal.hasSyncedThisSession;
+ },
+
+ // Return a promise that resolves with an array of client records, each with
+ // a .tabs array. Note that part of the contract for this module is that the
+ // returned objects are not shared between invocations, so callers are free
+ // to mutate the returned objects (eg, sort, truncate) however they see fit.
+ getTabClients(query) {
+ return this._internal.getTabClients(query);
+ },
+
+ // Starts a background request to start syncing tabs. Returns a promise that
+ // resolves when the sync is complete, but there's no resolved value -
+ // callers should be listening for TOPIC_TABS_CHANGED.
+ // If |force| is true we always sync. If false, we only sync if the most
+ // recent sync wasn't "recently".
+ syncTabs(force) {
+ return this._internal.syncTabs(force);
+ },
+
+ sortTabClientsByLastUsed(clients, maxTabs = Infinity) {
+ // First sort and filter the list of tabs for each client. Note that
+ // this module promises that the objects it returns are never
+ // shared, so we are free to mutate those objects directly.
+ for (let client of clients) {
+ let tabs = client.tabs;
+ tabs.sort((a, b) => b.lastUsed - a.lastUsed);
+ if (Number.isFinite(maxTabs)) {
+ client.tabs = tabs.slice(0, maxTabs);
+ }
+ }
+ // Now sort the clients - the clients are sorted in the order of the
+ // most recent tab for that client (ie, it is important the tabs for
+ // each client are already sorted.)
+ clients.sort((a, b) => {
+ if (a.tabs.length == 0) {
+ return 1; // b comes first.
+ }
+ if (b.tabs.length == 0) {
+ return -1; // a comes first.
+ }
+ return b.tabs[0].lastUsed - a.tabs[0].lastUsed;
+ });
+ },
+};
+
diff --git a/services/sync/modules/addonsreconciler.js b/services/sync/modules/addonsreconciler.js
new file mode 100644
index 000000000..a60fc8d56
--- /dev/null
+++ b/services/sync/modules/addonsreconciler.js
@@ -0,0 +1,676 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * This file contains middleware to reconcile state of AddonManager for
+ * purposes of tracking events for Sync. The content in this file exists
+ * because AddonManager does not have a getChangesSinceX() API and adding
+ * that functionality properly was deemed too time-consuming at the time
+ * add-on sync was originally written. If/when AddonManager adds this API,
+ * this file can go away and the add-ons engine can be rewritten to use it.
+ *
+ * It was decided to have this tracking functionality exist in a separate
+ * standalone file so it could be more easily understood, tested, and
+ * hopefully ported.
+ */
+
+"use strict";
+
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://gre/modules/AddonManager.jsm");
+
+const DEFAULT_STATE_FILE = "addonsreconciler";
+
+this.CHANGE_INSTALLED = 1;
+this.CHANGE_UNINSTALLED = 2;
+this.CHANGE_ENABLED = 3;
+this.CHANGE_DISABLED = 4;
+
+this.EXPORTED_SYMBOLS = ["AddonsReconciler", "CHANGE_INSTALLED",
+ "CHANGE_UNINSTALLED", "CHANGE_ENABLED",
+ "CHANGE_DISABLED"];
+/**
+ * Maintains state of add-ons.
+ *
+ * State is maintained in 2 data structures, an object mapping add-on IDs
+ * to metadata and an array of changes over time. The object mapping can be
+ * thought of as a minimal copy of data from AddonManager which is needed for
+ * Sync. The array is effectively a log of changes over time.
+ *
+ * The data structures are persisted to disk by serializing to a JSON file in
+ * the current profile. The data structures are updated by 2 mechanisms. First,
+ * they can be refreshed from the global state of the AddonManager. This is a
+ * sure-fire way of ensuring the reconciler is up to date. Second, the
+ * reconciler adds itself as an AddonManager listener. When it receives change
+ * notifications, it updates its internal state incrementally.
+ *
+ * The internal state is persisted to a JSON file in the profile directory.
+ *
+ * An instance of this is bound to an AddonsEngine instance. In reality, it
+ * likely exists as a singleton. To AddonsEngine, it functions as a store and
+ * an entity which emits events for tracking.
+ *
+ * The usage pattern for instances of this class is:
+ *
+ * let reconciler = new AddonsReconciler();
+ * reconciler.loadState(null, function(error) { ... });
+ *
+ * // At this point, your instance should be ready to use.
+ *
+ * When you are finished with the instance, please call:
+ *
+ * reconciler.stopListening();
+ * reconciler.saveState(...);
+ *
+ * There are 2 classes of listeners in the AddonManager: AddonListener and
+ * InstallListener. This class is a listener for both (member functions just
+ * get called directly).
+ *
+ * When an add-on is installed, listeners are called in the following order:
+ *
+ * IL.onInstallStarted, AL.onInstalling, IL.onInstallEnded, AL.onInstalled
+ *
+ * For non-restartless add-ons, an application restart may occur between
+ * IL.onInstallEnded and AL.onInstalled. Unfortunately, Sync likely will
+ * not be loaded when AL.onInstalled is fired shortly after application
+ * start, so it won't see this event. Therefore, for add-ons requiring a
+ * restart, Sync treats the IL.onInstallEnded event as good enough to
+ * indicate an install. For restartless add-ons, Sync assumes AL.onInstalled
+ * will follow shortly after IL.onInstallEnded and thus it ignores
+ * IL.onInstallEnded.
+ *
+ * The listeners can also see events related to the download of the add-on.
+ * This class isn't interested in those. However, there are failure events,
+ * IL.onDownloadFailed and IL.onDownloadCanceled which get called if a
+ * download doesn't complete successfully.
+ *
+ * For uninstalls, we see AL.onUninstalling then AL.onUninstalled. Like
+ * installs, the events could be separated by an application restart and Sync
+ * may not see the onUninstalled event. Again, if we require a restart, we
+ * react to onUninstalling. If not, we assume we'll get onUninstalled.
+ *
+ * Enabling and disabling work by sending:
+ *
+ * AL.onEnabling, AL.onEnabled
+ * AL.onDisabling, AL.onDisabled
+ *
+ * Again, they may be separated by a restart, so we heed the requiresRestart
+ * flag.
+ *
+ * Actions can be undone. All undoable actions notify the same
+ * AL.onOperationCancelled event. We treat this event like any other.
+ *
+ * Restartless add-ons have interesting behavior during uninstall. These
+ * add-ons are first disabled then they are actually uninstalled. So, we will
+ * see AL.onDisabling and AL.onDisabled. The onUninstalling and onUninstalled
+ * events only come after the Addon Manager is closed or another view is
+ * switched to. In the case of Sync performing the uninstall, the uninstall
+ * events will occur immediately. However, we still see disabling events and
+ * heed them like they were normal. In the end, the state is proper.
+ */
+this.AddonsReconciler = function AddonsReconciler() {
+ this._log = Log.repository.getLogger("Sync.AddonsReconciler");
+ let level = Svc.Prefs.get("log.logger.addonsreconciler", "Debug");
+ this._log.level = Log.Level[level];
+
+ Svc.Obs.add("xpcom-shutdown", this.stopListening, this);
+};
+AddonsReconciler.prototype = {
+ /** Flag indicating whether we are listening to AddonManager events. */
+ _listening: false,
+
+ /**
+ * Whether state has been loaded from a file.
+ *
+ * State is loaded on demand if an operation requires it.
+ */
+ _stateLoaded: false,
+
+ /**
+ * Define this as false if the reconciler should not persist state
+ * to disk when handling events.
+ *
+ * This allows test code to avoid spinning to write during observer
+ * notifications and xpcom shutdown, which appears to cause hangs on WinXP
+ * (Bug 873861).
+ */
+ _shouldPersist: true,
+
+ /** Log logger instance */
+ _log: null,
+
+ /**
+ * Container for add-on metadata.
+ *
+ * Keys are add-on IDs. Values are objects which describe the state of the
+ * add-on. This is a minimal mirror of data that can be queried from
+ * AddonManager. In some cases, we retain data longer than AddonManager.
+ */
+ _addons: {},
+
+ /**
+ * List of add-on changes over time.
+ *
+ * Each element is an array of [time, change, id].
+ */
+ _changes: [],
+
+ /**
+ * Objects subscribed to changes made to this instance.
+ */
+ _listeners: [],
+
+ /**
+ * Accessor for add-ons in this object.
+ *
+ * Returns an object mapping add-on IDs to objects containing metadata.
+ */
+ get addons() {
+ this._ensureStateLoaded();
+ return this._addons;
+ },
+
+ /**
+ * Load reconciler state from a file.
+ *
+ * The path is relative to the weave directory in the profile. If no
+ * path is given, the default one is used.
+ *
+ * If the file does not exist or there was an error parsing the file, the
+ * state will be transparently defined as empty.
+ *
+ * @param path
+ * Path to load. ".json" is appended automatically. If not defined,
+ * a default path will be consulted.
+ * @param callback
+ * Callback to be executed upon file load. The callback receives a
+ * truthy error argument signifying whether an error occurred and a
+ * boolean indicating whether data was loaded.
+ */
+ loadState: function loadState(path, callback) {
+ let file = path || DEFAULT_STATE_FILE;
+ Utils.jsonLoad(file, this, function(json) {
+ this._addons = {};
+ this._changes = [];
+
+ if (!json) {
+ this._log.debug("No data seen in loaded file: " + file);
+ if (callback) {
+ callback(null, false);
+ }
+
+ return;
+ }
+
+ let version = json.version;
+ if (!version || version != 1) {
+ this._log.error("Could not load JSON file because version not " +
+ "supported: " + version);
+ if (callback) {
+ callback(null, false);
+ }
+
+ return;
+ }
+
+ this._addons = json.addons;
+ for (let id in this._addons) {
+ let record = this._addons[id];
+ record.modified = new Date(record.modified);
+ }
+
+ for (let [time, change, id] of json.changes) {
+ this._changes.push([new Date(time), change, id]);
+ }
+
+ if (callback) {
+ callback(null, true);
+ }
+ });
+ },
+
+ /**
+ * Saves the current state to a file in the local profile.
+ *
+ * @param path
+ * String path in profile to save to. If not defined, the default
+ * will be used.
+ * @param callback
+ * Function to be invoked on save completion. No parameters will be
+ * passed to callback.
+ */
+ saveState: function saveState(path, callback) {
+ let file = path || DEFAULT_STATE_FILE;
+ let state = {version: 1, addons: {}, changes: []};
+
+ for (let [id, record] of Object.entries(this._addons)) {
+ state.addons[id] = {};
+ for (let [k, v] of Object.entries(record)) {
+ if (k == "modified") {
+ state.addons[id][k] = v.getTime();
+ }
+ else {
+ state.addons[id][k] = v;
+ }
+ }
+ }
+
+ for (let [time, change, id] of this._changes) {
+ state.changes.push([time.getTime(), change, id]);
+ }
+
+ this._log.info("Saving reconciler state to file: " + file);
+ Utils.jsonSave(file, this, state, callback);
+ },
+
+ /**
+ * Registers a change listener with this instance.
+ *
+ * Change listeners are called every time a change is recorded. The listener
+ * is an object with the function "changeListener" that takes 3 arguments,
+ * the Date at which the change happened, the type of change (a CHANGE_*
+ * constant), and the add-on state object reflecting the current state of
+ * the add-on at the time of the change.
+ *
+ * @param listener
+ * Object containing changeListener function.
+ */
+ addChangeListener: function addChangeListener(listener) {
+ if (this._listeners.indexOf(listener) == -1) {
+ this._log.debug("Adding change listener.");
+ this._listeners.push(listener);
+ }
+ },
+
+ /**
+ * Removes a previously-installed change listener from the instance.
+ *
+ * @param listener
+ * Listener instance to remove.
+ */
+ removeChangeListener: function removeChangeListener(listener) {
+ this._listeners = this._listeners.filter(function(element) {
+ if (element == listener) {
+ this._log.debug("Removing change listener.");
+ return false;
+ } else {
+ return true;
+ }
+ }.bind(this));
+ },
+
+ /**
+ * Tells the instance to start listening for AddonManager changes.
+ *
+ * This is typically called automatically when Sync is loaded.
+ */
+ startListening: function startListening() {
+ if (this._listening) {
+ return;
+ }
+
+ this._log.info("Registering as Add-on Manager listener.");
+ AddonManager.addAddonListener(this);
+ AddonManager.addInstallListener(this);
+ this._listening = true;
+ },
+
+ /**
+ * Tells the instance to stop listening for AddonManager changes.
+ *
+ * The reconciler should always be listening. This should only be called when
+ * the instance is being destroyed.
+ *
+ * This function will get called automatically on XPCOM shutdown. However, it
+ * is a best practice to call it yourself.
+ */
+ stopListening: function stopListening() {
+ if (!this._listening) {
+ return;
+ }
+
+ this._log.debug("Stopping listening and removing AddonManager listeners.");
+ AddonManager.removeInstallListener(this);
+ AddonManager.removeAddonListener(this);
+ this._listening = false;
+ },
+
+ /**
+ * Refreshes the global state of add-ons by querying the AddonManager.
+ */
+ refreshGlobalState: function refreshGlobalState(callback) {
+ this._log.info("Refreshing global state from AddonManager.");
+ this._ensureStateLoaded();
+
+ let installs;
+
+ AddonManager.getAllAddons(function (addons) {
+ let ids = {};
+
+ for (let addon of addons) {
+ ids[addon.id] = true;
+ this.rectifyStateFromAddon(addon);
+ }
+
+ // Look for locally-defined add-ons that no longer exist and update their
+ // record.
+ for (let [id, addon] of Object.entries(this._addons)) {
+ if (id in ids) {
+ continue;
+ }
+
+ // If the id isn't in ids, it means that the add-on has been deleted or
+ // the add-on is in the process of being installed. We detect the
+ // latter by seeing if an AddonInstall is found for this add-on.
+
+ if (!installs) {
+ let cb = Async.makeSyncCallback();
+ AddonManager.getAllInstalls(cb);
+ installs = Async.waitForSyncCallback(cb);
+ }
+
+ let installFound = false;
+ for (let install of installs) {
+ if (install.addon && install.addon.id == id &&
+ install.state == AddonManager.STATE_INSTALLED) {
+
+ installFound = true;
+ break;
+ }
+ }
+
+ if (installFound) {
+ continue;
+ }
+
+ if (addon.installed) {
+ addon.installed = false;
+ this._log.debug("Adding change because add-on not present in " +
+ "Add-on Manager: " + id);
+ this._addChange(new Date(), CHANGE_UNINSTALLED, addon);
+ }
+ }
+
+ // See note for _shouldPersist.
+ if (this._shouldPersist) {
+ this.saveState(null, callback);
+ } else {
+ callback();
+ }
+ }.bind(this));
+ },
+
+ /**
+ * Rectifies the state of an add-on from an Addon instance.
+ *
+ * This basically says "given an Addon instance, assume it is truth and
+ * apply changes to the local state to reflect it."
+ *
+ * This function could result in change listeners being called if the local
+ * state differs from the passed add-on's state.
+ *
+ * @param addon
+ * Addon instance being updated.
+ */
+ rectifyStateFromAddon: function rectifyStateFromAddon(addon) {
+ this._log.debug(`Rectifying state for addon ${addon.name} (version=${addon.version}, id=${addon.id})`);
+ this._ensureStateLoaded();
+
+ let id = addon.id;
+ let enabled = !addon.userDisabled;
+ let guid = addon.syncGUID;
+ let now = new Date();
+
+ if (!(id in this._addons)) {
+ let record = {
+ id: id,
+ guid: guid,
+ enabled: enabled,
+ installed: true,
+ modified: now,
+ type: addon.type,
+ scope: addon.scope,
+ foreignInstall: addon.foreignInstall,
+ isSyncable: addon.isSyncable,
+ };
+ this._addons[id] = record;
+ this._log.debug("Adding change because add-on not present locally: " +
+ id);
+ this._addChange(now, CHANGE_INSTALLED, record);
+ return;
+ }
+
+ let record = this._addons[id];
+ record.isSyncable = addon.isSyncable;
+
+ if (!record.installed) {
+ // It is possible the record is marked as uninstalled because an
+ // uninstall is pending.
+ if (!(addon.pendingOperations & AddonManager.PENDING_UNINSTALL)) {
+ record.installed = true;
+ record.modified = now;
+ }
+ }
+
+ if (record.enabled != enabled) {
+ record.enabled = enabled;
+ record.modified = now;
+ let change = enabled ? CHANGE_ENABLED : CHANGE_DISABLED;
+ this._log.debug("Adding change because enabled state changed: " + id);
+ this._addChange(new Date(), change, record);
+ }
+
+ if (record.guid != guid) {
+ record.guid = guid;
+ // We don't record a change because the Sync engine rectifies this on its
+ // own. This is tightly coupled with Sync. If this code is ever lifted
+ // outside of Sync, this exception should likely be removed.
+ }
+ },
+
+ /**
+ * Record a change in add-on state.
+ *
+ * @param date
+ * Date at which the change occurred.
+ * @param change
+ * The type of the change. A CHANGE_* constant.
+ * @param state
+ * The new state of the add-on. From this.addons.
+ */
+ _addChange: function _addChange(date, change, state) {
+ this._log.info("Change recorded for " + state.id);
+ this._changes.push([date, change, state.id]);
+
+ for (let listener of this._listeners) {
+ try {
+ listener.changeListener.call(listener, date, change, state);
+ } catch (ex) {
+ this._log.warn("Exception calling change listener", ex);
+ }
+ }
+ },
+
+ /**
+ * Obtain the set of changes to add-ons since the date passed.
+ *
+ * This will return an array of arrays. Each entry in the array has the
+ * elements [date, change_type, id], where
+ *
+ * date - Date instance representing when the change occurred.
+ * change_type - One of CHANGE_* constants.
+ * id - ID of add-on that changed.
+ */
+ getChangesSinceDate: function getChangesSinceDate(date) {
+ this._ensureStateLoaded();
+
+ let length = this._changes.length;
+ for (let i = 0; i < length; i++) {
+ if (this._changes[i][0] >= date) {
+ return this._changes.slice(i);
+ }
+ }
+
+ return [];
+ },
+
+ /**
+ * Prunes all recorded changes from before the specified Date.
+ *
+ * @param date
+ * Entries older than this Date will be removed.
+ */
+ pruneChangesBeforeDate: function pruneChangesBeforeDate(date) {
+ this._ensureStateLoaded();
+
+ this._changes = this._changes.filter(function test_age(change) {
+ return change[0] >= date;
+ });
+ },
+
+ /**
+ * Obtains the set of all known Sync GUIDs for add-ons.
+ *
+ * @return Object with guids as keys and values of true.
+ */
+ getAllSyncGUIDs: function getAllSyncGUIDs() {
+ let result = {};
+ for (let id in this.addons) {
+ result[id] = true;
+ }
+
+ return result;
+ },
+
+ /**
+ * Obtain the add-on state record for an add-on by Sync GUID.
+ *
+ * If the add-on could not be found, returns null.
+ *
+ * @param guid
+ * Sync GUID of add-on to retrieve.
+ * @return Object on success on null on failure.
+ */
+ getAddonStateFromSyncGUID: function getAddonStateFromSyncGUID(guid) {
+ for (let id in this.addons) {
+ let addon = this.addons[id];
+ if (addon.guid == guid) {
+ return addon;
+ }
+ }
+
+ return null;
+ },
+
+ /**
+ * Ensures that state is loaded before continuing.
+ *
+ * This is called internally by anything that accesses the internal data
+ * structures. It effectively just-in-time loads serialized state.
+ */
+ _ensureStateLoaded: function _ensureStateLoaded() {
+ if (this._stateLoaded) {
+ return;
+ }
+
+ let cb = Async.makeSpinningCallback();
+ this.loadState(null, cb);
+ cb.wait();
+ this._stateLoaded = true;
+ },
+
+ /**
+ * Handler that is invoked as part of the AddonManager listeners.
+ */
+ _handleListener: function _handlerListener(action, addon, requiresRestart) {
+ // Since this is called as an observer, we explicitly trap errors and
+ // log them to ourselves so we don't see errors reported elsewhere.
+ try {
+ let id = addon.id;
+ this._log.debug("Add-on change: " + action + " to " + id);
+
+ // We assume that every event for non-restartless add-ons is
+ // followed by another event and that this follow-up event is the most
+ // appropriate to react to. Currently we ignore onEnabling, onDisabling,
+ // and onUninstalling for non-restartless add-ons.
+ if (requiresRestart === false) {
+ this._log.debug("Ignoring " + action + " for restartless add-on.");
+ return;
+ }
+
+ switch (action) {
+ case "onEnabling":
+ case "onEnabled":
+ case "onDisabling":
+ case "onDisabled":
+ case "onInstalled":
+ case "onInstallEnded":
+ case "onOperationCancelled":
+ this.rectifyStateFromAddon(addon);
+ break;
+
+ case "onUninstalling":
+ case "onUninstalled":
+ let id = addon.id;
+ let addons = this.addons;
+ if (id in addons) {
+ let now = new Date();
+ let record = addons[id];
+ record.installed = false;
+ record.modified = now;
+ this._log.debug("Adding change because of uninstall listener: " +
+ id);
+ this._addChange(now, CHANGE_UNINSTALLED, record);
+ }
+ }
+
+ // See note for _shouldPersist.
+ if (this._shouldPersist) {
+ let cb = Async.makeSpinningCallback();
+ this.saveState(null, cb);
+ cb.wait();
+ }
+ }
+ catch (ex) {
+ this._log.warn("Exception", ex);
+ }
+ },
+
+ // AddonListeners
+ onEnabling: function onEnabling(addon, requiresRestart) {
+ this._handleListener("onEnabling", addon, requiresRestart);
+ },
+ onEnabled: function onEnabled(addon) {
+ this._handleListener("onEnabled", addon);
+ },
+ onDisabling: function onDisabling(addon, requiresRestart) {
+ this._handleListener("onDisabling", addon, requiresRestart);
+ },
+ onDisabled: function onDisabled(addon) {
+ this._handleListener("onDisabled", addon);
+ },
+ onInstalling: function onInstalling(addon, requiresRestart) {
+ this._handleListener("onInstalling", addon, requiresRestart);
+ },
+ onInstalled: function onInstalled(addon) {
+ this._handleListener("onInstalled", addon);
+ },
+ onUninstalling: function onUninstalling(addon, requiresRestart) {
+ this._handleListener("onUninstalling", addon, requiresRestart);
+ },
+ onUninstalled: function onUninstalled(addon) {
+ this._handleListener("onUninstalled", addon);
+ },
+ onOperationCancelled: function onOperationCancelled(addon) {
+ this._handleListener("onOperationCancelled", addon);
+ },
+
+ // InstallListeners
+ onInstallEnded: function onInstallEnded(install, addon) {
+ this._handleListener("onInstallEnded", addon);
+ }
+};
diff --git a/services/sync/modules/addonutils.js b/services/sync/modules/addonutils.js
new file mode 100644
index 000000000..95da6be0a
--- /dev/null
+++ b/services/sync/modules/addonutils.js
@@ -0,0 +1,506 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = ["AddonUtils"];
+
+var {interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/util.js");
+
+XPCOMUtils.defineLazyModuleGetter(this, "AddonManager",
+ "resource://gre/modules/AddonManager.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "AddonRepository",
+ "resource://gre/modules/addons/AddonRepository.jsm");
+
+function AddonUtilsInternal() {
+ this._log = Log.repository.getLogger("Sync.AddonUtils");
+ this._log.Level = Log.Level[Svc.Prefs.get("log.logger.addonutils")];
+}
+AddonUtilsInternal.prototype = {
+ /**
+ * Obtain an AddonInstall object from an AddonSearchResult instance.
+ *
+ * The callback will be invoked with the result of the operation. The
+ * callback receives 2 arguments, error and result. Error will be falsy
+ * on success or some kind of error value otherwise. The result argument
+ * will be an AddonInstall on success or null on failure. It is possible
+ * for the error to be falsy but result to be null. This could happen if
+ * an install was not found.
+ *
+ * @param addon
+ * AddonSearchResult to obtain install from.
+ * @param cb
+ * Function to be called with result of operation.
+ */
+ getInstallFromSearchResult:
+ function getInstallFromSearchResult(addon, cb) {
+
+ this._log.debug("Obtaining install for " + addon.id);
+
+ // We should theoretically be able to obtain (and use) addon.install if
+ // it is available. However, the addon.sourceURI rewriting won't be
+ // reflected in the AddonInstall, so we can't use it. If we ever get rid
+ // of sourceURI rewriting, we can avoid having to reconstruct the
+ // AddonInstall.
+ AddonManager.getInstallForURL(
+ addon.sourceURI.spec,
+ function handleInstall(install) {
+ cb(null, install);
+ },
+ "application/x-xpinstall",
+ undefined,
+ addon.name,
+ addon.iconURL,
+ addon.version
+ );
+ },
+
+ /**
+ * Installs an add-on from an AddonSearchResult instance.
+ *
+ * The options argument defines extra options to control the install.
+ * Recognized keys in this map are:
+ *
+ * syncGUID - Sync GUID to use for the new add-on.
+ * enabled - Boolean indicating whether the add-on should be enabled upon
+ * install.
+ *
+ * When complete it calls a callback with 2 arguments, error and result.
+ *
+ * If error is falsy, result is an object. If error is truthy, result is
+ * null.
+ *
+ * The result object has the following keys:
+ *
+ * id ID of add-on that was installed.
+ * install AddonInstall that was installed.
+ * addon Addon that was installed.
+ *
+ * @param addon
+ * AddonSearchResult to install add-on from.
+ * @param options
+ * Object with additional metadata describing how to install add-on.
+ * @param cb
+ * Function to be invoked with result of operation.
+ */
+ installAddonFromSearchResult:
+ function installAddonFromSearchResult(addon, options, cb) {
+ this._log.info("Trying to install add-on from search result: " + addon.id);
+
+ this.getInstallFromSearchResult(addon, function onResult(error, install) {
+ if (error) {
+ cb(error, null);
+ return;
+ }
+
+ if (!install) {
+ cb(new Error("AddonInstall not available: " + addon.id), null);
+ return;
+ }
+
+ try {
+ this._log.info("Installing " + addon.id);
+ let log = this._log;
+
+ let listener = {
+ onInstallStarted: function onInstallStarted(install) {
+ if (!options) {
+ return;
+ }
+
+ if (options.syncGUID) {
+ log.info("Setting syncGUID of " + install.name +": " +
+ options.syncGUID);
+ install.addon.syncGUID = options.syncGUID;
+ }
+
+ // We only need to change userDisabled if it is disabled because
+ // enabled is the default.
+ if ("enabled" in options && !options.enabled) {
+ log.info("Marking add-on as disabled for install: " +
+ install.name);
+ install.addon.userDisabled = true;
+ }
+ },
+ onInstallEnded: function(install, addon) {
+ install.removeListener(listener);
+
+ cb(null, {id: addon.id, install: install, addon: addon});
+ },
+ onInstallFailed: function(install) {
+ install.removeListener(listener);
+
+ cb(new Error("Install failed: " + install.error), null);
+ },
+ onDownloadFailed: function(install) {
+ install.removeListener(listener);
+
+ cb(new Error("Download failed: " + install.error), null);
+ }
+ };
+ install.addListener(listener);
+ install.install();
+ }
+ catch (ex) {
+ this._log.error("Error installing add-on", ex);
+ cb(ex, null);
+ }
+ }.bind(this));
+ },
+
+ /**
+ * Uninstalls the Addon instance and invoke a callback when it is done.
+ *
+ * @param addon
+ * Addon instance to uninstall.
+ * @param cb
+ * Function to be invoked when uninstall has finished. It receives a
+ * truthy value signifying error and the add-on which was uninstalled.
+ */
+ uninstallAddon: function uninstallAddon(addon, cb) {
+ let listener = {
+ onUninstalling: function(uninstalling, needsRestart) {
+ if (addon.id != uninstalling.id) {
+ return;
+ }
+
+ // We assume restartless add-ons will send the onUninstalled event
+ // soon.
+ if (!needsRestart) {
+ return;
+ }
+
+ // For non-restartless add-ons, we issue the callback on uninstalling
+ // because we will likely never see the uninstalled event.
+ AddonManager.removeAddonListener(listener);
+ cb(null, addon);
+ },
+ onUninstalled: function(uninstalled) {
+ if (addon.id != uninstalled.id) {
+ return;
+ }
+
+ AddonManager.removeAddonListener(listener);
+ cb(null, addon);
+ }
+ };
+ AddonManager.addAddonListener(listener);
+ addon.uninstall();
+ },
+
+ /**
+ * Installs multiple add-ons specified by metadata.
+ *
+ * The first argument is an array of objects. Each object must have the
+ * following keys:
+ *
+ * id - public ID of the add-on to install.
+ * syncGUID - syncGUID for new add-on.
+ * enabled - boolean indicating whether the add-on should be enabled.
+ * requireSecureURI - Boolean indicating whether to require a secure
+ * URI when installing from a remote location. This defaults to
+ * true.
+ *
+ * The callback will be called when activity on all add-ons is complete. The
+ * callback receives 2 arguments, error and result.
+ *
+ * If error is truthy, it contains a string describing the overall error.
+ *
+ * The 2nd argument to the callback is always an object with details on the
+ * overall execution state. It contains the following keys:
+ *
+ * installedIDs Array of add-on IDs that were installed.
+ * installs Array of AddonInstall instances that were installed.
+ * addons Array of Addon instances that were installed.
+ * errors Array of errors encountered. Only has elements if error is
+ * truthy.
+ *
+ * @param installs
+ * Array of objects describing add-ons to install.
+ * @param cb
+ * Function to be called when all actions are complete.
+ */
+ installAddons: function installAddons(installs, cb) {
+ if (!cb) {
+ throw new Error("Invalid argument: cb is not defined.");
+ }
+
+ let ids = [];
+ for (let addon of installs) {
+ ids.push(addon.id);
+ }
+
+ AddonRepository.getAddonsByIDs(ids, {
+ searchSucceeded: function searchSucceeded(addons, addonsLength, total) {
+ this._log.info("Found " + addonsLength + "/" + ids.length +
+ " add-ons during repository search.");
+
+ let ourResult = {
+ installedIDs: [],
+ installs: [],
+ addons: [],
+ skipped: [],
+ errors: []
+ };
+
+ if (!addonsLength) {
+ cb(null, ourResult);
+ return;
+ }
+
+ let expectedInstallCount = 0;
+ let finishedCount = 0;
+ let installCallback = function installCallback(error, result) {
+ finishedCount++;
+
+ if (error) {
+ ourResult.errors.push(error);
+ } else {
+ ourResult.installedIDs.push(result.id);
+ ourResult.installs.push(result.install);
+ ourResult.addons.push(result.addon);
+ }
+
+ if (finishedCount >= expectedInstallCount) {
+ if (ourResult.errors.length > 0) {
+ cb(new Error("1 or more add-ons failed to install"), ourResult);
+ } else {
+ cb(null, ourResult);
+ }
+ }
+ }.bind(this);
+
+ let toInstall = [];
+
+ // Rewrite the "src" query string parameter of the source URI to note
+ // that the add-on was installed by Sync and not something else so
+ // server-side metrics aren't skewed (bug 708134). The server should
+ // ideally send proper URLs, but this solution was deemed too
+ // complicated at the time the functionality was implemented.
+ for (let addon of addons) {
+ // Find the specified options for this addon.
+ let options;
+ for (let install of installs) {
+ if (install.id == addon.id) {
+ options = install;
+ break;
+ }
+ }
+ if (!this.canInstallAddon(addon, options)) {
+ ourResult.skipped.push(addon.id);
+ continue;
+ }
+
+ // We can go ahead and attempt to install it.
+ toInstall.push(addon);
+
+ // We should always be able to QI the nsIURI to nsIURL. If not, we
+ // still try to install the add-on, but we don't rewrite the URL,
+ // potentially skewing metrics.
+ try {
+ addon.sourceURI.QueryInterface(Ci.nsIURL);
+ } catch (ex) {
+ this._log.warn("Unable to QI sourceURI to nsIURL: " +
+ addon.sourceURI.spec);
+ continue;
+ }
+
+ let params = addon.sourceURI.query.split("&").map(
+ function rewrite(param) {
+
+ if (param.indexOf("src=") == 0) {
+ return "src=sync";
+ } else {
+ return param;
+ }
+ });
+
+ addon.sourceURI.query = params.join("&");
+ }
+
+ expectedInstallCount = toInstall.length;
+
+ if (!expectedInstallCount) {
+ cb(null, ourResult);
+ return;
+ }
+
+ // Start all the installs asynchronously. They will report back to us
+ // as they finish, eventually triggering the global callback.
+ for (let addon of toInstall) {
+ let options = {};
+ for (let install of installs) {
+ if (install.id == addon.id) {
+ options = install;
+ break;
+ }
+ }
+
+ this.installAddonFromSearchResult(addon, options, installCallback);
+ }
+
+ }.bind(this),
+
+ searchFailed: function searchFailed() {
+ cb(new Error("AddonRepository search failed"), null);
+ },
+ });
+ },
+
+ /**
+ * Returns true if we are able to install the specified addon, false
+ * otherwise. It is expected that this will log the reason if it returns
+ * false.
+ *
+ * @param addon
+ * (Addon) Add-on instance to check.
+ * @param options
+ * (object) The options specified for this addon. See installAddons()
+ * for the valid elements.
+ */
+ canInstallAddon(addon, options) {
+ // sourceURI presence isn't enforced by AddonRepository. So, we skip
+ // add-ons without a sourceURI.
+ if (!addon.sourceURI) {
+ this._log.info("Skipping install of add-on because missing " +
+ "sourceURI: " + addon.id);
+ return false;
+ }
+ // Verify that the source URI uses TLS. We don't allow installs from
+ // insecure sources for security reasons. The Addon Manager ensures
+ // that cert validation etc is performed.
+ // (We should also consider just dropping this entirely and calling
+ // XPIProvider.isInstallAllowed, but that has additional semantics we might
+ // need to think through...)
+ let requireSecureURI = true;
+ if (options && options.requireSecureURI !== undefined) {
+ requireSecureURI = options.requireSecureURI;
+ }
+
+ if (requireSecureURI) {
+ let scheme = addon.sourceURI.scheme;
+ if (scheme != "https") {
+ this._log.info(`Skipping install of add-on "${addon.id}" because sourceURI's scheme of "${scheme}" is not trusted`);
+ return false;
+ }
+ }
+ this._log.info(`Add-on "${addon.id}" is able to be installed`);
+ return true;
+ },
+
+
+ /**
+ * Update the user disabled flag for an add-on.
+ *
+ * The supplied callback will be called when the operation is
+ * complete. If the new flag matches the existing or if the add-on
+ * isn't currently active, the function will fire the callback
+ * immediately. Else, the callback is invoked when the AddonManager
+ * reports the change has taken effect or has been registered.
+ *
+ * The callback receives as arguments:
+ *
+ * (Error) Encountered error during operation or null on success.
+ * (Addon) The add-on instance being operated on.
+ *
+ * @param addon
+ * (Addon) Add-on instance to operate on.
+ * @param value
+ * (bool) New value for add-on's userDisabled property.
+ * @param cb
+ * (function) Callback to be invoked on completion.
+ */
+ updateUserDisabled: function updateUserDisabled(addon, value, cb) {
+ if (addon.userDisabled == value) {
+ cb(null, addon);
+ return;
+ }
+
+ let listener = {
+ onEnabling: function onEnabling(wrapper, needsRestart) {
+ this._log.debug("onEnabling: " + wrapper.id);
+ if (wrapper.id != addon.id) {
+ return;
+ }
+
+ // We ignore the restartless case because we'll get onEnabled shortly.
+ if (!needsRestart) {
+ return;
+ }
+
+ AddonManager.removeAddonListener(listener);
+ cb(null, wrapper);
+ }.bind(this),
+
+ onEnabled: function onEnabled(wrapper) {
+ this._log.debug("onEnabled: " + wrapper.id);
+ if (wrapper.id != addon.id) {
+ return;
+ }
+
+ AddonManager.removeAddonListener(listener);
+ cb(null, wrapper);
+ }.bind(this),
+
+ onDisabling: function onDisabling(wrapper, needsRestart) {
+ this._log.debug("onDisabling: " + wrapper.id);
+ if (wrapper.id != addon.id) {
+ return;
+ }
+
+ if (!needsRestart) {
+ return;
+ }
+
+ AddonManager.removeAddonListener(listener);
+ cb(null, wrapper);
+ }.bind(this),
+
+ onDisabled: function onDisabled(wrapper) {
+ this._log.debug("onDisabled: " + wrapper.id);
+ if (wrapper.id != addon.id) {
+ return;
+ }
+
+ AddonManager.removeAddonListener(listener);
+ cb(null, wrapper);
+ }.bind(this),
+
+ onOperationCancelled: function onOperationCancelled(wrapper) {
+ this._log.debug("onOperationCancelled: " + wrapper.id);
+ if (wrapper.id != addon.id) {
+ return;
+ }
+
+ AddonManager.removeAddonListener(listener);
+ cb(new Error("Operation cancelled"), wrapper);
+ }.bind(this)
+ };
+
+ // The add-on listeners are only fired if the add-on is active. If not, the
+ // change is silently updated and made active when/if the add-on is active.
+
+ if (!addon.appDisabled) {
+ AddonManager.addAddonListener(listener);
+ }
+
+ this._log.info("Updating userDisabled flag: " + addon.id + " -> " + value);
+ addon.userDisabled = !!value;
+
+ if (!addon.appDisabled) {
+ cb(null, addon);
+ return;
+ }
+ // Else the listener will handle invoking the callback.
+ },
+
+};
+
+XPCOMUtils.defineLazyGetter(this, "AddonUtils", function() {
+ return new AddonUtilsInternal();
+});
diff --git a/services/sync/modules/bookmark_validator.js b/services/sync/modules/bookmark_validator.js
new file mode 100644
index 000000000..2a94ba043
--- /dev/null
+++ b/services/sync/modules/bookmark_validator.js
@@ -0,0 +1,784 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+const Cu = Components.utils;
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+
+
+this.EXPORTED_SYMBOLS = ["BookmarkValidator", "BookmarkProblemData"];
+
+const LEFT_PANE_ROOT_ANNO = "PlacesOrganizer/OrganizerFolder";
+const LEFT_PANE_QUERY_ANNO = "PlacesOrganizer/OrganizerQuery";
+
+// Indicates if a local bookmark tree node should be excluded from syncing.
+function isNodeIgnored(treeNode) {
+ return treeNode.annos && treeNode.annos.some(anno => anno.name == LEFT_PANE_ROOT_ANNO ||
+ anno.name == LEFT_PANE_QUERY_ANNO);
+}
+const BOOKMARK_VALIDATOR_VERSION = 1;
+
+/**
+ * Result of bookmark validation. Contains the following fields which describe
+ * server-side problems unless otherwise specified.
+ *
+ * - missingIDs (number): # of objects with missing ids
+ * - duplicates (array of ids): ids seen more than once
+ * - parentChildMismatches (array of {parent: parentid, child: childid}):
+ * instances where the child's parentid and the parent's children array
+ * do not match
+ * - cycles (array of array of ids). List of cycles found in the server-side tree.
+ * - clientCycles (array of array of ids). List of cycles found in the client-side tree.
+ * - orphans (array of {id: string, parent: string}): List of nodes with
+ * either no parentid, or where the parent could not be found.
+ * - missingChildren (array of {parent: id, child: id}):
+ * List of parent/children where the child id couldn't be found
+ * - deletedChildren (array of { parent: id, child: id }):
+ * List of parent/children where child id was a deleted item (but still showed up
+ * in the children array)
+ * - multipleParents (array of {child: id, parents: array of ids}):
+ * List of children that were part of multiple parent arrays
+ * - deletedParents (array of ids) : List of records that aren't deleted but
+ * had deleted parents
+ * - childrenOnNonFolder (array of ids): list of non-folders that still have
+ * children arrays
+ * - duplicateChildren (array of ids): list of records who have the same
+ * child listed multiple times in their children array
+ * - parentNotFolder (array of ids): list of records that have parents that
+ * aren't folders
+ * - rootOnServer (boolean): true if the root came from the server
+ * - badClientRoots (array of ids): Contains any client-side root ids where
+ * the root is missing or isn't a (direct) child of the places root.
+ *
+ * - clientMissing: Array of ids on the server missing from the client
+ * - serverMissing: Array of ids on the client missing from the server
+ * - serverDeleted: Array of ids on the client that the server had marked as deleted.
+ * - serverUnexpected: Array of ids that appear on the server but shouldn't
+ * because the client attempts to never upload them.
+ * - differences: Array of {id: string, differences: string array} recording
+ * the non-structural properties that are differente between the client and server
+ * - structuralDifferences: As above, but contains the items where the differences were
+ * structural, that is, they contained childGUIDs or parentid
+ */
+class BookmarkProblemData {
+ constructor() {
+ this.rootOnServer = false;
+ this.missingIDs = 0;
+
+ this.duplicates = [];
+ this.parentChildMismatches = [];
+ this.cycles = [];
+ this.clientCycles = [];
+ this.orphans = [];
+ this.missingChildren = [];
+ this.deletedChildren = [];
+ this.multipleParents = [];
+ this.deletedParents = [];
+ this.childrenOnNonFolder = [];
+ this.duplicateChildren = [];
+ this.parentNotFolder = [];
+
+ this.badClientRoots = [];
+ this.clientMissing = [];
+ this.serverMissing = [];
+ this.serverDeleted = [];
+ this.serverUnexpected = [];
+ this.differences = [];
+ this.structuralDifferences = [];
+ }
+
+ /**
+ * Convert ("difference", [{ differences: ["tags", "name"] }, { differences: ["name"] }]) into
+ * [{ name: "difference:tags", count: 1}, { name: "difference:name", count: 2 }], etc.
+ */
+ _summarizeDifferences(prefix, diffs) {
+ let diffCounts = new Map();
+ for (let { differences } of diffs) {
+ for (let type of differences) {
+ let name = prefix + ":" + type;
+ let count = diffCounts.get(name) || 0;
+ diffCounts.set(name, count + 1);
+ }
+ }
+ return [...diffCounts].map(([name, count]) => ({ name, count }));
+ }
+
+ /**
+ * Produce a list summarizing problems found. Each entry contains {name, count},
+ * where name is the field name for the problem, and count is the number of times
+ * the problem was encountered.
+ *
+ * Validation has failed if all counts are not 0.
+ *
+ * If the `full` argument is truthy, we also include information about which
+ * properties we saw structural differences in. Currently, this means either
+ * "sdiff:parentid" and "sdiff:childGUIDS" may be present.
+ */
+ getSummary(full) {
+ let result = [
+ { name: "clientMissing", count: this.clientMissing.length },
+ { name: "serverMissing", count: this.serverMissing.length },
+ { name: "serverDeleted", count: this.serverDeleted.length },
+ { name: "serverUnexpected", count: this.serverUnexpected.length },
+
+ { name: "structuralDifferences", count: this.structuralDifferences.length },
+ { name: "differences", count: this.differences.length },
+
+ { name: "missingIDs", count: this.missingIDs },
+ { name: "rootOnServer", count: this.rootOnServer ? 1 : 0 },
+
+ { name: "duplicates", count: this.duplicates.length },
+ { name: "parentChildMismatches", count: this.parentChildMismatches.length },
+ { name: "cycles", count: this.cycles.length },
+ { name: "clientCycles", count: this.clientCycles.length },
+ { name: "badClientRoots", count: this.badClientRoots.length },
+ { name: "orphans", count: this.orphans.length },
+ { name: "missingChildren", count: this.missingChildren.length },
+ { name: "deletedChildren", count: this.deletedChildren.length },
+ { name: "multipleParents", count: this.multipleParents.length },
+ { name: "deletedParents", count: this.deletedParents.length },
+ { name: "childrenOnNonFolder", count: this.childrenOnNonFolder.length },
+ { name: "duplicateChildren", count: this.duplicateChildren.length },
+ { name: "parentNotFolder", count: this.parentNotFolder.length },
+ ];
+ if (full) {
+ let structural = this._summarizeDifferences("sdiff", this.structuralDifferences);
+ result.push.apply(result, structural);
+ }
+ return result;
+ }
+}
+
+// Defined lazily to avoid initializing PlacesUtils.bookmarks too soon.
+XPCOMUtils.defineLazyGetter(this, "SYNCED_ROOTS", () => [
+ PlacesUtils.bookmarks.menuGuid,
+ PlacesUtils.bookmarks.toolbarGuid,
+ PlacesUtils.bookmarks.unfiledGuid,
+ PlacesUtils.bookmarks.mobileGuid,
+]);
+
+class BookmarkValidator {
+
+ _followQueries(recordMap) {
+ for (let [guid, entry] of recordMap) {
+ if (entry.type !== "query" && (!entry.bmkUri || !entry.bmkUri.startsWith("place:"))) {
+ continue;
+ }
+ // Might be worth trying to parse the place: query instead so that this
+ // works "automatically" with things like aboutsync.
+ let queryNodeParent = PlacesUtils.getFolderContents(entry, false, true);
+ if (!queryNodeParent || !queryNodeParent.root.hasChildren) {
+ continue;
+ }
+ queryNodeParent = queryNodeParent.root;
+ let queryNode = null;
+ let numSiblings = 0;
+ let containerWasOpen = queryNodeParent.containerOpen;
+ queryNodeParent.containerOpen = true;
+ try {
+ try {
+ numSiblings = queryNodeParent.childCount;
+ } catch (e) {
+ // This throws when we can't actually get the children. This is the
+ // case for history containers, tag queries, ...
+ continue;
+ }
+ for (let i = 0; i < numSiblings && !queryNode; ++i) {
+ let child = queryNodeParent.getChild(i);
+ if (child && child.bookmarkGuid && child.bookmarkGuid === guid) {
+ queryNode = child;
+ }
+ }
+ } finally {
+ queryNodeParent.containerOpen = containerWasOpen;
+ }
+ if (!queryNode) {
+ continue;
+ }
+
+ let concreteId = PlacesUtils.getConcreteItemGuid(queryNode);
+ if (!concreteId) {
+ continue;
+ }
+ let concreteItem = recordMap.get(concreteId);
+ if (!concreteItem) {
+ continue;
+ }
+ entry.concrete = concreteItem;
+ }
+ }
+
+ createClientRecordsFromTree(clientTree) {
+ // Iterate over the treeNode, converting it to something more similar to what
+ // the server stores.
+ let records = [];
+ let recordsByGuid = new Map();
+ let syncedRoots = SYNCED_ROOTS;
+ function traverse(treeNode, synced) {
+ if (!synced) {
+ synced = syncedRoots.includes(treeNode.guid);
+ } else if (isNodeIgnored(treeNode)) {
+ synced = false;
+ }
+ let guid = PlacesSyncUtils.bookmarks.guidToSyncId(treeNode.guid);
+ let itemType = 'item';
+ treeNode.ignored = !synced;
+ treeNode.id = guid;
+ switch (treeNode.type) {
+ case PlacesUtils.TYPE_X_MOZ_PLACE:
+ let query = null;
+ if (treeNode.annos && treeNode.uri.startsWith("place:")) {
+ query = treeNode.annos.find(({name}) =>
+ name === PlacesSyncUtils.bookmarks.SMART_BOOKMARKS_ANNO);
+ }
+ if (query && query.value) {
+ itemType = 'query';
+ } else {
+ itemType = 'bookmark';
+ }
+ break;
+ case PlacesUtils.TYPE_X_MOZ_PLACE_CONTAINER:
+ let isLivemark = false;
+ if (treeNode.annos) {
+ for (let anno of treeNode.annos) {
+ if (anno.name === PlacesUtils.LMANNO_FEEDURI) {
+ isLivemark = true;
+ treeNode.feedUri = anno.value;
+ } else if (anno.name === PlacesUtils.LMANNO_SITEURI) {
+ isLivemark = true;
+ treeNode.siteUri = anno.value;
+ }
+ }
+ }
+ itemType = isLivemark ? "livemark" : "folder";
+ break;
+ case PlacesUtils.TYPE_X_MOZ_PLACE_SEPARATOR:
+ itemType = 'separator';
+ break;
+ }
+
+ if (treeNode.tags) {
+ treeNode.tags = treeNode.tags.split(",");
+ } else {
+ treeNode.tags = [];
+ }
+ treeNode.type = itemType;
+ treeNode.pos = treeNode.index;
+ treeNode.bmkUri = treeNode.uri;
+ records.push(treeNode);
+ // We want to use the "real" guid here.
+ recordsByGuid.set(treeNode.guid, treeNode);
+ if (treeNode.type === 'folder') {
+ treeNode.childGUIDs = [];
+ if (!treeNode.children) {
+ treeNode.children = [];
+ }
+ for (let child of treeNode.children) {
+ traverse(child, synced);
+ child.parent = treeNode;
+ child.parentid = guid;
+ treeNode.childGUIDs.push(child.guid);
+ }
+ }
+ }
+ traverse(clientTree, false);
+ clientTree.id = 'places';
+ this._followQueries(recordsByGuid);
+ return records;
+ }
+
+ /**
+ * Process the server-side list. Mainly this builds the records into a tree,
+ * but it also records information about problems, and produces arrays of the
+ * deleted and non-deleted nodes.
+ *
+ * Returns an object containing:
+ * - records:Array of non-deleted records. Each record contains the following
+ * properties
+ * - childGUIDs (array of strings, only present if type is 'folder'): the
+ * list of child GUIDs stored on the server.
+ * - children (array of records, only present if type is 'folder'):
+ * each record has these same properties. This may differ in content
+ * from what you may expect from the childGUIDs list, as it won't
+ * contain any records that could not be found.
+ * - parent (record): The parent to this record.
+ * - Unchanged properties send down from the server: id, title, type,
+ * parentName, parentid, bmkURI, keyword, tags, pos, queryId, loadInSidebar
+ * - root: Root of the server-side bookmark tree. Has the same properties as
+ * above.
+ * - deletedRecords: As above, but only contains items that the server sent
+ * where it also sent indication that the item should be deleted.
+ * - problemData: a BookmarkProblemData object, with the caveat that
+ * the fields describing client/server relationship will not have been filled
+ * out yet.
+ */
+ inspectServerRecords(serverRecords) {
+ let deletedItemIds = new Set();
+ let idToRecord = new Map();
+ let deletedRecords = [];
+
+ let folders = [];
+ let problems = [];
+
+ let problemData = new BookmarkProblemData();
+
+ let resultRecords = [];
+
+ for (let record of serverRecords) {
+ if (!record.id) {
+ ++problemData.missingIDs;
+ continue;
+ }
+ if (record.deleted) {
+ deletedItemIds.add(record.id);
+ } else {
+ if (idToRecord.has(record.id)) {
+ problemData.duplicates.push(record.id);
+ continue;
+ }
+ }
+ idToRecord.set(record.id, record);
+
+ if (record.children) {
+ if (record.type !== "folder") {
+ // Due to implementation details in engines/bookmarks.js, (Livemark
+ // subclassing BookmarkFolder) Livemarks will have a children array,
+ // but it should still be empty.
+ if (!record.children.length) {
+ continue;
+ }
+ // Otherwise we mark it as an error and still try to resolve the children
+ problemData.childrenOnNonFolder.push(record.id);
+ }
+ folders.push(record);
+
+ if (new Set(record.children).size !== record.children.length) {
+ problemData.duplicateChildren.push(record.id)
+ }
+
+ // The children array stores special guids as their local guid values,
+ // e.g. 'menu________' instead of 'menu', but all other parts of the
+ // serverside bookmark info stores it as the special value ('menu').
+ record.childGUIDs = record.children;
+ record.children = record.children.map(childID => {
+ return PlacesSyncUtils.bookmarks.guidToSyncId(childID);
+ });
+ }
+ }
+
+ for (let deletedId of deletedItemIds) {
+ let record = idToRecord.get(deletedId);
+ if (record && !record.isDeleted) {
+ deletedRecords.push(record);
+ record.isDeleted = true;
+ }
+ }
+
+ let root = idToRecord.get('places');
+
+ if (!root) {
+ // Fabricate a root. We want to remember that it's fake so that we can
+ // avoid complaining about stuff like it missing it's childGUIDs later.
+ root = { id: 'places', children: [], type: 'folder', title: '', fake: true };
+ resultRecords.push(root);
+ idToRecord.set('places', root);
+ } else {
+ problemData.rootOnServer = true;
+ }
+
+ // Build the tree, find orphans, and record most problems having to do with
+ // the tree structure.
+ for (let [id, record] of idToRecord) {
+ if (record === root) {
+ continue;
+ }
+
+ if (record.isDeleted) {
+ continue;
+ }
+
+ let parentID = record.parentid;
+ if (!parentID) {
+ problemData.orphans.push({id: record.id, parent: parentID});
+ continue;
+ }
+
+ let parent = idToRecord.get(parentID);
+ if (!parent) {
+ problemData.orphans.push({id: record.id, parent: parentID});
+ continue;
+ }
+
+ if (parent.type !== 'folder') {
+ problemData.parentNotFolder.push(record.id);
+ if (!parent.children) {
+ parent.children = [];
+ }
+ if (!parent.childGUIDs) {
+ parent.childGUIDs = [];
+ }
+ }
+
+ if (!record.isDeleted) {
+ resultRecords.push(record);
+ }
+
+ record.parent = parent;
+ if (parent !== root || problemData.rootOnServer) {
+ let childIndex = parent.children.indexOf(id);
+ if (childIndex < 0) {
+ problemData.parentChildMismatches.push({parent: parent.id, child: record.id});
+ } else {
+ parent.children[childIndex] = record;
+ }
+ } else {
+ parent.children.push(record);
+ }
+
+ if (parent.isDeleted && !record.isDeleted) {
+ problemData.deletedParents.push(record.id);
+ }
+
+ // We used to check if the parentName on the server matches the actual
+ // local parent name, but given this is used only for de-duping a record
+ // the first time it is seen and expensive to keep up-to-date, we decided
+ // to just stop recording it. See bug 1276969 for more.
+ }
+
+ // Check that we aren't missing any children.
+ for (let folder of folders) {
+ folder.unfilteredChildren = folder.children;
+ folder.children = [];
+ for (let ci = 0; ci < folder.unfilteredChildren.length; ++ci) {
+ let child = folder.unfilteredChildren[ci];
+ let childObject;
+ if (typeof child == "string") {
+ // This can happen the parent refers to a child that has a different
+ // parentid, or if it refers to a missing or deleted child. It shouldn't
+ // be possible with totally valid bookmarks.
+ childObject = idToRecord.get(child);
+ if (!childObject) {
+ problemData.missingChildren.push({parent: folder.id, child});
+ } else {
+ folder.unfilteredChildren[ci] = childObject;
+ if (childObject.isDeleted) {
+ problemData.deletedChildren.push({ parent: folder.id, child });
+ }
+ }
+ } else {
+ childObject = child;
+ }
+
+ if (!childObject) {
+ continue;
+ }
+
+ if (childObject.parentid === folder.id) {
+ folder.children.push(childObject);
+ continue;
+ }
+
+ // The child is very probably in multiple `children` arrays --
+ // see if we already have a problem record about it.
+ let currentProblemRecord = problemData.multipleParents.find(pr =>
+ pr.child === child);
+
+ if (currentProblemRecord) {
+ currentProblemRecord.parents.push(folder.id);
+ continue;
+ }
+
+ let otherParent = idToRecord.get(childObject.parentid);
+ // it's really an ... orphan ... sort of.
+ if (!otherParent) {
+ // if we never end up adding to this parent's list, we filter it out after this loop.
+ problemData.multipleParents.push({
+ child,
+ parents: [folder.id]
+ });
+ if (!problemData.orphans.some(r => r.id === child)) {
+ problemData.orphans.push({
+ id: child,
+ parent: childObject.parentid
+ });
+ }
+ continue;
+ }
+
+ if (otherParent.isDeleted) {
+ if (!problemData.deletedParents.includes(child)) {
+ problemData.deletedParents.push(child);
+ }
+ continue;
+ }
+
+ if (otherParent.childGUIDs && !otherParent.childGUIDs.includes(child)) {
+ if (!problemData.parentChildMismatches.some(r => r.child === child)) {
+ // Might not be possible to get here.
+ problemData.parentChildMismatches.push({ child, parent: folder.id });
+ }
+ }
+
+ problemData.multipleParents.push({
+ child,
+ parents: [childObject.parentid, folder.id]
+ });
+ }
+ }
+ problemData.multipleParents = problemData.multipleParents.filter(record =>
+ record.parents.length >= 2);
+
+ problemData.cycles = this._detectCycles(resultRecords);
+
+ return {
+ deletedRecords,
+ records: resultRecords,
+ problemData,
+ root,
+ };
+ }
+
+ // helper for inspectServerRecords
+ _detectCycles(records) {
+ // currentPath and pathLookup contain the same data. pathLookup is faster to
+ // query, but currentPath gives is the order of traversal that we need in
+ // order to report the members of the cycles.
+ let pathLookup = new Set();
+ let currentPath = [];
+ let cycles = [];
+ let seenEver = new Set();
+ const traverse = node => {
+ if (pathLookup.has(node)) {
+ let cycleStart = currentPath.lastIndexOf(node);
+ let cyclePath = currentPath.slice(cycleStart).map(n => n.id);
+ cycles.push(cyclePath);
+ return;
+ } else if (seenEver.has(node)) {
+ // If we're checking the server, this is a problem, but it should already be reported.
+ // On the client, this could happen due to including `node.concrete` in the child list.
+ return;
+ }
+ seenEver.add(node);
+ let children = node.children || [];
+ if (node.concrete) {
+ children.push(node.concrete);
+ }
+ if (children) {
+ pathLookup.add(node);
+ currentPath.push(node);
+ for (let child of children) {
+ traverse(child);
+ }
+ currentPath.pop();
+ pathLookup.delete(node);
+ }
+ };
+ for (let record of records) {
+ if (!seenEver.has(record)) {
+ traverse(record);
+ }
+ }
+
+ return cycles;
+ }
+
+ // Perform client-side sanity checking that doesn't involve server data
+ _validateClient(problemData, clientRecords) {
+ problemData.clientCycles = this._detectCycles(clientRecords);
+ for (let rootGUID of SYNCED_ROOTS) {
+ let record = clientRecords.find(record =>
+ record.guid === rootGUID);
+ if (!record || record.parentid !== "places") {
+ problemData.badClientRoots.push(rootGUID);
+ }
+ }
+ }
+
+ /**
+ * Compare the list of server records with the client tree.
+ *
+ * Returns the same data as described in the inspectServerRecords comment,
+ * with the following additional fields.
+ * - clientRecords: an array of client records in a similar format to
+ * the .records (ie, server records) entry.
+ * - problemData is the same as for inspectServerRecords, except all properties
+ * will be filled out.
+ */
+ compareServerWithClient(serverRecords, clientTree) {
+
+ let clientRecords = this.createClientRecordsFromTree(clientTree);
+ let inspectionInfo = this.inspectServerRecords(serverRecords);
+ inspectionInfo.clientRecords = clientRecords;
+
+ // Mainly do this to remove deleted items and normalize child guids.
+ serverRecords = inspectionInfo.records;
+ let problemData = inspectionInfo.problemData;
+
+ this._validateClient(problemData, clientRecords);
+
+ let matches = [];
+
+ let allRecords = new Map();
+ let serverDeletedLookup = new Set(inspectionInfo.deletedRecords.map(r => r.id));
+
+ for (let sr of serverRecords) {
+ if (sr.fake) {
+ continue;
+ }
+ allRecords.set(sr.id, {client: null, server: sr});
+ }
+
+ for (let cr of clientRecords) {
+ let unified = allRecords.get(cr.id);
+ if (!unified) {
+ allRecords.set(cr.id, {client: cr, server: null});
+ } else {
+ unified.client = cr;
+ }
+ }
+
+
+ for (let [id, {client, server}] of allRecords) {
+ if (!client && server) {
+ problemData.clientMissing.push(id);
+ continue;
+ }
+ if (!server && client) {
+ if (serverDeletedLookup.has(id)) {
+ problemData.serverDeleted.push(id);
+ } else if (!client.ignored && client.id != "places") {
+ problemData.serverMissing.push(id);
+ }
+ continue;
+ }
+ if (server && client && client.ignored) {
+ problemData.serverUnexpected.push(id);
+ }
+ let differences = [];
+ let structuralDifferences = [];
+
+ // Don't bother comparing titles of roots. It's okay if locally it's
+ // "Mobile Bookmarks", but the server thinks it's "mobile".
+ // TODO: We probably should be handing other localized bookmarks (e.g.
+ // default bookmarks) here as well, see bug 1316041.
+ if (!SYNCED_ROOTS.includes(client.guid)) {
+ // We want to treat undefined, null and an empty string as identical
+ if ((client.title || "") !== (server.title || "")) {
+ differences.push("title");
+ }
+ }
+
+ if (client.parentid || server.parentid) {
+ if (client.parentid !== server.parentid) {
+ structuralDifferences.push('parentid');
+ }
+ }
+
+ if (client.tags || server.tags) {
+ let cl = client.tags || [];
+ let sl = server.tags || [];
+ if (cl.length !== sl.length || !cl.every((tag, i) => sl.indexOf(tag) >= 0)) {
+ differences.push('tags');
+ }
+ }
+
+ let sameType = client.type === server.type;
+ if (!sameType) {
+ if (server.type === "query" && client.type === "bookmark" && client.bmkUri.startsWith("place:")) {
+ sameType = true;
+ }
+ }
+
+
+ if (!sameType) {
+ differences.push('type');
+ } else {
+ switch (server.type) {
+ case 'bookmark':
+ case 'query':
+ if (server.bmkUri !== client.bmkUri) {
+ differences.push('bmkUri');
+ }
+ break;
+ case "livemark":
+ if (server.feedUri != client.feedUri) {
+ differences.push("feedUri");
+ }
+ if (server.siteUri != client.siteUri) {
+ differences.push("siteUri");
+ }
+ break;
+ case 'folder':
+ if (server.id === 'places' && !problemData.rootOnServer) {
+ // It's the fabricated places root. It won't have the GUIDs, but
+ // it doesn't matter.
+ break;
+ }
+ if (client.childGUIDs || server.childGUIDs) {
+ let cl = client.childGUIDs || [];
+ let sl = server.childGUIDs || [];
+ if (cl.length !== sl.length || !cl.every((id, i) => sl[i] === id)) {
+ structuralDifferences.push('childGUIDs');
+ }
+ }
+ break;
+ }
+ }
+
+ if (differences.length) {
+ problemData.differences.push({id, differences});
+ }
+ if (structuralDifferences.length) {
+ problemData.structuralDifferences.push({ id, differences: structuralDifferences });
+ }
+ }
+ return inspectionInfo;
+ }
+
+ _getServerState(engine) {
+ let collection = engine.itemSource();
+ let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
+ collection.full = true;
+ let items = [];
+ collection.recordHandler = function(item) {
+ item.decrypt(collectionKey);
+ items.push(item.cleartext);
+ };
+ let resp = collection.getBatched();
+ if (!resp.success) {
+ throw resp;
+ }
+ return items;
+ }
+
+ validate(engine) {
+ let self = this;
+ return Task.spawn(function*() {
+ let start = Date.now();
+ let clientTree = yield PlacesUtils.promiseBookmarksTree("", {
+ includeItemIds: true
+ });
+ let serverState = self._getServerState(engine);
+ let serverRecordCount = serverState.length;
+ let result = self.compareServerWithClient(serverState, clientTree);
+ let end = Date.now();
+ let duration = end-start;
+ return {
+ duration,
+ version: self.version,
+ problems: result.problemData,
+ recordCount: serverRecordCount
+ };
+ });
+ }
+
+};
+
+BookmarkValidator.prototype.version = BOOKMARK_VALIDATOR_VERSION;
+
diff --git a/services/sync/modules/browserid_identity.js b/services/sync/modules/browserid_identity.js
new file mode 100644
index 000000000..db3821518
--- /dev/null
+++ b/services/sync/modules/browserid_identity.js
@@ -0,0 +1,869 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = ["BrowserIDManager", "AuthenticationError"];
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-common/tokenserverclient.js");
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/tokenserverclient.js");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://services-sync/stages/cluster.js");
+Cu.import("resource://gre/modules/FxAccounts.jsm");
+
+// Lazy imports to prevent unnecessary load on startup.
+XPCOMUtils.defineLazyModuleGetter(this, "Weave",
+ "resource://services-sync/main.js");
+
+XPCOMUtils.defineLazyModuleGetter(this, "BulkKeyBundle",
+ "resource://services-sync/keys.js");
+
+XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts",
+ "resource://gre/modules/FxAccounts.jsm");
+
+XPCOMUtils.defineLazyGetter(this, 'log', function() {
+ let log = Log.repository.getLogger("Sync.BrowserIDManager");
+ log.level = Log.Level[Svc.Prefs.get("log.logger.identity")] || Log.Level.Error;
+ return log;
+});
+
+// FxAccountsCommon.js doesn't use a "namespace", so create one here.
+var fxAccountsCommon = {};
+Cu.import("resource://gre/modules/FxAccountsCommon.js", fxAccountsCommon);
+
+const OBSERVER_TOPICS = [
+ fxAccountsCommon.ONLOGIN_NOTIFICATION,
+ fxAccountsCommon.ONLOGOUT_NOTIFICATION,
+ fxAccountsCommon.ON_ACCOUNT_STATE_CHANGE_NOTIFICATION,
+];
+
+const PREF_SYNC_SHOW_CUSTOMIZATION = "services.sync-setup.ui.showCustomizationDialog";
+
+function deriveKeyBundle(kB) {
+ let out = CryptoUtils.hkdf(kB, undefined,
+ "identity.mozilla.com/picl/v1/oldsync", 2*32);
+ let bundle = new BulkKeyBundle();
+ // [encryptionKey, hmacKey]
+ bundle.keyPair = [out.slice(0, 32), out.slice(32, 64)];
+ return bundle;
+}
+
+/*
+ General authentication error for abstracting authentication
+ errors from multiple sources (e.g., from FxAccounts, TokenServer).
+ details is additional details about the error - it might be a string, or
+ some other error object (which should do the right thing when toString() is
+ called on it)
+*/
+function AuthenticationError(details, source) {
+ this.details = details;
+ this.source = source;
+}
+
+AuthenticationError.prototype = {
+ toString: function() {
+ return "AuthenticationError(" + this.details + ")";
+ }
+}
+
+this.BrowserIDManager = function BrowserIDManager() {
+ // NOTE: _fxaService and _tokenServerClient are replaced with mocks by
+ // the test suite.
+ this._fxaService = fxAccounts;
+ this._tokenServerClient = new TokenServerClient();
+ this._tokenServerClient.observerPrefix = "weave:service";
+ // will be a promise that resolves when we are ready to authenticate
+ this.whenReadyToAuthenticate = null;
+ this._log = log;
+};
+
+this.BrowserIDManager.prototype = {
+ __proto__: IdentityManager.prototype,
+
+ _fxaService: null,
+ _tokenServerClient: null,
+ // https://docs.services.mozilla.com/token/apis.html
+ _token: null,
+ _signedInUser: null, // the signedinuser we got from FxAccounts.
+
+ // null if no error, otherwise a LOGIN_FAILED_* value that indicates why
+ // we failed to authenticate (but note it might not be an actual
+ // authentication problem, just a transient network error or similar)
+ _authFailureReason: null,
+
+ // it takes some time to fetch a sync key bundle, so until this flag is set,
+ // we don't consider the lack of a keybundle as a failure state.
+ _shouldHaveSyncKeyBundle: false,
+
+ get needsCustomization() {
+ try {
+ return Services.prefs.getBoolPref(PREF_SYNC_SHOW_CUSTOMIZATION);
+ } catch (e) {
+ return false;
+ }
+ },
+
+ hashedUID() {
+ if (!this._token) {
+ throw new Error("hashedUID: Don't have token");
+ }
+ return this._token.hashed_fxa_uid
+ },
+
+ deviceID() {
+ return this._signedInUser && this._signedInUser.deviceId;
+ },
+
+ initialize: function() {
+ for (let topic of OBSERVER_TOPICS) {
+ Services.obs.addObserver(this, topic, false);
+ }
+ // and a background fetch of account data just so we can set this.account,
+ // so we have a username available before we've actually done a login.
+ // XXX - this is actually a hack just for tests and really shouldn't be
+ // necessary. Also, you'd think it would be safe to allow this.account to
+ // be set to null when there's no user logged in, but argue with the test
+ // suite, not with me :)
+ this._fxaService.getSignedInUser().then(accountData => {
+ if (accountData) {
+ this.account = accountData.email;
+ }
+ }).catch(err => {
+ // As above, this is only for tests so it is safe to ignore.
+ });
+ },
+
+ /**
+ * Ensure the user is logged in. Returns a promise that resolves when
+ * the user is logged in, or is rejected if the login attempt has failed.
+ */
+ ensureLoggedIn: function() {
+ if (!this._shouldHaveSyncKeyBundle && this.whenReadyToAuthenticate) {
+ // We are already in the process of logging in.
+ return this.whenReadyToAuthenticate.promise;
+ }
+
+ // If we are already happy then there is nothing more to do.
+ if (this._syncKeyBundle) {
+ return Promise.resolve();
+ }
+
+ // Similarly, if we have a previous failure that implies an explicit
+ // re-entering of credentials by the user is necessary we don't take any
+ // further action - an observer will fire when the user does that.
+ if (Weave.Status.login == LOGIN_FAILED_LOGIN_REJECTED) {
+ return Promise.reject(new Error("User needs to re-authenticate"));
+ }
+
+ // So - we've a previous auth problem and aren't currently attempting to
+ // log in - so fire that off.
+ this.initializeWithCurrentIdentity();
+ return this.whenReadyToAuthenticate.promise;
+ },
+
+ finalize: function() {
+ // After this is called, we can expect Service.identity != this.
+ for (let topic of OBSERVER_TOPICS) {
+ Services.obs.removeObserver(this, topic);
+ }
+ this.resetCredentials();
+ this._signedInUser = null;
+ },
+
+ offerSyncOptions: function () {
+ // If the user chose to "Customize sync options" when signing
+ // up with Firefox Accounts, ask them to choose what to sync.
+ const url = "chrome://browser/content/sync/customize.xul";
+ const features = "centerscreen,chrome,modal,dialog,resizable=no";
+ let win = Services.wm.getMostRecentWindow("navigator:browser");
+
+ let data = {accepted: false};
+ win.openDialog(url, "_blank", features, data);
+
+ return data;
+ },
+
+ initializeWithCurrentIdentity: function(isInitialSync=false) {
+ // While this function returns a promise that resolves once we've started
+ // the auth process, that process is complete when
+ // this.whenReadyToAuthenticate.promise resolves.
+ this._log.trace("initializeWithCurrentIdentity");
+
+ // Reset the world before we do anything async.
+ this.whenReadyToAuthenticate = Promise.defer();
+ this.whenReadyToAuthenticate.promise.catch(err => {
+ this._log.error("Could not authenticate", err);
+ });
+
+ // initializeWithCurrentIdentity() can be called after the
+ // identity module was first initialized, e.g., after the
+ // user completes a force authentication, so we should make
+ // sure all credentials are reset before proceeding.
+ this.resetCredentials();
+ this._authFailureReason = null;
+
+ return this._fxaService.getSignedInUser().then(accountData => {
+ if (!accountData) {
+ this._log.info("initializeWithCurrentIdentity has no user logged in");
+ this.account = null;
+ // and we are as ready as we can ever be for auth.
+ this._shouldHaveSyncKeyBundle = true;
+ this.whenReadyToAuthenticate.reject("no user is logged in");
+ return;
+ }
+
+ this.account = accountData.email;
+ this._updateSignedInUser(accountData);
+ // The user must be verified before we can do anything at all; we kick
+ // this and the rest of initialization off in the background (ie, we
+ // don't return the promise)
+ this._log.info("Waiting for user to be verified.");
+ this._fxaService.whenVerified(accountData).then(accountData => {
+ this._updateSignedInUser(accountData);
+ this._log.info("Starting fetch for key bundle.");
+ if (this.needsCustomization) {
+ let data = this.offerSyncOptions();
+ if (data.accepted) {
+ Services.prefs.clearUserPref(PREF_SYNC_SHOW_CUSTOMIZATION);
+
+ // Mark any non-selected engines as declined.
+ Weave.Service.engineManager.declineDisabled();
+ } else {
+ // Log out if the user canceled the dialog.
+ return this._fxaService.signOut();
+ }
+ }
+ }).then(() => {
+ return this._fetchTokenForUser();
+ }).then(token => {
+ this._token = token;
+ this._shouldHaveSyncKeyBundle = true; // and we should actually have one...
+ this.whenReadyToAuthenticate.resolve();
+ this._log.info("Background fetch for key bundle done");
+ Weave.Status.login = LOGIN_SUCCEEDED;
+ if (isInitialSync) {
+ this._log.info("Doing initial sync actions");
+ Svc.Prefs.set("firstSync", "resetClient");
+ Services.obs.notifyObservers(null, "weave:service:setup-complete", null);
+ Weave.Utils.nextTick(Weave.Service.sync, Weave.Service);
+ }
+ }).catch(authErr => {
+ // report what failed...
+ this._log.error("Background fetch for key bundle failed", authErr);
+ this._shouldHaveSyncKeyBundle = true; // but we probably don't have one...
+ this.whenReadyToAuthenticate.reject(authErr);
+ });
+ // and we are done - the fetch continues on in the background...
+ }).catch(err => {
+ this._log.error("Processing logged in account", err);
+ });
+ },
+
+ _updateSignedInUser: function(userData) {
+ // This object should only ever be used for a single user. It is an
+ // error to update the data if the user changes (but updates are still
+ // necessary, as each call may add more attributes to the user).
+ // We start with no user, so an initial update is always ok.
+ if (this._signedInUser && this._signedInUser.email != userData.email) {
+ throw new Error("Attempting to update to a different user.")
+ }
+ this._signedInUser = userData;
+ },
+
+ logout: function() {
+ // This will be called when sync fails (or when the account is being
+ // unlinked etc). It may have failed because we got a 401 from a sync
+ // server, so we nuke the token. Next time sync runs and wants an
+ // authentication header, we will notice the lack of the token and fetch a
+ // new one.
+ this._token = null;
+ },
+
+ observe: function (subject, topic, data) {
+ this._log.debug("observed " + topic);
+ switch (topic) {
+ case fxAccountsCommon.ONLOGIN_NOTIFICATION:
+ // This should only happen if we've been initialized without a current
+ // user - otherwise we'd have seen the LOGOUT notification and been
+ // thrown away.
+ // The exception is when we've initialized with a user that needs to
+ // reauth with the server - in that case we will also get here, but
+ // should have the same identity.
+ // initializeWithCurrentIdentity will throw and log if these constraints
+ // aren't met (indirectly, via _updateSignedInUser()), so just go ahead
+ // and do the init.
+ this.initializeWithCurrentIdentity(true);
+ break;
+
+ case fxAccountsCommon.ONLOGOUT_NOTIFICATION:
+ Weave.Service.startOver();
+ // startOver will cause this instance to be thrown away, so there's
+ // nothing else to do.
+ break;
+
+ case fxAccountsCommon.ON_ACCOUNT_STATE_CHANGE_NOTIFICATION:
+ // throw away token and fetch a new one
+ this.resetCredentials();
+ this._ensureValidToken().catch(err =>
+ this._log.error("Error while fetching a new token", err));
+ break;
+ }
+ },
+
+ /**
+ * Compute the sha256 of the message bytes. Return bytes.
+ */
+ _sha256: function(message) {
+ let hasher = Cc["@mozilla.org/security/hash;1"]
+ .createInstance(Ci.nsICryptoHash);
+ hasher.init(hasher.SHA256);
+ return CryptoUtils.digestBytes(message, hasher);
+ },
+
+ /**
+ * Compute the X-Client-State header given the byte string kB.
+ *
+ * Return string: hex(first16Bytes(sha256(kBbytes)))
+ */
+ _computeXClientState: function(kBbytes) {
+ return CommonUtils.bytesAsHex(this._sha256(kBbytes).slice(0, 16), false);
+ },
+
+ /**
+ * Provide override point for testing token expiration.
+ */
+ _now: function() {
+ return this._fxaService.now()
+ },
+
+ get _localtimeOffsetMsec() {
+ return this._fxaService.localtimeOffsetMsec;
+ },
+
+ usernameFromAccount: function(val) {
+ // we don't differentiate between "username" and "account"
+ return val;
+ },
+
+ /**
+ * Obtains the HTTP Basic auth password.
+ *
+ * Returns a string if set or null if it is not set.
+ */
+ get basicPassword() {
+ this._log.error("basicPassword getter should be not used in BrowserIDManager");
+ return null;
+ },
+
+ /**
+ * Set the HTTP basic password to use.
+ *
+ * Changes will not persist unless persistSyncCredentials() is called.
+ */
+ set basicPassword(value) {
+ throw "basicPassword setter should be not used in BrowserIDManager";
+ },
+
+ /**
+ * Obtain the Sync Key.
+ *
+ * This returns a 26 character "friendly" Base32 encoded string on success or
+ * null if no Sync Key could be found.
+ *
+ * If the Sync Key hasn't been set in this session, this will look in the
+ * password manager for the sync key.
+ */
+ get syncKey() {
+ if (this.syncKeyBundle) {
+ // TODO: This is probably fine because the code shouldn't be
+ // using the sync key directly (it should use the sync key
+ // bundle), but I don't like it. We should probably refactor
+ // code that is inspecting this to not do validation on this
+ // field directly and instead call a isSyncKeyValid() function
+ // that we can override.
+ return "99999999999999999999999999";
+ }
+ else {
+ return null;
+ }
+ },
+
+ set syncKey(value) {
+ throw "syncKey setter should be not used in BrowserIDManager";
+ },
+
+ get syncKeyBundle() {
+ return this._syncKeyBundle;
+ },
+
+ /**
+ * Resets/Drops all credentials we hold for the current user.
+ */
+ resetCredentials: function() {
+ this.resetSyncKey();
+ this._token = null;
+ // The cluster URL comes from the token, so resetting it to empty will
+ // force Sync to not accidentally use a value from an earlier token.
+ Weave.Service.clusterURL = null;
+ },
+
+ /**
+ * Resets/Drops the sync key we hold for the current user.
+ */
+ resetSyncKey: function() {
+ this._syncKey = null;
+ this._syncKeyBundle = null;
+ this._syncKeyUpdated = true;
+ this._shouldHaveSyncKeyBundle = false;
+ },
+
+ /**
+ * Pre-fetches any information that might help with migration away from this
+ * identity. Called after every sync and is really just an optimization that
+ * allows us to avoid a network request for when we actually need the
+ * migration info.
+ */
+ prefetchMigrationSentinel: function(service) {
+ // nothing to do here until we decide to migrate away from FxA.
+ },
+
+ /**
+ * Return credentials hosts for this identity only.
+ */
+ _getSyncCredentialsHosts: function() {
+ return Utils.getSyncCredentialsHostsFxA();
+ },
+
+ /**
+ * The current state of the auth credentials.
+ *
+ * This essentially validates that enough credentials are available to use
+ * Sync. It doesn't check we have all the keys we need as the master-password
+ * may have been locked when we tried to get them - we rely on
+ * unlockAndVerifyAuthState to check that for us.
+ */
+ get currentAuthState() {
+ if (this._authFailureReason) {
+ this._log.info("currentAuthState returning " + this._authFailureReason +
+ " due to previous failure");
+ return this._authFailureReason;
+ }
+ // TODO: need to revisit this. Currently this isn't ready to go until
+ // both the username and syncKeyBundle are both configured and having no
+ // username seems to make things fail fast so that's good.
+ if (!this.username) {
+ return LOGIN_FAILED_NO_USERNAME;
+ }
+
+ return STATUS_OK;
+ },
+
+ // Do we currently have keys, or do we have enough that we should be able
+ // to successfully fetch them?
+ _canFetchKeys: function() {
+ let userData = this._signedInUser;
+ // a keyFetchToken means we can almost certainly grab them.
+ // kA and kB means we already have them.
+ return userData && (userData.keyFetchToken || (userData.kA && userData.kB));
+ },
+
+ /**
+ * Verify the current auth state, unlocking the master-password if necessary.
+ *
+ * Returns a promise that resolves with the current auth state after
+ * attempting to unlock.
+ */
+ unlockAndVerifyAuthState: function() {
+ if (this._canFetchKeys()) {
+ log.debug("unlockAndVerifyAuthState already has (or can fetch) sync keys");
+ return Promise.resolve(STATUS_OK);
+ }
+ // so no keys - ensure MP unlocked.
+ if (!Utils.ensureMPUnlocked()) {
+ // user declined to unlock, so we don't know if they are stored there.
+ log.debug("unlockAndVerifyAuthState: user declined to unlock master-password");
+ return Promise.resolve(MASTER_PASSWORD_LOCKED);
+ }
+ // now we are unlocked we must re-fetch the user data as we may now have
+ // the details that were previously locked away.
+ return this._fxaService.getSignedInUser().then(
+ accountData => {
+ this._updateSignedInUser(accountData);
+ // If we still can't get keys it probably means the user authenticated
+ // without unlocking the MP or cleared the saved logins, so we've now
+ // lost them - the user will need to reauth before continuing.
+ let result;
+ if (this._canFetchKeys()) {
+ result = STATUS_OK;
+ } else {
+ result = LOGIN_FAILED_LOGIN_REJECTED;
+ }
+ log.debug("unlockAndVerifyAuthState re-fetched credentials and is returning", result);
+ return result;
+ }
+ );
+ },
+
+ /**
+ * Do we have a non-null, not yet expired token for the user currently
+ * signed in?
+ */
+ hasValidToken: function() {
+ // If pref is set to ignore cached authentication credentials for debugging,
+ // then return false to force the fetching of a new token.
+ let ignoreCachedAuthCredentials = false;
+ try {
+ ignoreCachedAuthCredentials = Svc.Prefs.get("debug.ignoreCachedAuthCredentials");
+ } catch(e) {
+ // Pref doesn't exist
+ }
+ if (ignoreCachedAuthCredentials) {
+ return false;
+ }
+ if (!this._token) {
+ return false;
+ }
+ if (this._token.expiration < this._now()) {
+ return false;
+ }
+ return true;
+ },
+
+ // Get our tokenServerURL - a private helper. Returns a string.
+ get _tokenServerUrl() {
+ // We used to support services.sync.tokenServerURI but this was a
+ // pain-point for people using non-default servers as Sync may auto-reset
+ // all services.sync prefs. So if that still exists, it wins.
+ let url = Svc.Prefs.get("tokenServerURI"); // Svc.Prefs "root" is services.sync
+ if (!url) {
+ url = Services.prefs.getCharPref("identity.sync.tokenserver.uri");
+ }
+ while (url.endsWith("/")) { // trailing slashes cause problems...
+ url = url.slice(0, -1);
+ }
+ return url;
+ },
+
+ // Refresh the sync token for our user. Returns a promise that resolves
+ // with a token (which may be null in one sad edge-case), or rejects with an
+ // error.
+ _fetchTokenForUser: function() {
+ // tokenServerURI is mis-named - convention is uri means nsISomething...
+ let tokenServerURI = this._tokenServerUrl;
+ let log = this._log;
+ let client = this._tokenServerClient;
+ let fxa = this._fxaService;
+ let userData = this._signedInUser;
+
+ // We need kA and kB for things to work. If we don't have them, just
+ // return null for the token - sync calling unlockAndVerifyAuthState()
+ // before actually syncing will setup the error states if necessary.
+ if (!this._canFetchKeys()) {
+ log.info("Unable to fetch keys (master-password locked?), so aborting token fetch");
+ return Promise.resolve(null);
+ }
+
+ let maybeFetchKeys = () => {
+ // This is called at login time and every time we need a new token - in
+ // the latter case we already have kA and kB, so optimise that case.
+ if (userData.kA && userData.kB) {
+ return;
+ }
+ log.info("Fetching new keys");
+ return this._fxaService.getKeys().then(
+ newUserData => {
+ userData = newUserData;
+ this._updateSignedInUser(userData); // throws if the user changed.
+ }
+ );
+ }
+
+ let getToken = assertion => {
+ log.debug("Getting a token");
+ let deferred = Promise.defer();
+ let cb = function (err, token) {
+ if (err) {
+ return deferred.reject(err);
+ }
+ log.debug("Successfully got a sync token");
+ return deferred.resolve(token);
+ };
+
+ let kBbytes = CommonUtils.hexToBytes(userData.kB);
+ let headers = {"X-Client-State": this._computeXClientState(kBbytes)};
+ client.getTokenFromBrowserIDAssertion(tokenServerURI, assertion, cb, headers);
+ return deferred.promise;
+ }
+
+ let getAssertion = () => {
+ log.info("Getting an assertion from", tokenServerURI);
+ let audience = Services.io.newURI(tokenServerURI, null, null).prePath;
+ return fxa.getAssertion(audience);
+ };
+
+ // wait until the account email is verified and we know that
+ // getAssertion() will return a real assertion (not null).
+ return fxa.whenVerified(this._signedInUser)
+ .then(() => maybeFetchKeys())
+ .then(() => getAssertion())
+ .then(assertion => getToken(assertion))
+ .catch(err => {
+ // If we get a 401 fetching the token it may be that our certificate
+ // needs to be regenerated.
+ if (!err.response || err.response.status !== 401) {
+ return Promise.reject(err);
+ }
+ log.warn("Token server returned 401, refreshing certificate and retrying token fetch");
+ return fxa.invalidateCertificate()
+ .then(() => getAssertion())
+ .then(assertion => getToken(assertion))
+ })
+ .then(token => {
+ // TODO: Make it be only 80% of the duration, so refresh the token
+ // before it actually expires. This is to avoid sync storage errors
+ // otherwise, we get a nasty notification bar briefly. Bug 966568.
+ token.expiration = this._now() + (token.duration * 1000) * 0.80;
+ if (!this._syncKeyBundle) {
+ // We are given kA/kB as hex.
+ this._syncKeyBundle = deriveKeyBundle(Utils.hexToBytes(userData.kB));
+ }
+ return token;
+ })
+ .catch(err => {
+ // TODO: unify these errors - we need to handle errors thrown by
+ // both tokenserverclient and hawkclient.
+ // A tokenserver error thrown based on a bad response.
+ if (err.response && err.response.status === 401) {
+ err = new AuthenticationError(err, "tokenserver");
+ // A hawkclient error.
+ } else if (err.code && err.code === 401) {
+ err = new AuthenticationError(err, "hawkclient");
+ // An FxAccounts.jsm error.
+ } else if (err.message == fxAccountsCommon.ERROR_AUTH_ERROR) {
+ err = new AuthenticationError(err, "fxaccounts");
+ }
+
+ // TODO: write tests to make sure that different auth error cases are handled here
+ // properly: auth error getting assertion, auth error getting token (invalid generation
+ // and client-state error)
+ if (err instanceof AuthenticationError) {
+ this._log.error("Authentication error in _fetchTokenForUser", err);
+ // set it to the "fatal" LOGIN_FAILED_LOGIN_REJECTED reason.
+ this._authFailureReason = LOGIN_FAILED_LOGIN_REJECTED;
+ } else {
+ this._log.error("Non-authentication error in _fetchTokenForUser", err);
+ // for now assume it is just a transient network related problem
+ // (although sadly, it might also be a regular unhandled exception)
+ this._authFailureReason = LOGIN_FAILED_NETWORK_ERROR;
+ }
+ // this._authFailureReason being set to be non-null in the above if clause
+ // ensures we are in the correct currentAuthState, and
+ // this._shouldHaveSyncKeyBundle being true ensures everything that cares knows
+ // that there is no authentication dance still under way.
+ this._shouldHaveSyncKeyBundle = true;
+ Weave.Status.login = this._authFailureReason;
+ throw err;
+ });
+ },
+
+ // Returns a promise that is resolved when we have a valid token for the
+ // current user stored in this._token. When resolved, this._token is valid.
+ _ensureValidToken: function() {
+ if (this.hasValidToken()) {
+ this._log.debug("_ensureValidToken already has one");
+ return Promise.resolve();
+ }
+ const notifyStateChanged =
+ () => Services.obs.notifyObservers(null, "weave:service:login:change", null);
+ // reset this._token as a safety net to reduce the possibility of us
+ // repeatedly attempting to use an invalid token if _fetchTokenForUser throws.
+ this._token = null;
+ return this._fetchTokenForUser().then(
+ token => {
+ this._token = token;
+ notifyStateChanged();
+ },
+ error => {
+ notifyStateChanged();
+ throw error
+ }
+ );
+ },
+
+ getResourceAuthenticator: function () {
+ return this._getAuthenticationHeader.bind(this);
+ },
+
+ /**
+ * Obtain a function to be used for adding auth to RESTRequest instances.
+ */
+ getRESTRequestAuthenticator: function() {
+ return this._addAuthenticationHeader.bind(this);
+ },
+
+ /**
+ * @return a Hawk HTTP Authorization Header, lightly wrapped, for the .uri
+ * of a RESTRequest or AsyncResponse object.
+ */
+ _getAuthenticationHeader: function(httpObject, method) {
+ let cb = Async.makeSpinningCallback();
+ this._ensureValidToken().then(cb, cb);
+ // Note that in failure states we return null, causing the request to be
+ // made without authorization headers, thereby presumably causing a 401,
+ // which causes Sync to log out. If we throw, this may not happen as
+ // expected.
+ try {
+ cb.wait();
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.error("Failed to fetch a token for authentication", ex);
+ return null;
+ }
+ if (!this._token) {
+ return null;
+ }
+ let credentials = {algorithm: "sha256",
+ id: this._token.id,
+ key: this._token.key,
+ };
+ method = method || httpObject.method;
+
+ // Get the local clock offset from the Firefox Accounts server. This should
+ // be close to the offset from the storage server.
+ let options = {
+ now: this._now(),
+ localtimeOffsetMsec: this._localtimeOffsetMsec,
+ credentials: credentials,
+ };
+
+ let headerValue = CryptoUtils.computeHAWK(httpObject.uri, method, options);
+ return {headers: {authorization: headerValue.field}};
+ },
+
+ _addAuthenticationHeader: function(request, method) {
+ let header = this._getAuthenticationHeader(request, method);
+ if (!header) {
+ return null;
+ }
+ request.setHeader("authorization", header.headers.authorization);
+ return request;
+ },
+
+ createClusterManager: function(service) {
+ return new BrowserIDClusterManager(service);
+ },
+
+ // Tell Sync what the login status should be if it saw a 401 fetching
+ // info/collections as part of login verification (typically immediately
+ // after login.)
+ // In our case, it almost certainly means a transient error fetching a token
+ // (and hitting this will cause us to logout, which will correctly handle an
+ // authoritative login issue.)
+ loginStatusFromVerification404() {
+ return LOGIN_FAILED_NETWORK_ERROR;
+ },
+};
+
+/* An implementation of the ClusterManager for this identity
+ */
+
+function BrowserIDClusterManager(service) {
+ ClusterManager.call(this, service);
+}
+
+BrowserIDClusterManager.prototype = {
+ __proto__: ClusterManager.prototype,
+
+ _findCluster: function() {
+ let endPointFromIdentityToken = function() {
+ // The only reason (in theory ;) that we can end up with a null token
+ // is when this.identity._canFetchKeys() returned false. In turn, this
+ // should only happen if the master-password is locked or the credentials
+ // storage is screwed, and in those cases we shouldn't have started
+ // syncing so shouldn't get here anyway.
+ // But better safe than sorry! To keep things clearer, throw an explicit
+ // exception - the message will appear in the logs and the error will be
+ // treated as transient.
+ if (!this.identity._token) {
+ throw new Error("Can't get a cluster URL as we can't fetch keys.");
+ }
+ let endpoint = this.identity._token.endpoint;
+ // For Sync 1.5 storage endpoints, we use the base endpoint verbatim.
+ // However, it should end in "/" because we will extend it with
+ // well known path components. So we add a "/" if it's missing.
+ if (!endpoint.endsWith("/")) {
+ endpoint += "/";
+ }
+ log.debug("_findCluster returning " + endpoint);
+ return endpoint;
+ }.bind(this);
+
+ // Spinningly ensure we are ready to authenticate and have a valid token.
+ let promiseClusterURL = function() {
+ return this.identity.whenReadyToAuthenticate.promise.then(
+ () => {
+ // We need to handle node reassignment here. If we are being asked
+ // for a clusterURL while the service already has a clusterURL, then
+ // it's likely a 401 was received using the existing token - in which
+ // case we just discard the existing token and fetch a new one.
+ if (this.service.clusterURL) {
+ log.debug("_findCluster has a pre-existing clusterURL, so discarding the current token");
+ this.identity._token = null;
+ }
+ return this.identity._ensureValidToken();
+ }
+ ).then(endPointFromIdentityToken
+ );
+ }.bind(this);
+
+ let cb = Async.makeSpinningCallback();
+ promiseClusterURL().then(function (clusterURL) {
+ cb(null, clusterURL);
+ }).then(
+ null, err => {
+ log.info("Failed to fetch the cluster URL", err);
+ // service.js's verifyLogin() method will attempt to fetch a cluster
+ // URL when it sees a 401. If it gets null, it treats it as a "real"
+ // auth error and sets Status.login to LOGIN_FAILED_LOGIN_REJECTED, which
+ // in turn causes a notification bar to appear informing the user they
+ // need to re-authenticate.
+ // On the other hand, if fetching the cluster URL fails with an exception,
+ // verifyLogin() assumes it is a transient error, and thus doesn't show
+ // the notification bar under the assumption the issue will resolve
+ // itself.
+ // Thus:
+ // * On a real 401, we must return null.
+ // * On any other problem we must let an exception bubble up.
+ if (err instanceof AuthenticationError) {
+ // callback with no error and a null result - cb.wait() returns null.
+ cb(null, null);
+ } else {
+ // callback with an error - cb.wait() completes by raising an exception.
+ cb(err);
+ }
+ });
+ return cb.wait();
+ },
+
+ getUserBaseURL: function() {
+ // Legacy Sync and FxA Sync construct the userBaseURL differently. Legacy
+ // Sync appends path components onto an empty path, and in FxA Sync the
+ // token server constructs this for us in an opaque manner. Since the
+ // cluster manager already sets the clusterURL on Service and also has
+ // access to the current identity, we added this functionality here.
+ return this.service.clusterURL;
+ }
+}
diff --git a/services/sync/modules/collection_validator.js b/services/sync/modules/collection_validator.js
new file mode 100644
index 000000000..41141bba3
--- /dev/null
+++ b/services/sync/modules/collection_validator.js
@@ -0,0 +1,204 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+const Cu = Components.utils;
+
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/main.js");
+
+this.EXPORTED_SYMBOLS = ["CollectionValidator", "CollectionProblemData"];
+
+class CollectionProblemData {
+ constructor() {
+ this.missingIDs = 0;
+ this.duplicates = [];
+ this.clientMissing = [];
+ this.serverMissing = [];
+ this.serverDeleted = [];
+ this.serverUnexpected = [];
+ this.differences = [];
+ }
+
+ /**
+ * Produce a list summarizing problems found. Each entry contains {name, count},
+ * where name is the field name for the problem, and count is the number of times
+ * the problem was encountered.
+ *
+ * Validation has failed if all counts are not 0.
+ */
+ getSummary() {
+ return [
+ { name: "clientMissing", count: this.clientMissing.length },
+ { name: "serverMissing", count: this.serverMissing.length },
+ { name: "serverDeleted", count: this.serverDeleted.length },
+ { name: "serverUnexpected", count: this.serverUnexpected.length },
+ { name: "differences", count: this.differences.length },
+ { name: "missingIDs", count: this.missingIDs },
+ { name: "duplicates", count: this.duplicates.length }
+ ];
+ }
+}
+
+class CollectionValidator {
+ // Construct a generic collection validator. This is intended to be called by
+ // subclasses.
+ // - name: Name of the engine
+ // - idProp: Property that identifies a record. That is, if a client and server
+ // record have the same value for the idProp property, they should be
+ // compared against eachother.
+ // - props: Array of properties that should be compared
+ constructor(name, idProp, props) {
+ this.name = name;
+ this.props = props;
+ this.idProp = idProp;
+ }
+
+ // Should a custom ProblemData type be needed, return it here.
+ emptyProblemData() {
+ return new CollectionProblemData();
+ }
+
+ getServerItems(engine) {
+ let collection = engine.itemSource();
+ let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
+ collection.full = true;
+ let items = [];
+ collection.recordHandler = function(item) {
+ item.decrypt(collectionKey);
+ items.push(item.cleartext);
+ };
+ let resp = collection.getBatched();
+ if (!resp.success) {
+ throw resp;
+ }
+ return items;
+ }
+
+ // Should return a promise that resolves to an array of client items.
+ getClientItems() {
+ return Promise.reject("Must implement");
+ }
+
+ // Turn the client item into something that can be compared with the server item,
+ // and is also safe to mutate.
+ normalizeClientItem(item) {
+ return Cu.cloneInto(item, {});
+ }
+
+ // Turn the server item into something that can be easily compared with the client
+ // items.
+ normalizeServerItem(item) {
+ return item;
+ }
+
+ // Return whether or not a server item should be present on the client. Expected
+ // to be overridden.
+ clientUnderstands(item) {
+ return true;
+ }
+
+ // Return whether or not a client item should be present on the server. Expected
+ // to be overridden
+ syncedByClient(item) {
+ return true;
+ }
+
+ // Compare the server item and the client item, and return a list of property
+ // names that are different. Can be overridden if needed.
+ getDifferences(client, server) {
+ let differences = [];
+ for (let prop of this.props) {
+ let clientProp = client[prop];
+ let serverProp = server[prop];
+ if ((clientProp || "") !== (serverProp || "")) {
+ differences.push(prop);
+ }
+ }
+ return differences;
+ }
+
+ // Returns an object containing
+ // problemData: an instance of the class returned by emptyProblemData(),
+ // clientRecords: Normalized client records
+ // records: Normalized server records,
+ // deletedRecords: Array of ids that were marked as deleted by the server.
+ compareClientWithServer(clientItems, serverItems) {
+ clientItems = clientItems.map(item => this.normalizeClientItem(item));
+ serverItems = serverItems.map(item => this.normalizeServerItem(item));
+ let problems = this.emptyProblemData();
+ let seenServer = new Map();
+ let serverDeleted = new Set();
+ let allRecords = new Map();
+
+ for (let record of serverItems) {
+ let id = record[this.idProp];
+ if (!id) {
+ ++problems.missingIDs;
+ continue;
+ }
+ if (record.deleted) {
+ serverDeleted.add(record);
+ } else {
+ let possibleDupe = seenServer.get(id);
+ if (possibleDupe) {
+ problems.duplicates.push(id);
+ } else {
+ seenServer.set(id, record);
+ allRecords.set(id, { server: record, client: null, });
+ }
+ record.understood = this.clientUnderstands(record);
+ }
+ }
+
+ let recordPairs = [];
+ let seenClient = new Map();
+ for (let record of clientItems) {
+ let id = record[this.idProp];
+ record.shouldSync = this.syncedByClient(record);
+ seenClient.set(id, record);
+ let combined = allRecords.get(id);
+ if (combined) {
+ combined.client = record;
+ } else {
+ allRecords.set(id, { client: record, server: null });
+ }
+ }
+
+ for (let [id, { server, client }] of allRecords) {
+ if (!client && !server) {
+ throw new Error("Impossible: no client or server record for " + id);
+ } else if (server && !client) {
+ if (server.understood) {
+ problems.clientMissing.push(id);
+ }
+ } else if (client && !server) {
+ if (client.shouldSync) {
+ problems.serverMissing.push(id);
+ }
+ } else {
+ if (!client.shouldSync) {
+ if (!problems.serverUnexpected.includes(id)) {
+ problems.serverUnexpected.push(id);
+ }
+ continue;
+ }
+ let differences = this.getDifferences(client, server);
+ if (differences && differences.length) {
+ problems.differences.push({ id, differences });
+ }
+ }
+ }
+ return {
+ problemData: problems,
+ clientRecords: clientItems,
+ records: serverItems,
+ deletedRecords: [...serverDeleted]
+ };
+ }
+}
+
+// Default to 0, some engines may override.
+CollectionValidator.prototype.version = 0;
diff --git a/services/sync/modules/constants.js b/services/sync/modules/constants.js
new file mode 100644
index 000000000..f70bbd61c
--- /dev/null
+++ b/services/sync/modules/constants.js
@@ -0,0 +1,198 @@
+#filter substitution
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// Process each item in the "constants hash" to add to "global" and give a name
+this.EXPORTED_SYMBOLS = [];
+for (let [key, val] of Object.entries({
+
+WEAVE_VERSION: "@weave_version@",
+
+// Sync Server API version that the client supports.
+SYNC_API_VERSION: "1.1",
+USER_API_VERSION: "1.0",
+MISC_API_VERSION: "1.0",
+
+// Version of the data format this client supports. The data format describes
+// how records are packaged; this is separate from the Server API version and
+// the per-engine cleartext formats.
+STORAGE_VERSION: 5,
+PREFS_BRANCH: "services.sync.",
+
+// Host "key" to access Weave Identity in the password manager
+PWDMGR_HOST: "chrome://weave",
+PWDMGR_PASSWORD_REALM: "Mozilla Services Password",
+PWDMGR_PASSPHRASE_REALM: "Mozilla Services Encryption Passphrase",
+PWDMGR_KEYBUNDLE_REALM: "Mozilla Services Key Bundles",
+
+// Put in [] because those aren't allowed in a collection name.
+DEFAULT_KEYBUNDLE_NAME: "[default]",
+
+// Our extra input to SHA256-HMAC in generateEntry.
+// This includes the full crypto spec; change this when our algo changes.
+HMAC_INPUT: "Sync-AES_256_CBC-HMAC256",
+
+// Key dimensions.
+SYNC_KEY_ENCODED_LENGTH: 26,
+SYNC_KEY_DECODED_LENGTH: 16,
+SYNC_KEY_HYPHENATED_LENGTH: 31, // 26 chars, 5 hyphens.
+
+NO_SYNC_NODE_INTERVAL: 10 * 60 * 1000, // 10 minutes
+
+MAX_ERROR_COUNT_BEFORE_BACKOFF: 3,
+MAX_IGNORE_ERROR_COUNT: 5,
+
+// Backoff intervals
+MINIMUM_BACKOFF_INTERVAL: 15 * 60 * 1000, // 15 minutes
+MAXIMUM_BACKOFF_INTERVAL: 8 * 60 * 60 * 1000, // 8 hours
+
+// HMAC event handling timeout.
+// 10 minutes: a compromise between the multi-desktop sync interval
+// and the mobile sync interval.
+HMAC_EVENT_INTERVAL: 600000,
+
+// How long to wait between sync attempts if the Master Password is locked.
+MASTER_PASSWORD_LOCKED_RETRY_INTERVAL: 15 * 60 * 1000, // 15 minutes
+
+// The default for how long we "block" sync from running when doing a migration.
+DEFAULT_BLOCK_PERIOD: 2 * 24 * 60 * 60 * 1000, // 2 days
+
+// Separate from the ID fetch batch size to allow tuning for mobile.
+MOBILE_BATCH_SIZE: 50,
+
+// 50 is hardcoded here because of URL length restrictions.
+// (GUIDs can be up to 64 chars long.)
+// Individual engines can set different values for their limit if their
+// identifiers are shorter.
+DEFAULT_GUID_FETCH_BATCH_SIZE: 50,
+DEFAULT_MOBILE_GUID_FETCH_BATCH_SIZE: 50,
+
+// Default batch size for applying incoming records.
+DEFAULT_STORE_BATCH_SIZE: 1,
+HISTORY_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE
+FORMS_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE
+PASSWORDS_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE
+ADDONS_STORE_BATCH_SIZE: 1000000, // process all addons at once
+APPS_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE
+
+// Default batch size for download batching
+// (how many records are fetched at a time from the server when batching is used).
+DEFAULT_DOWNLOAD_BATCH_SIZE: 1000,
+
+// score thresholds for early syncs
+SINGLE_USER_THRESHOLD: 1000,
+MULTI_DEVICE_THRESHOLD: 300,
+
+// Other score increment constants
+SCORE_INCREMENT_SMALL: 1,
+SCORE_INCREMENT_MEDIUM: 10,
+
+// Instant sync score increment
+SCORE_INCREMENT_XLARGE: 300 + 1, //MULTI_DEVICE_THRESHOLD + 1
+
+// Delay before incrementing global score
+SCORE_UPDATE_DELAY: 100,
+
+// Delay for the back observer debouncer. This is chosen to be longer than any
+// observed spurious idle/back events and short enough to pre-empt user activity.
+IDLE_OBSERVER_BACK_DELAY: 100,
+
+// Max number of records or bytes to upload in a single POST - we'll do multiple POSTS if either
+// MAX_UPLOAD_RECORDS or MAX_UPLOAD_BYTES is hit)
+MAX_UPLOAD_RECORDS: 100,
+MAX_UPLOAD_BYTES: 1024 * 1023, // just under 1MB
+MAX_HISTORY_UPLOAD: 5000,
+MAX_HISTORY_DOWNLOAD: 5000,
+
+// TTL of the message sent to another device when sending a tab
+NOTIFY_TAB_SENT_TTL_SECS: 1 * 3600, // 1 hour
+
+// Top-level statuses:
+STATUS_OK: "success.status_ok",
+SYNC_FAILED: "error.sync.failed",
+LOGIN_FAILED: "error.login.failed",
+SYNC_FAILED_PARTIAL: "error.sync.failed_partial",
+CLIENT_NOT_CONFIGURED: "service.client_not_configured",
+STATUS_DISABLED: "service.disabled",
+MASTER_PASSWORD_LOCKED: "service.master_password_locked",
+
+// success states
+LOGIN_SUCCEEDED: "success.login",
+SYNC_SUCCEEDED: "success.sync",
+ENGINE_SUCCEEDED: "success.engine",
+
+// login failure status codes:
+LOGIN_FAILED_NO_USERNAME: "error.login.reason.no_username",
+LOGIN_FAILED_NO_PASSWORD: "error.login.reason.no_password2",
+LOGIN_FAILED_NO_PASSPHRASE: "error.login.reason.no_recoverykey",
+LOGIN_FAILED_NETWORK_ERROR: "error.login.reason.network",
+LOGIN_FAILED_SERVER_ERROR: "error.login.reason.server",
+LOGIN_FAILED_INVALID_PASSPHRASE: "error.login.reason.recoverykey",
+LOGIN_FAILED_LOGIN_REJECTED: "error.login.reason.account",
+
+// sync failure status codes
+METARECORD_DOWNLOAD_FAIL: "error.sync.reason.metarecord_download_fail",
+VERSION_OUT_OF_DATE: "error.sync.reason.version_out_of_date",
+DESKTOP_VERSION_OUT_OF_DATE: "error.sync.reason.desktop_version_out_of_date",
+SETUP_FAILED_NO_PASSPHRASE: "error.sync.reason.setup_failed_no_passphrase",
+CREDENTIALS_CHANGED: "error.sync.reason.credentials_changed",
+ABORT_SYNC_COMMAND: "aborting sync, process commands said so",
+NO_SYNC_NODE_FOUND: "error.sync.reason.no_node_found",
+OVER_QUOTA: "error.sync.reason.over_quota",
+PROLONGED_SYNC_FAILURE: "error.sync.prolonged_failure",
+SERVER_MAINTENANCE: "error.sync.reason.serverMaintenance",
+
+RESPONSE_OVER_QUOTA: "14",
+
+// engine failure status codes
+ENGINE_UPLOAD_FAIL: "error.engine.reason.record_upload_fail",
+ENGINE_DOWNLOAD_FAIL: "error.engine.reason.record_download_fail",
+ENGINE_UNKNOWN_FAIL: "error.engine.reason.unknown_fail",
+ENGINE_APPLY_FAIL: "error.engine.reason.apply_fail",
+ENGINE_METARECORD_DOWNLOAD_FAIL: "error.engine.reason.metarecord_download_fail",
+ENGINE_METARECORD_UPLOAD_FAIL: "error.engine.reason.metarecord_upload_fail",
+// an upload failure where the batch was interrupted with a 412
+ENGINE_BATCH_INTERRUPTED: "error.engine.reason.batch_interrupted",
+
+JPAKE_ERROR_CHANNEL: "jpake.error.channel",
+JPAKE_ERROR_NETWORK: "jpake.error.network",
+JPAKE_ERROR_SERVER: "jpake.error.server",
+JPAKE_ERROR_TIMEOUT: "jpake.error.timeout",
+JPAKE_ERROR_INTERNAL: "jpake.error.internal",
+JPAKE_ERROR_INVALID: "jpake.error.invalid",
+JPAKE_ERROR_NODATA: "jpake.error.nodata",
+JPAKE_ERROR_KEYMISMATCH: "jpake.error.keymismatch",
+JPAKE_ERROR_WRONGMESSAGE: "jpake.error.wrongmessage",
+JPAKE_ERROR_USERABORT: "jpake.error.userabort",
+JPAKE_ERROR_DELAYUNSUPPORTED: "jpake.error.delayunsupported",
+
+// info types for Service.getStorageInfo
+INFO_COLLECTIONS: "collections",
+INFO_COLLECTION_USAGE: "collection_usage",
+INFO_COLLECTION_COUNTS: "collection_counts",
+INFO_QUOTA: "quota",
+
+// Ways that a sync can be disabled (messages only to be printed in debug log)
+kSyncMasterPasswordLocked: "User elected to leave Master Password locked",
+kSyncWeaveDisabled: "Weave is disabled",
+kSyncNetworkOffline: "Network is offline",
+kSyncBackoffNotMet: "Trying to sync before the server said it's okay",
+kFirstSyncChoiceNotMade: "User has not selected an action for first sync",
+
+// Application IDs
+FIREFOX_ID: "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
+FENNEC_ID: "{a23983c0-fd0e-11dc-95ff-0800200c9a66}",
+SEAMONKEY_ID: "{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}",
+TEST_HARNESS_ID: "xuth@mozilla.org",
+
+MIN_PP_LENGTH: 12,
+MIN_PASS_LENGTH: 8,
+
+DEVICE_TYPE_DESKTOP: "desktop",
+DEVICE_TYPE_MOBILE: "mobile",
+
+})) {
+ this[key] = val;
+ this.EXPORTED_SYMBOLS.push(key);
+}
diff --git a/services/sync/modules/engines.js b/services/sync/modules/engines.js
new file mode 100644
index 000000000..1eaa1863a
--- /dev/null
+++ b/services/sync/modules/engines.js
@@ -0,0 +1,1813 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = [
+ "EngineManager",
+ "Engine",
+ "SyncEngine",
+ "Tracker",
+ "Store",
+ "Changeset"
+];
+
+var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/util.js");
+
+XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts",
+ "resource://gre/modules/FxAccounts.jsm");
+
+/*
+ * Trackers are associated with a single engine and deal with
+ * listening for changes to their particular data type.
+ *
+ * There are two things they keep track of:
+ * 1) A score, indicating how urgently the engine wants to sync
+ * 2) A list of IDs for all the changed items that need to be synced
+ * and updating their 'score', indicating how urgently they
+ * want to sync.
+ *
+ */
+this.Tracker = function Tracker(name, engine) {
+ if (!engine) {
+ throw new Error("Tracker must be associated with an Engine instance.");
+ }
+
+ name = name || "Unnamed";
+ this.name = this.file = name.toLowerCase();
+ this.engine = engine;
+
+ this._log = Log.repository.getLogger("Sync.Tracker." + name);
+ let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
+ this._log.level = Log.Level[level];
+
+ this._score = 0;
+ this._ignored = [];
+ this.ignoreAll = false;
+ this.changedIDs = {};
+ this.loadChangedIDs();
+
+ Svc.Obs.add("weave:engine:start-tracking", this);
+ Svc.Obs.add("weave:engine:stop-tracking", this);
+
+ Svc.Prefs.observe("engine." + this.engine.prefName, this);
+};
+
+Tracker.prototype = {
+ /*
+ * Score can be called as often as desired to decide which engines to sync
+ *
+ * Valid values for score:
+ * -1: Do not sync unless the user specifically requests it (almost disabled)
+ * 0: Nothing has changed
+ * 100: Please sync me ASAP!
+ *
+ * Setting it to other values should (but doesn't currently) throw an exception
+ */
+ get score() {
+ return this._score;
+ },
+
+ set score(value) {
+ this._score = value;
+ Observers.notify("weave:engine:score:updated", this.name);
+ },
+
+ // Should be called by service everytime a sync has been done for an engine
+ resetScore: function () {
+ this._score = 0;
+ },
+
+ persistChangedIDs: true,
+
+ /**
+ * Persist changedIDs to disk at a later date.
+ * Optionally pass a callback to be invoked when the write has occurred.
+ */
+ saveChangedIDs: function (cb) {
+ if (!this.persistChangedIDs) {
+ this._log.debug("Not saving changedIDs.");
+ return;
+ }
+ Utils.namedTimer(function () {
+ this._log.debug("Saving changed IDs to " + this.file);
+ Utils.jsonSave("changes/" + this.file, this, this.changedIDs, cb);
+ }, 1000, this, "_lazySave");
+ },
+
+ loadChangedIDs: function (cb) {
+ Utils.jsonLoad("changes/" + this.file, this, function(json) {
+ if (json && (typeof(json) == "object")) {
+ this.changedIDs = json;
+ } else if (json !== null) {
+ this._log.warn("Changed IDs file " + this.file + " contains non-object value.");
+ json = null;
+ }
+ if (cb) {
+ cb.call(this, json);
+ }
+ });
+ },
+
+ // ignore/unignore specific IDs. Useful for ignoring items that are
+ // being processed, or that shouldn't be synced.
+ // But note: not persisted to disk
+
+ ignoreID: function (id) {
+ this.unignoreID(id);
+ this._ignored.push(id);
+ },
+
+ unignoreID: function (id) {
+ let index = this._ignored.indexOf(id);
+ if (index != -1)
+ this._ignored.splice(index, 1);
+ },
+
+ _saveChangedID(id, when) {
+ this._log.trace(`Adding changed ID: ${id}, ${JSON.stringify(when)}`);
+ this.changedIDs[id] = when;
+ this.saveChangedIDs(this.onSavedChangedIDs);
+ },
+
+ addChangedID: function (id, when) {
+ if (!id) {
+ this._log.warn("Attempted to add undefined ID to tracker");
+ return false;
+ }
+
+ if (this.ignoreAll || this._ignored.includes(id)) {
+ return false;
+ }
+
+ // Default to the current time in seconds if no time is provided.
+ if (when == null) {
+ when = this._now();
+ }
+
+ // Add/update the entry if we have a newer time.
+ if ((this.changedIDs[id] || -Infinity) < when) {
+ this._saveChangedID(id, when);
+ }
+
+ return true;
+ },
+
+ removeChangedID: function (id) {
+ if (!id) {
+ this._log.warn("Attempted to remove undefined ID to tracker");
+ return false;
+ }
+ if (this.ignoreAll || this._ignored.includes(id)) {
+ return false;
+ }
+ if (this.changedIDs[id] != null) {
+ this._log.trace("Removing changed ID " + id);
+ delete this.changedIDs[id];
+ this.saveChangedIDs();
+ }
+ return true;
+ },
+
+ clearChangedIDs: function () {
+ this._log.trace("Clearing changed ID list");
+ this.changedIDs = {};
+ this.saveChangedIDs();
+ },
+
+ _now() {
+ return Date.now() / 1000;
+ },
+
+ _isTracking: false,
+
+ // Override these in your subclasses.
+ startTracking: function () {
+ },
+
+ stopTracking: function () {
+ },
+
+ engineIsEnabled: function () {
+ if (!this.engine) {
+ // Can't tell -- we must be running in a test!
+ return true;
+ }
+ return this.engine.enabled;
+ },
+
+ onEngineEnabledChanged: function (engineEnabled) {
+ if (engineEnabled == this._isTracking) {
+ return;
+ }
+
+ if (engineEnabled) {
+ this.startTracking();
+ this._isTracking = true;
+ } else {
+ this.stopTracking();
+ this._isTracking = false;
+ this.clearChangedIDs();
+ }
+ },
+
+ observe: function (subject, topic, data) {
+ switch (topic) {
+ case "weave:engine:start-tracking":
+ if (!this.engineIsEnabled()) {
+ return;
+ }
+ this._log.trace("Got start-tracking.");
+ if (!this._isTracking) {
+ this.startTracking();
+ this._isTracking = true;
+ }
+ return;
+ case "weave:engine:stop-tracking":
+ this._log.trace("Got stop-tracking.");
+ if (this._isTracking) {
+ this.stopTracking();
+ this._isTracking = false;
+ }
+ return;
+ case "nsPref:changed":
+ if (data == PREFS_BRANCH + "engine." + this.engine.prefName) {
+ this.onEngineEnabledChanged(this.engine.enabled);
+ }
+ return;
+ }
+ }
+};
+
+
+
+/**
+ * The Store serves as the interface between Sync and stored data.
+ *
+ * The name "store" is slightly a misnomer because it doesn't actually "store"
+ * anything. Instead, it serves as a gateway to something that actually does
+ * the "storing."
+ *
+ * The store is responsible for record management inside an engine. It tells
+ * Sync what items are available for Sync, converts items to and from Sync's
+ * record format, and applies records from Sync into changes on the underlying
+ * store.
+ *
+ * Store implementations require a number of functions to be implemented. These
+ * are all documented below.
+ *
+ * For stores that deal with many records or which have expensive store access
+ * routines, it is highly recommended to implement a custom applyIncomingBatch
+ * and/or applyIncoming function on top of the basic APIs.
+ */
+
+this.Store = function Store(name, engine) {
+ if (!engine) {
+ throw new Error("Store must be associated with an Engine instance.");
+ }
+
+ name = name || "Unnamed";
+ this.name = name.toLowerCase();
+ this.engine = engine;
+
+ this._log = Log.repository.getLogger("Sync.Store." + name);
+ let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
+ this._log.level = Log.Level[level];
+
+ XPCOMUtils.defineLazyGetter(this, "_timer", function() {
+ return Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ });
+}
+Store.prototype = {
+
+ _sleep: function _sleep(delay) {
+ let cb = Async.makeSyncCallback();
+ this._timer.initWithCallback(cb, delay, Ci.nsITimer.TYPE_ONE_SHOT);
+ Async.waitForSyncCallback(cb);
+ },
+
+ /**
+ * Apply multiple incoming records against the store.
+ *
+ * This is called with a set of incoming records to process. The function
+ * should look at each record, reconcile with the current local state, and
+ * make the local changes required to bring its state in alignment with the
+ * record.
+ *
+ * The default implementation simply iterates over all records and calls
+ * applyIncoming(). Store implementations may overwrite this function
+ * if desired.
+ *
+ * @param records Array of records to apply
+ * @return Array of record IDs which did not apply cleanly
+ */
+ applyIncomingBatch: function (records) {
+ let failed = [];
+ for (let record of records) {
+ try {
+ this.applyIncoming(record);
+ } catch (ex) {
+ if (ex.code == Engine.prototype.eEngineAbortApplyIncoming) {
+ // This kind of exception should have a 'cause' attribute, which is an
+ // originating exception.
+ // ex.cause will carry its stack with it when rethrown.
+ throw ex.cause;
+ }
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.warn("Failed to apply incoming record " + record.id, ex);
+ this.engine._noteApplyFailure();
+ failed.push(record.id);
+ }
+ };
+ return failed;
+ },
+
+ /**
+ * Apply a single record against the store.
+ *
+ * This takes a single record and makes the local changes required so the
+ * local state matches what's in the record.
+ *
+ * The default implementation calls one of remove(), create(), or update()
+ * depending on the state obtained from the store itself. Store
+ * implementations may overwrite this function if desired.
+ *
+ * @param record
+ * Record to apply
+ */
+ applyIncoming: function (record) {
+ if (record.deleted)
+ this.remove(record);
+ else if (!this.itemExists(record.id))
+ this.create(record);
+ else
+ this.update(record);
+ },
+
+ // override these in derived objects
+
+ /**
+ * Create an item in the store from a record.
+ *
+ * This is called by the default implementation of applyIncoming(). If using
+ * applyIncomingBatch(), this won't be called unless your store calls it.
+ *
+ * @param record
+ * The store record to create an item from
+ */
+ create: function (record) {
+ throw "override create in a subclass";
+ },
+
+ /**
+ * Remove an item in the store from a record.
+ *
+ * This is called by the default implementation of applyIncoming(). If using
+ * applyIncomingBatch(), this won't be called unless your store calls it.
+ *
+ * @param record
+ * The store record to delete an item from
+ */
+ remove: function (record) {
+ throw "override remove in a subclass";
+ },
+
+ /**
+ * Update an item from a record.
+ *
+ * This is called by the default implementation of applyIncoming(). If using
+ * applyIncomingBatch(), this won't be called unless your store calls it.
+ *
+ * @param record
+ * The record to use to update an item from
+ */
+ update: function (record) {
+ throw "override update in a subclass";
+ },
+
+ /**
+ * Determine whether a record with the specified ID exists.
+ *
+ * Takes a string record ID and returns a booleans saying whether the record
+ * exists.
+ *
+ * @param id
+ * string record ID
+ * @return boolean indicating whether record exists locally
+ */
+ itemExists: function (id) {
+ throw "override itemExists in a subclass";
+ },
+
+ /**
+ * Create a record from the specified ID.
+ *
+ * If the ID is known, the record should be populated with metadata from
+ * the store. If the ID is not known, the record should be created with the
+ * delete field set to true.
+ *
+ * @param id
+ * string record ID
+ * @param collection
+ * Collection to add record to. This is typically passed into the
+ * constructor for the newly-created record.
+ * @return record type for this engine
+ */
+ createRecord: function (id, collection) {
+ throw "override createRecord in a subclass";
+ },
+
+ /**
+ * Change the ID of a record.
+ *
+ * @param oldID
+ * string old/current record ID
+ * @param newID
+ * string new record ID
+ */
+ changeItemID: function (oldID, newID) {
+ throw "override changeItemID in a subclass";
+ },
+
+ /**
+ * Obtain the set of all known record IDs.
+ *
+ * @return Object with ID strings as keys and values of true. The values
+ * are ignored.
+ */
+ getAllIDs: function () {
+ throw "override getAllIDs in a subclass";
+ },
+
+ /**
+ * Wipe all data in the store.
+ *
+ * This function is called during remote wipes or when replacing local data
+ * with remote data.
+ *
+ * This function should delete all local data that the store is managing. It
+ * can be thought of as clearing out all state and restoring the "new
+ * browser" state.
+ */
+ wipe: function () {
+ throw "override wipe in a subclass";
+ }
+};
+
+this.EngineManager = function EngineManager(service) {
+ this.service = service;
+
+ this._engines = {};
+
+ // This will be populated by Service on startup.
+ this._declined = new Set();
+ this._log = Log.repository.getLogger("Sync.EngineManager");
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.service.engines", "Debug")];
+}
+EngineManager.prototype = {
+ get: function (name) {
+ // Return an array of engines if we have an array of names
+ if (Array.isArray(name)) {
+ let engines = [];
+ name.forEach(function(name) {
+ let engine = this.get(name);
+ if (engine) {
+ engines.push(engine);
+ }
+ }, this);
+ return engines;
+ }
+
+ let engine = this._engines[name];
+ if (!engine) {
+ this._log.debug("Could not get engine: " + name);
+ if (Object.keys) {
+ this._log.debug("Engines are: " + JSON.stringify(Object.keys(this._engines)));
+ }
+ }
+ return engine;
+ },
+
+ getAll: function () {
+ let engines = [];
+ for (let [, engine] of Object.entries(this._engines)) {
+ engines.push(engine);
+ }
+ return engines;
+ },
+
+ /**
+ * N.B., does not pay attention to the declined list.
+ */
+ getEnabled: function () {
+ return this.getAll()
+ .filter((engine) => engine.enabled)
+ .sort((a, b) => a.syncPriority - b.syncPriority);
+ },
+
+ get enabledEngineNames() {
+ return this.getEnabled().map(e => e.name);
+ },
+
+ persistDeclined: function () {
+ Svc.Prefs.set("declinedEngines", [...this._declined].join(","));
+ },
+
+ /**
+ * Returns an array.
+ */
+ getDeclined: function () {
+ return [...this._declined];
+ },
+
+ setDeclined: function (engines) {
+ this._declined = new Set(engines);
+ this.persistDeclined();
+ },
+
+ isDeclined: function (engineName) {
+ return this._declined.has(engineName);
+ },
+
+ /**
+ * Accepts a Set or an array.
+ */
+ decline: function (engines) {
+ for (let e of engines) {
+ this._declined.add(e);
+ }
+ this.persistDeclined();
+ },
+
+ undecline: function (engines) {
+ for (let e of engines) {
+ this._declined.delete(e);
+ }
+ this.persistDeclined();
+ },
+
+ /**
+ * Mark any non-enabled engines as declined.
+ *
+ * This is useful after initial customization during setup.
+ */
+ declineDisabled: function () {
+ for (let e of this.getAll()) {
+ if (!e.enabled) {
+ this._log.debug("Declining disabled engine " + e.name);
+ this._declined.add(e.name);
+ }
+ }
+ this.persistDeclined();
+ },
+
+ /**
+ * Register an Engine to the service. Alternatively, give an array of engine
+ * objects to register.
+ *
+ * @param engineObject
+ * Engine object used to get an instance of the engine
+ * @return The engine object if anything failed
+ */
+ register: function (engineObject) {
+ if (Array.isArray(engineObject)) {
+ return engineObject.map(this.register, this);
+ }
+
+ try {
+ let engine = new engineObject(this.service);
+ let name = engine.name;
+ if (name in this._engines) {
+ this._log.error("Engine '" + name + "' is already registered!");
+ } else {
+ this._engines[name] = engine;
+ }
+ } catch (ex) {
+ let name = engineObject || "";
+ name = name.prototype || "";
+ name = name.name || "";
+
+ this._log.error(`Could not initialize engine ${name}`, ex);
+ return engineObject;
+ }
+ },
+
+ unregister: function (val) {
+ let name = val;
+ if (val instanceof Engine) {
+ name = val.name;
+ }
+ delete this._engines[name];
+ },
+
+ clear: function () {
+ for (let name in this._engines) {
+ delete this._engines[name];
+ }
+ },
+};
+
+this.Engine = function Engine(name, service) {
+ if (!service) {
+ throw new Error("Engine must be associated with a Service instance.");
+ }
+
+ this.Name = name || "Unnamed";
+ this.name = name.toLowerCase();
+ this.service = service;
+
+ this._notify = Utils.notify("weave:engine:");
+ this._log = Log.repository.getLogger("Sync.Engine." + this.Name);
+ let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
+ this._log.level = Log.Level[level];
+
+ this._tracker; // initialize tracker to load previously changed IDs
+ this._log.debug("Engine initialized");
+}
+Engine.prototype = {
+ // _storeObj, and _trackerObj should to be overridden in subclasses
+ _storeObj: Store,
+ _trackerObj: Tracker,
+
+ // Local 'constant'.
+ // Signal to the engine that processing further records is pointless.
+ eEngineAbortApplyIncoming: "error.engine.abort.applyincoming",
+
+ // Should we keep syncing if we find a record that cannot be uploaded (ever)?
+ // If this is false, we'll throw, otherwise, we'll ignore the record and
+ // continue. This currently can only happen due to the record being larger
+ // than the record upload limit.
+ allowSkippedRecord: true,
+
+ get prefName() {
+ return this.name;
+ },
+
+ get enabled() {
+ return Svc.Prefs.get("engine." + this.prefName, false);
+ },
+
+ set enabled(val) {
+ Svc.Prefs.set("engine." + this.prefName, !!val);
+ },
+
+ get score() {
+ return this._tracker.score;
+ },
+
+ get _store() {
+ let store = new this._storeObj(this.Name, this);
+ this.__defineGetter__("_store", () => store);
+ return store;
+ },
+
+ get _tracker() {
+ let tracker = new this._trackerObj(this.Name, this);
+ this.__defineGetter__("_tracker", () => tracker);
+ return tracker;
+ },
+
+ sync: function () {
+ if (!this.enabled) {
+ return;
+ }
+
+ if (!this._sync) {
+ throw "engine does not implement _sync method";
+ }
+
+ this._notify("sync", this.name, this._sync)();
+ },
+
+ /**
+ * Get rid of any local meta-data.
+ */
+ resetClient: function () {
+ if (!this._resetClient) {
+ throw "engine does not implement _resetClient method";
+ }
+
+ this._notify("reset-client", this.name, this._resetClient)();
+ },
+
+ _wipeClient: function () {
+ this.resetClient();
+ this._log.debug("Deleting all local data");
+ this._tracker.ignoreAll = true;
+ this._store.wipe();
+ this._tracker.ignoreAll = false;
+ this._tracker.clearChangedIDs();
+ },
+
+ wipeClient: function () {
+ this._notify("wipe-client", this.name, this._wipeClient)();
+ },
+
+ /**
+ * If one exists, initialize and return a validator for this engine (which
+ * must have a `validate(engine)` method that returns a promise to an object
+ * with a getSummary method). Otherwise return null.
+ */
+ getValidator: function () {
+ return null;
+ }
+};
+
+this.SyncEngine = function SyncEngine(name, service) {
+ Engine.call(this, name || "SyncEngine", service);
+
+ this.loadToFetch();
+ this.loadPreviousFailed();
+}
+
+// Enumeration to define approaches to handling bad records.
+// Attached to the constructor to allow use as a kind of static enumeration.
+SyncEngine.kRecoveryStrategy = {
+ ignore: "ignore",
+ retry: "retry",
+ error: "error"
+};
+
+SyncEngine.prototype = {
+ __proto__: Engine.prototype,
+ _recordObj: CryptoWrapper,
+ version: 1,
+
+ // Which sortindex to use when retrieving records for this engine.
+ _defaultSort: undefined,
+
+ // A relative priority to use when computing an order
+ // for engines to be synced. Higher-priority engines
+ // (lower numbers) are synced first.
+ // It is recommended that a unique value be used for each engine,
+ // in order to guarantee a stable sequence.
+ syncPriority: 0,
+
+ // How many records to pull in a single sync. This is primarily to avoid very
+ // long first syncs against profiles with many history records.
+ downloadLimit: null,
+
+ // How many records to pull at one time when specifying IDs. This is to avoid
+ // URI length limitations.
+ guidFetchBatchSize: DEFAULT_GUID_FETCH_BATCH_SIZE,
+ mobileGUIDFetchBatchSize: DEFAULT_MOBILE_GUID_FETCH_BATCH_SIZE,
+
+ // How many records to process in a single batch.
+ applyIncomingBatchSize: DEFAULT_STORE_BATCH_SIZE,
+
+ get storageURL() {
+ return this.service.storageURL;
+ },
+
+ get engineURL() {
+ return this.storageURL + this.name;
+ },
+
+ get cryptoKeysURL() {
+ return this.storageURL + "crypto/keys";
+ },
+
+ get metaURL() {
+ return this.storageURL + "meta/global";
+ },
+
+ get syncID() {
+ // Generate a random syncID if we don't have one
+ let syncID = Svc.Prefs.get(this.name + ".syncID", "");
+ return syncID == "" ? this.syncID = Utils.makeGUID() : syncID;
+ },
+ set syncID(value) {
+ Svc.Prefs.set(this.name + ".syncID", value);
+ },
+
+ /*
+ * lastSync is a timestamp in server time.
+ */
+ get lastSync() {
+ return parseFloat(Svc.Prefs.get(this.name + ".lastSync", "0"));
+ },
+ set lastSync(value) {
+ // Reset the pref in-case it's a number instead of a string
+ Svc.Prefs.reset(this.name + ".lastSync");
+ // Store the value as a string to keep floating point precision
+ Svc.Prefs.set(this.name + ".lastSync", value.toString());
+ },
+ resetLastSync: function () {
+ this._log.debug("Resetting " + this.name + " last sync time");
+ Svc.Prefs.reset(this.name + ".lastSync");
+ Svc.Prefs.set(this.name + ".lastSync", "0");
+ this.lastSyncLocal = 0;
+ },
+
+ get toFetch() {
+ return this._toFetch;
+ },
+ set toFetch(val) {
+ let cb = (error) => {
+ if (error) {
+ this._log.error("Failed to read JSON records to fetch", error);
+ }
+ }
+ // Coerce the array to a string for more efficient comparison.
+ if (val + "" == this._toFetch) {
+ return;
+ }
+ this._toFetch = val;
+ Utils.namedTimer(function () {
+ Utils.jsonSave("toFetch/" + this.name, this, val, cb);
+ }, 0, this, "_toFetchDelay");
+ },
+
+ loadToFetch: function () {
+ // Initialize to empty if there's no file.
+ this._toFetch = [];
+ Utils.jsonLoad("toFetch/" + this.name, this, function(toFetch) {
+ if (toFetch) {
+ this._toFetch = toFetch;
+ }
+ });
+ },
+
+ get previousFailed() {
+ return this._previousFailed;
+ },
+ set previousFailed(val) {
+ let cb = (error) => {
+ if (error) {
+ this._log.error("Failed to set previousFailed", error);
+ } else {
+ this._log.debug("Successfully wrote previousFailed.");
+ }
+ }
+ // Coerce the array to a string for more efficient comparison.
+ if (val + "" == this._previousFailed) {
+ return;
+ }
+ this._previousFailed = val;
+ Utils.namedTimer(function () {
+ Utils.jsonSave("failed/" + this.name, this, val, cb);
+ }, 0, this, "_previousFailedDelay");
+ },
+
+ loadPreviousFailed: function () {
+ // Initialize to empty if there's no file
+ this._previousFailed = [];
+ Utils.jsonLoad("failed/" + this.name, this, function(previousFailed) {
+ if (previousFailed) {
+ this._previousFailed = previousFailed;
+ }
+ });
+ },
+
+ /*
+ * lastSyncLocal is a timestamp in local time.
+ */
+ get lastSyncLocal() {
+ return parseInt(Svc.Prefs.get(this.name + ".lastSyncLocal", "0"), 10);
+ },
+ set lastSyncLocal(value) {
+ // Store as a string because pref can only store C longs as numbers.
+ Svc.Prefs.set(this.name + ".lastSyncLocal", value.toString());
+ },
+
+ /*
+ * Returns a changeset for this sync. Engine implementations can override this
+ * method to bypass the tracker for certain or all changed items.
+ */
+ getChangedIDs: function () {
+ return this._tracker.changedIDs;
+ },
+
+ // Create a new record using the store and add in crypto fields.
+ _createRecord: function (id) {
+ let record = this._store.createRecord(id, this.name);
+ record.id = id;
+ record.collection = this.name;
+ return record;
+ },
+
+ // Any setup that needs to happen at the beginning of each sync.
+ _syncStartup: function () {
+
+ // Determine if we need to wipe on outdated versions
+ let metaGlobal = this.service.recordManager.get(this.metaURL);
+ let engines = metaGlobal.payload.engines || {};
+ let engineData = engines[this.name] || {};
+
+ let needsWipe = false;
+
+ // Assume missing versions are 0 and wipe the server
+ if ((engineData.version || 0) < this.version) {
+ this._log.debug("Old engine data: " + [engineData.version, this.version]);
+
+ // Prepare to clear the server and upload everything
+ needsWipe = true;
+ this.syncID = "";
+
+ // Set the newer version and newly generated syncID
+ engineData.version = this.version;
+ engineData.syncID = this.syncID;
+
+ // Put the new data back into meta/global and mark for upload
+ engines[this.name] = engineData;
+ metaGlobal.payload.engines = engines;
+ metaGlobal.changed = true;
+ }
+ // Don't sync this engine if the server has newer data
+ else if (engineData.version > this.version) {
+ let error = new String("New data: " + [engineData.version, this.version]);
+ error.failureCode = VERSION_OUT_OF_DATE;
+ throw error;
+ }
+ // Changes to syncID mean we'll need to upload everything
+ else if (engineData.syncID != this.syncID) {
+ this._log.debug("Engine syncIDs: " + [engineData.syncID, this.syncID]);
+ this.syncID = engineData.syncID;
+ this._resetClient();
+ };
+
+ // Delete any existing data and reupload on bad version or missing meta.
+ // No crypto component here...? We could regenerate per-collection keys...
+ if (needsWipe) {
+ this.wipeServer();
+ }
+
+ // Save objects that need to be uploaded in this._modified. We also save
+ // the timestamp of this fetch in this.lastSyncLocal. As we successfully
+ // upload objects we remove them from this._modified. If an error occurs
+ // or any objects fail to upload, they will remain in this._modified. At
+ // the end of a sync, or after an error, we add all objects remaining in
+ // this._modified to the tracker.
+ this.lastSyncLocal = Date.now();
+ if (this.lastSync) {
+ this._modified = this.pullNewChanges();
+ } else {
+ this._log.debug("First sync, uploading all items");
+ this._modified = this.pullAllChanges();
+ }
+ // Clear the tracker now. If the sync fails we'll add the ones we failed
+ // to upload back.
+ this._tracker.clearChangedIDs();
+
+ this._log.info(this._modified.count() +
+ " outgoing items pre-reconciliation");
+
+ // Keep track of what to delete at the end of sync
+ this._delete = {};
+ },
+
+ /**
+ * A tiny abstraction to make it easier to test incoming record
+ * application.
+ */
+ itemSource: function () {
+ return new Collection(this.engineURL, this._recordObj, this.service);
+ },
+
+ /**
+ * Process incoming records.
+ * In the most awful and untestable way possible.
+ * This now accepts something that makes testing vaguely less impossible.
+ */
+ _processIncoming: function (newitems) {
+ this._log.trace("Downloading & applying server changes");
+
+ // Figure out how many total items to fetch this sync; do less on mobile.
+ let batchSize = this.downloadLimit || Infinity;
+ let isMobile = (Svc.Prefs.get("client.type") == "mobile");
+
+ if (!newitems) {
+ newitems = this.itemSource();
+ }
+
+ if (this._defaultSort) {
+ newitems.sort = this._defaultSort;
+ }
+
+ if (isMobile) {
+ batchSize = MOBILE_BATCH_SIZE;
+ }
+ newitems.newer = this.lastSync;
+ newitems.full = true;
+ newitems.limit = batchSize;
+
+ // applied => number of items that should be applied.
+ // failed => number of items that failed in this sync.
+ // newFailed => number of items that failed for the first time in this sync.
+ // reconciled => number of items that were reconciled.
+ let count = {applied: 0, failed: 0, newFailed: 0, reconciled: 0};
+ let handled = [];
+ let applyBatch = [];
+ let failed = [];
+ let failedInPreviousSync = this.previousFailed;
+ let fetchBatch = Utils.arrayUnion(this.toFetch, failedInPreviousSync);
+ // Reset previousFailed for each sync since previously failed items may not fail again.
+ this.previousFailed = [];
+
+ // Used (via exceptions) to allow the record handler/reconciliation/etc.
+ // methods to signal that they would like processing of incoming records to
+ // cease.
+ let aborting = undefined;
+
+ function doApplyBatch() {
+ this._tracker.ignoreAll = true;
+ try {
+ failed = failed.concat(this._store.applyIncomingBatch(applyBatch));
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ // Catch any error that escapes from applyIncomingBatch. At present
+ // those will all be abort events.
+ this._log.warn("Got exception, aborting processIncoming", ex);
+ aborting = ex;
+ }
+ this._tracker.ignoreAll = false;
+ applyBatch = [];
+ }
+
+ function doApplyBatchAndPersistFailed() {
+ // Apply remaining batch.
+ if (applyBatch.length) {
+ doApplyBatch.call(this);
+ }
+ // Persist failed items so we refetch them.
+ if (failed.length) {
+ this.previousFailed = Utils.arrayUnion(failed, this.previousFailed);
+ count.failed += failed.length;
+ this._log.debug("Records that failed to apply: " + failed);
+ failed = [];
+ }
+ }
+
+ let key = this.service.collectionKeys.keyForCollection(this.name);
+
+ // Not binding this method to 'this' for performance reasons. It gets
+ // called for every incoming record.
+ let self = this;
+
+ newitems.recordHandler = function(item) {
+ if (aborting) {
+ return;
+ }
+
+ // Grab a later last modified if possible
+ if (self.lastModified == null || item.modified > self.lastModified)
+ self.lastModified = item.modified;
+
+ // Track the collection for the WBO.
+ item.collection = self.name;
+
+ // Remember which records were processed
+ handled.push(item.id);
+
+ try {
+ try {
+ item.decrypt(key);
+ } catch (ex) {
+ if (!Utils.isHMACMismatch(ex)) {
+ throw ex;
+ }
+ let strategy = self.handleHMACMismatch(item, true);
+ if (strategy == SyncEngine.kRecoveryStrategy.retry) {
+ // You only get one retry.
+ try {
+ // Try decrypting again, typically because we've got new keys.
+ self._log.info("Trying decrypt again...");
+ key = self.service.collectionKeys.keyForCollection(self.name);
+ item.decrypt(key);
+ strategy = null;
+ } catch (ex) {
+ if (!Utils.isHMACMismatch(ex)) {
+ throw ex;
+ }
+ strategy = self.handleHMACMismatch(item, false);
+ }
+ }
+
+ switch (strategy) {
+ case null:
+ // Retry succeeded! No further handling.
+ break;
+ case SyncEngine.kRecoveryStrategy.retry:
+ self._log.debug("Ignoring second retry suggestion.");
+ // Fall through to error case.
+ case SyncEngine.kRecoveryStrategy.error:
+ self._log.warn("Error decrypting record", ex);
+ self._noteApplyFailure();
+ failed.push(item.id);
+ return;
+ case SyncEngine.kRecoveryStrategy.ignore:
+ self._log.debug("Ignoring record " + item.id +
+ " with bad HMAC: already handled.");
+ return;
+ }
+ }
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ self._log.warn("Error decrypting record", ex);
+ self._noteApplyFailure();
+ failed.push(item.id);
+ return;
+ }
+
+ let shouldApply;
+ try {
+ shouldApply = self._reconcile(item);
+ } catch (ex) {
+ if (ex.code == Engine.prototype.eEngineAbortApplyIncoming) {
+ self._log.warn("Reconciliation failed: aborting incoming processing.");
+ self._noteApplyFailure();
+ failed.push(item.id);
+ aborting = ex.cause;
+ } else if (!Async.isShutdownException(ex)) {
+ self._log.warn("Failed to reconcile incoming record " + item.id, ex);
+ self._noteApplyFailure();
+ failed.push(item.id);
+ return;
+ } else {
+ throw ex;
+ }
+ }
+
+ if (shouldApply) {
+ count.applied++;
+ applyBatch.push(item);
+ } else {
+ count.reconciled++;
+ self._log.trace("Skipping reconciled incoming item " + item.id);
+ }
+
+ if (applyBatch.length == self.applyIncomingBatchSize) {
+ doApplyBatch.call(self);
+ }
+ self._store._sleep(0);
+ };
+
+ // Only bother getting data from the server if there's new things
+ if (this.lastModified == null || this.lastModified > this.lastSync) {
+ let resp = newitems.getBatched();
+ doApplyBatchAndPersistFailed.call(this);
+ if (!resp.success) {
+ resp.failureCode = ENGINE_DOWNLOAD_FAIL;
+ throw resp;
+ }
+
+ if (aborting) {
+ throw aborting;
+ }
+ }
+
+ // Mobile: check if we got the maximum that we requested; get the rest if so.
+ if (handled.length == newitems.limit) {
+ let guidColl = new Collection(this.engineURL, null, this.service);
+
+ // Sort and limit so that on mobile we only get the last X records.
+ guidColl.limit = this.downloadLimit;
+ guidColl.newer = this.lastSync;
+
+ // index: Orders by the sortindex descending (highest weight first).
+ guidColl.sort = "index";
+
+ let guids = guidColl.get();
+ if (!guids.success)
+ throw guids;
+
+ // Figure out which guids weren't just fetched then remove any guids that
+ // were already waiting and prepend the new ones
+ let extra = Utils.arraySub(guids.obj, handled);
+ if (extra.length > 0) {
+ fetchBatch = Utils.arrayUnion(extra, fetchBatch);
+ this.toFetch = Utils.arrayUnion(extra, this.toFetch);
+ }
+ }
+
+ // Fast-foward the lastSync timestamp since we have stored the
+ // remaining items in toFetch.
+ if (this.lastSync < this.lastModified) {
+ this.lastSync = this.lastModified;
+ }
+
+ // Process any backlog of GUIDs.
+ // At this point we impose an upper limit on the number of items to fetch
+ // in a single request, even for desktop, to avoid hitting URI limits.
+ batchSize = isMobile ? this.mobileGUIDFetchBatchSize :
+ this.guidFetchBatchSize;
+
+ while (fetchBatch.length && !aborting) {
+ // Reuse the original query, but get rid of the restricting params
+ // and batch remaining records.
+ newitems.limit = 0;
+ newitems.newer = 0;
+ newitems.ids = fetchBatch.slice(0, batchSize);
+
+ // Reuse the existing record handler set earlier
+ let resp = newitems.get();
+ if (!resp.success) {
+ resp.failureCode = ENGINE_DOWNLOAD_FAIL;
+ throw resp;
+ }
+
+ // This batch was successfully applied. Not using
+ // doApplyBatchAndPersistFailed() here to avoid writing toFetch twice.
+ fetchBatch = fetchBatch.slice(batchSize);
+ this.toFetch = Utils.arraySub(this.toFetch, newitems.ids);
+ this.previousFailed = Utils.arrayUnion(this.previousFailed, failed);
+ if (failed.length) {
+ count.failed += failed.length;
+ this._log.debug("Records that failed to apply: " + failed);
+ }
+ failed = [];
+
+ if (aborting) {
+ throw aborting;
+ }
+
+ if (this.lastSync < this.lastModified) {
+ this.lastSync = this.lastModified;
+ }
+ }
+
+ // Apply remaining items.
+ doApplyBatchAndPersistFailed.call(this);
+
+ count.newFailed = this.previousFailed.reduce((count, engine) => {
+ if (failedInPreviousSync.indexOf(engine) == -1) {
+ count++;
+ this._noteApplyNewFailure();
+ }
+ return count;
+ }, 0);
+ count.succeeded = Math.max(0, count.applied - count.failed);
+ this._log.info(["Records:",
+ count.applied, "applied,",
+ count.succeeded, "successfully,",
+ count.failed, "failed to apply,",
+ count.newFailed, "newly failed to apply,",
+ count.reconciled, "reconciled."].join(" "));
+ Observers.notify("weave:engine:sync:applied", count, this.name);
+ },
+
+ _noteApplyFailure: function () {
+ // here would be a good place to record telemetry...
+ },
+
+ _noteApplyNewFailure: function () {
+ // here would be a good place to record telemetry...
+ },
+
+ /**
+ * Find a GUID of an item that is a duplicate of the incoming item but happens
+ * to have a different GUID
+ *
+ * @return GUID of the similar item; falsy otherwise
+ */
+ _findDupe: function (item) {
+ // By default, assume there's no dupe items for the engine
+ },
+
+ // Called when the server has a record marked as deleted, but locally we've
+ // changed it more recently than the deletion. If we return false, the
+ // record will be deleted locally. If we return true, we'll reupload the
+ // record to the server -- any extra work that's needed as part of this
+ // process should be done at this point (such as mark the record's parent
+ // for reuploading in the case of bookmarks).
+ _shouldReviveRemotelyDeletedRecord(remoteItem) {
+ return true;
+ },
+
+ _deleteId: function (id) {
+ this._tracker.removeChangedID(id);
+
+ // Remember this id to delete at the end of sync
+ if (this._delete.ids == null)
+ this._delete.ids = [id];
+ else
+ this._delete.ids.push(id);
+ },
+
+ _switchItemToDupe(localDupeGUID, incomingItem) {
+ // The local, duplicate ID is always deleted on the server.
+ this._deleteId(localDupeGUID);
+
+ // We unconditionally change the item's ID in case the engine knows of
+ // an item but doesn't expose it through itemExists. If the API
+ // contract were stronger, this could be changed.
+ this._log.debug("Switching local ID to incoming: " + localDupeGUID + " -> " +
+ incomingItem.id);
+ this._store.changeItemID(localDupeGUID, incomingItem.id);
+ },
+
+ /**
+ * Reconcile incoming record with local state.
+ *
+ * This function essentially determines whether to apply an incoming record.
+ *
+ * @param item
+ * Record from server to be tested for application.
+ * @return boolean
+ * Truthy if incoming record should be applied. False if not.
+ */
+ _reconcile: function (item) {
+ if (this._log.level <= Log.Level.Trace) {
+ this._log.trace("Incoming: " + item);
+ }
+
+ // We start reconciling by collecting a bunch of state. We do this here
+ // because some state may change during the course of this function and we
+ // need to operate on the original values.
+ let existsLocally = this._store.itemExists(item.id);
+ let locallyModified = this._modified.has(item.id);
+
+ // TODO Handle clock drift better. Tracked in bug 721181.
+ let remoteAge = AsyncResource.serverTime - item.modified;
+ let localAge = locallyModified ?
+ (Date.now() / 1000 - this._modified.getModifiedTimestamp(item.id)) : null;
+ let remoteIsNewer = remoteAge < localAge;
+
+ this._log.trace("Reconciling " + item.id + ". exists=" +
+ existsLocally + "; modified=" + locallyModified +
+ "; local age=" + localAge + "; incoming age=" +
+ remoteAge);
+
+ // We handle deletions first so subsequent logic doesn't have to check
+ // deleted flags.
+ if (item.deleted) {
+ // If the item doesn't exist locally, there is nothing for us to do. We
+ // can't check for duplicates because the incoming record has no data
+ // which can be used for duplicate detection.
+ if (!existsLocally) {
+ this._log.trace("Ignoring incoming item because it was deleted and " +
+ "the item does not exist locally.");
+ return false;
+ }
+
+ // We decide whether to process the deletion by comparing the record
+ // ages. If the item is not modified locally, the remote side wins and
+ // the deletion is processed. If it is modified locally, we take the
+ // newer record.
+ if (!locallyModified) {
+ this._log.trace("Applying incoming delete because the local item " +
+ "exists and isn't modified.");
+ return true;
+ }
+ this._log.trace("Incoming record is deleted but we had local changes.");
+
+ if (remoteIsNewer) {
+ this._log.trace("Remote record is newer -- deleting local record.");
+ return true;
+ }
+ // If the local record is newer, we defer to individual engines for
+ // how to handle this. By default, we revive the record.
+ let willRevive = this._shouldReviveRemotelyDeletedRecord(item);
+ this._log.trace("Local record is newer -- reviving? " + willRevive);
+
+ return !willRevive;
+ }
+
+ // At this point the incoming record is not for a deletion and must have
+ // data. If the incoming record does not exist locally, we check for a local
+ // duplicate existing under a different ID. The default implementation of
+ // _findDupe() is empty, so engines have to opt in to this functionality.
+ //
+ // If we find a duplicate, we change the local ID to the incoming ID and we
+ // refresh the metadata collected above. See bug 710448 for the history
+ // of this logic.
+ if (!existsLocally) {
+ let localDupeGUID = this._findDupe(item);
+ if (localDupeGUID) {
+ this._log.trace("Local item " + localDupeGUID + " is a duplicate for " +
+ "incoming item " + item.id);
+
+ // The current API contract does not mandate that the ID returned by
+ // _findDupe() actually exists. Therefore, we have to perform this
+ // check.
+ existsLocally = this._store.itemExists(localDupeGUID);
+
+ // If the local item was modified, we carry its metadata forward so
+ // appropriate reconciling can be performed.
+ if (this._modified.has(localDupeGUID)) {
+ locallyModified = true;
+ localAge = this._tracker._now() - this._modified.getModifiedTimestamp(localDupeGUID);
+ remoteIsNewer = remoteAge < localAge;
+
+ this._modified.swap(localDupeGUID, item.id);
+ } else {
+ locallyModified = false;
+ localAge = null;
+ }
+
+ // Tell the engine to do whatever it needs to switch the items.
+ this._switchItemToDupe(localDupeGUID, item);
+
+ this._log.debug("Local item after duplication: age=" + localAge +
+ "; modified=" + locallyModified + "; exists=" +
+ existsLocally);
+ } else {
+ this._log.trace("No duplicate found for incoming item: " + item.id);
+ }
+ }
+
+ // At this point we've performed duplicate detection. But, nothing here
+ // should depend on duplicate detection as the above should have updated
+ // state seamlessly.
+
+ if (!existsLocally) {
+ // If the item doesn't exist locally and we have no local modifications
+ // to the item (implying that it was not deleted), always apply the remote
+ // item.
+ if (!locallyModified) {
+ this._log.trace("Applying incoming because local item does not exist " +
+ "and was not deleted.");
+ return true;
+ }
+
+ // If the item was modified locally but isn't present, it must have
+ // been deleted. If the incoming record is younger, we restore from
+ // that record.
+ if (remoteIsNewer) {
+ this._log.trace("Applying incoming because local item was deleted " +
+ "before the incoming item was changed.");
+ this._modified.delete(item.id);
+ return true;
+ }
+
+ this._log.trace("Ignoring incoming item because the local item's " +
+ "deletion is newer.");
+ return false;
+ }
+
+ // If the remote and local records are the same, there is nothing to be
+ // done, so we don't do anything. In the ideal world, this logic wouldn't
+ // be here and the engine would take a record and apply it. The reason we
+ // want to defer this logic is because it would avoid a redundant and
+ // possibly expensive dip into the storage layer to query item state.
+ // This should get addressed in the async rewrite, so we ignore it for now.
+ let localRecord = this._createRecord(item.id);
+ let recordsEqual = Utils.deepEquals(item.cleartext,
+ localRecord.cleartext);
+
+ // If the records are the same, we don't need to do anything. This does
+ // potentially throw away a local modification time. But, if the records
+ // are the same, does it matter?
+ if (recordsEqual) {
+ this._log.trace("Ignoring incoming item because the local item is " +
+ "identical.");
+
+ this._modified.delete(item.id);
+ return false;
+ }
+
+ // At this point the records are different.
+
+ // If we have no local modifications, always take the server record.
+ if (!locallyModified) {
+ this._log.trace("Applying incoming record because no local conflicts.");
+ return true;
+ }
+
+ // At this point, records are different and the local record is modified.
+ // We resolve conflicts by record age, where the newest one wins. This does
+ // result in data loss and should be handled by giving the engine an
+ // opportunity to merge the records. Bug 720592 tracks this feature.
+ this._log.warn("DATA LOSS: Both local and remote changes to record: " +
+ item.id);
+ return remoteIsNewer;
+ },
+
+ // Upload outgoing records.
+ _uploadOutgoing: function () {
+ this._log.trace("Uploading local changes to server.");
+
+ let modifiedIDs = this._modified.ids();
+ if (modifiedIDs.length) {
+ this._log.trace("Preparing " + modifiedIDs.length +
+ " outgoing records");
+
+ let counts = { sent: modifiedIDs.length, failed: 0 };
+
+ // collection we'll upload
+ let up = new Collection(this.engineURL, null, this.service);
+
+ let failed = [];
+ let successful = [];
+ let handleResponse = (resp, batchOngoing = false) => {
+ // Note: We don't want to update this.lastSync, or this._modified until
+ // the batch is complete, however we want to remember success/failure
+ // indicators for when that happens.
+ if (!resp.success) {
+ this._log.debug("Uploading records failed: " + resp);
+ resp.failureCode = resp.status == 412 ? ENGINE_BATCH_INTERRUPTED : ENGINE_UPLOAD_FAIL;
+ throw resp;
+ }
+
+ // Update server timestamp from the upload.
+ failed = failed.concat(Object.keys(resp.obj.failed));
+ successful = successful.concat(resp.obj.success);
+
+ if (batchOngoing) {
+ // Nothing to do yet
+ return;
+ }
+ // Advance lastSync since we've finished the batch.
+ let modified = resp.headers["x-weave-timestamp"];
+ if (modified > this.lastSync) {
+ this.lastSync = modified;
+ }
+ if (failed.length && this._log.level <= Log.Level.Debug) {
+ this._log.debug("Records that will be uploaded again because "
+ + "the server couldn't store them: "
+ + failed.join(", "));
+ }
+
+ counts.failed += failed.length;
+
+ for (let id of successful) {
+ this._modified.delete(id);
+ }
+
+ this._onRecordsWritten(successful, failed);
+
+ // clear for next batch
+ failed.length = 0;
+ successful.length = 0;
+ };
+
+ let postQueue = up.newPostQueue(this._log, this.lastSync, handleResponse);
+
+ for (let id of modifiedIDs) {
+ let out;
+ let ok = false;
+ try {
+ out = this._createRecord(id);
+ if (this._log.level <= Log.Level.Trace)
+ this._log.trace("Outgoing: " + out);
+
+ out.encrypt(this.service.collectionKeys.keyForCollection(this.name));
+ ok = true;
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.warn("Error creating record", ex);
+ }
+ if (ok) {
+ let { enqueued, error } = postQueue.enqueue(out);
+ if (!enqueued) {
+ ++counts.failed;
+ if (!this.allowSkippedRecord) {
+ throw error;
+ }
+ }
+ }
+ this._store._sleep(0);
+ }
+ postQueue.flush(true);
+ Observers.notify("weave:engine:sync:uploaded", counts, this.name);
+ }
+ },
+
+ _onRecordsWritten(succeeded, failed) {
+ // Implement this method to take specific actions against successfully
+ // uploaded records and failed records.
+ },
+
+ // Any cleanup necessary.
+ // Save the current snapshot so as to calculate changes at next sync
+ _syncFinish: function () {
+ this._log.trace("Finishing up sync");
+ this._tracker.resetScore();
+
+ let doDelete = Utils.bind2(this, function(key, val) {
+ let coll = new Collection(this.engineURL, this._recordObj, this.service);
+ coll[key] = val;
+ coll.delete();
+ });
+
+ for (let [key, val] of Object.entries(this._delete)) {
+ // Remove the key for future uses
+ delete this._delete[key];
+
+ // Send a simple delete for the property
+ if (key != "ids" || val.length <= 100)
+ doDelete(key, val);
+ else {
+ // For many ids, split into chunks of at most 100
+ while (val.length > 0) {
+ doDelete(key, val.slice(0, 100));
+ val = val.slice(100);
+ }
+ }
+ }
+ },
+
+ _syncCleanup: function () {
+ if (!this._modified) {
+ return;
+ }
+
+ // Mark failed WBOs as changed again so they are reuploaded next time.
+ this.trackRemainingChanges();
+ this._modified.clear();
+ },
+
+ _sync: function () {
+ try {
+ this._syncStartup();
+ Observers.notify("weave:engine:sync:status", "process-incoming");
+ this._processIncoming();
+ Observers.notify("weave:engine:sync:status", "upload-outgoing");
+ this._uploadOutgoing();
+ this._syncFinish();
+ } finally {
+ this._syncCleanup();
+ }
+ },
+
+ canDecrypt: function () {
+ // Report failure even if there's nothing to decrypt
+ let canDecrypt = false;
+
+ // Fetch the most recently uploaded record and try to decrypt it
+ let test = new Collection(this.engineURL, this._recordObj, this.service);
+ test.limit = 1;
+ test.sort = "newest";
+ test.full = true;
+
+ let key = this.service.collectionKeys.keyForCollection(this.name);
+ test.recordHandler = function recordHandler(record) {
+ record.decrypt(key);
+ canDecrypt = true;
+ }.bind(this);
+
+ // Any failure fetching/decrypting will just result in false
+ try {
+ this._log.trace("Trying to decrypt a record from the server..");
+ test.get();
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.debug("Failed test decrypt", ex);
+ }
+
+ return canDecrypt;
+ },
+
+ _resetClient: function () {
+ this.resetLastSync();
+ this.previousFailed = [];
+ this.toFetch = [];
+ },
+
+ wipeServer: function () {
+ let response = this.service.resource(this.engineURL).delete();
+ if (response.status != 200 && response.status != 404) {
+ throw response;
+ }
+ this._resetClient();
+ },
+
+ removeClientData: function () {
+ // Implement this method in engines that store client specific data
+ // on the server.
+ },
+
+ /*
+ * Decide on (and partially effect) an error-handling strategy.
+ *
+ * Asks the Service to respond to an HMAC error, which might result in keys
+ * being downloaded. That call returns true if an action which might allow a
+ * retry to occur.
+ *
+ * If `mayRetry` is truthy, and the Service suggests a retry,
+ * handleHMACMismatch returns kRecoveryStrategy.retry. Otherwise, it returns
+ * kRecoveryStrategy.error.
+ *
+ * Subclasses of SyncEngine can override this method to allow for different
+ * behavior -- e.g., to delete and ignore erroneous entries.
+ *
+ * All return values will be part of the kRecoveryStrategy enumeration.
+ */
+ handleHMACMismatch: function (item, mayRetry) {
+ // By default we either try again, or bail out noisily.
+ return (this.service.handleHMACEvent() && mayRetry) ?
+ SyncEngine.kRecoveryStrategy.retry :
+ SyncEngine.kRecoveryStrategy.error;
+ },
+
+ /**
+ * Returns a changeset containing all items in the store. The default
+ * implementation returns a changeset with timestamps from long ago, to
+ * ensure we always use the remote version if one exists.
+ *
+ * This function is only called for the first sync. Subsequent syncs call
+ * `pullNewChanges`.
+ *
+ * @return A `Changeset` object.
+ */
+ pullAllChanges() {
+ let changeset = new Changeset();
+ for (let id in this._store.getAllIDs()) {
+ changeset.set(id, 0);
+ }
+ return changeset;
+ },
+
+ /*
+ * Returns a changeset containing entries for all currently tracked items.
+ * The default implementation returns a changeset with timestamps indicating
+ * when the item was added to the tracker.
+ *
+ * @return A `Changeset` object.
+ */
+ pullNewChanges() {
+ return new Changeset(this.getChangedIDs());
+ },
+
+ /**
+ * Adds all remaining changeset entries back to the tracker, typically for
+ * items that failed to upload. This method is called at the end of each sync.
+ *
+ */
+ trackRemainingChanges() {
+ for (let [id, change] of this._modified.entries()) {
+ this._tracker.addChangedID(id, change);
+ }
+ },
+};
+
+/**
+ * A changeset is created for each sync in `Engine::get{Changed, All}IDs`,
+ * and stores opaque change data for tracked IDs. The default implementation
+ * only records timestamps, though engines can extend this to store additional
+ * data for each entry.
+ */
+class Changeset {
+ // Creates a changeset with an initial set of tracked entries.
+ constructor(changes = {}) {
+ this.changes = changes;
+ }
+
+ // Returns the last modified time, in seconds, for an entry in the changeset.
+ // `id` is guaranteed to be in the set.
+ getModifiedTimestamp(id) {
+ return this.changes[id];
+ }
+
+ // Adds a change for a tracked ID to the changeset.
+ set(id, change) {
+ this.changes[id] = change;
+ }
+
+ // Indicates whether an entry is in the changeset.
+ has(id) {
+ return id in this.changes;
+ }
+
+ // Deletes an entry from the changeset. Used to clean up entries for
+ // reconciled and successfully uploaded records.
+ delete(id) {
+ delete this.changes[id];
+ }
+
+ // Swaps two entries in the changeset. Used when reconciling duplicates that
+ // have local changes.
+ swap(oldID, newID) {
+ this.changes[newID] = this.changes[oldID];
+ delete this.changes[oldID];
+ }
+
+ // Returns an array of all tracked IDs in this changeset.
+ ids() {
+ return Object.keys(this.changes);
+ }
+
+ // Returns an array of `[id, change]` tuples. Used to repopulate the tracker
+ // with entries for failed uploads at the end of a sync.
+ entries() {
+ return Object.entries(this.changes);
+ }
+
+ // Returns the number of entries in this changeset.
+ count() {
+ return this.ids().length;
+ }
+
+ // Clears the changeset.
+ clear() {
+ this.changes = {};
+ }
+}
diff --git a/services/sync/modules/engines/addons.js b/services/sync/modules/engines/addons.js
new file mode 100644
index 000000000..01dab58d1
--- /dev/null
+++ b/services/sync/modules/engines/addons.js
@@ -0,0 +1,813 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*
+ * This file defines the add-on sync functionality.
+ *
+ * There are currently a number of known limitations:
+ * - We only sync XPI extensions and themes available from addons.mozilla.org.
+ * We hope to expand support for other add-ons eventually.
+ * - We only attempt syncing of add-ons between applications of the same type.
+ * This means add-ons will not synchronize between Firefox desktop and
+ * Firefox mobile, for example. This is because of significant add-on
+ * incompatibility between application types.
+ *
+ * Add-on records exist for each known {add-on, app-id} pair in the Sync client
+ * set. Each record has a randomly chosen GUID. The records then contain
+ * basic metadata about the add-on.
+ *
+ * We currently synchronize:
+ *
+ * - Installations
+ * - Uninstallations
+ * - User enabling and disabling
+ *
+ * Synchronization is influenced by the following preferences:
+ *
+ * - services.sync.addons.ignoreUserEnabledChanges
+ * - services.sync.addons.trustedSourceHostnames
+ *
+ * and also influenced by whether addons have repository caching enabled and
+ * whether they allow installation of addons from insecure options (both of
+ * which are themselves influenced by the "extensions." pref branch)
+ *
+ * See the documentation in services-sync.js for the behavior of these prefs.
+ */
+"use strict";
+
+var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-sync/addonutils.js");
+Cu.import("resource://services-sync/addonsreconciler.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/collection_validator.js");
+Cu.import("resource://services-common/async.js");
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "AddonManager",
+ "resource://gre/modules/AddonManager.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "AddonRepository",
+ "resource://gre/modules/addons/AddonRepository.jsm");
+
+this.EXPORTED_SYMBOLS = ["AddonsEngine", "AddonValidator"];
+
+// 7 days in milliseconds.
+const PRUNE_ADDON_CHANGES_THRESHOLD = 60 * 60 * 24 * 7 * 1000;
+
+/**
+ * AddonRecord represents the state of an add-on in an application.
+ *
+ * Each add-on has its own record for each application ID it is installed
+ * on.
+ *
+ * The ID of add-on records is a randomly-generated GUID. It is random instead
+ * of deterministic so the URIs of the records cannot be guessed and so
+ * compromised server credentials won't result in disclosure of the specific
+ * add-ons present in a Sync account.
+ *
+ * The record contains the following fields:
+ *
+ * addonID
+ * ID of the add-on. This correlates to the "id" property on an Addon type.
+ *
+ * applicationID
+ * The application ID this record is associated with.
+ *
+ * enabled
+ * Boolean stating whether add-on is enabled or disabled by the user.
+ *
+ * source
+ * String indicating where an add-on is from. Currently, we only support
+ * the value "amo" which indicates that the add-on came from the official
+ * add-ons repository, addons.mozilla.org. In the future, we may support
+ * installing add-ons from other sources. This provides a future-compatible
+ * mechanism for clients to only apply records they know how to handle.
+ */
+function AddonRecord(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+AddonRecord.prototype = {
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Record.Addon"
+};
+
+Utils.deferGetSet(AddonRecord, "cleartext", ["addonID",
+ "applicationID",
+ "enabled",
+ "source"]);
+
+/**
+ * The AddonsEngine handles synchronization of add-ons between clients.
+ *
+ * The engine maintains an instance of an AddonsReconciler, which is the entity
+ * maintaining state for add-ons. It provides the history and tracking APIs
+ * that AddonManager doesn't.
+ *
+ * The engine instance overrides a handful of functions on the base class. The
+ * rationale for each is documented by that function.
+ */
+this.AddonsEngine = function AddonsEngine(service) {
+ SyncEngine.call(this, "Addons", service);
+
+ this._reconciler = new AddonsReconciler();
+}
+AddonsEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _storeObj: AddonsStore,
+ _trackerObj: AddonsTracker,
+ _recordObj: AddonRecord,
+ version: 1,
+
+ syncPriority: 5,
+
+ _reconciler: null,
+
+ /**
+ * Override parent method to find add-ons by their public ID, not Sync GUID.
+ */
+ _findDupe: function _findDupe(item) {
+ let id = item.addonID;
+
+ // The reconciler should have been updated at the top of the sync, so we
+ // can assume it is up to date when this function is called.
+ let addons = this._reconciler.addons;
+ if (!(id in addons)) {
+ return null;
+ }
+
+ let addon = addons[id];
+ if (addon.guid != item.id) {
+ return addon.guid;
+ }
+
+ return null;
+ },
+
+ /**
+ * Override getChangedIDs to pull in tracker changes plus changes from the
+ * reconciler log.
+ */
+ getChangedIDs: function getChangedIDs() {
+ let changes = {};
+ for (let [id, modified] of Object.entries(this._tracker.changedIDs)) {
+ changes[id] = modified;
+ }
+
+ let lastSyncDate = new Date(this.lastSync * 1000);
+
+ // The reconciler should have been refreshed at the beginning of a sync and
+ // we assume this function is only called from within a sync.
+ let reconcilerChanges = this._reconciler.getChangesSinceDate(lastSyncDate);
+ let addons = this._reconciler.addons;
+ for (let change of reconcilerChanges) {
+ let changeTime = change[0];
+ let id = change[2];
+
+ if (!(id in addons)) {
+ continue;
+ }
+
+ // Keep newest modified time.
+ if (id in changes && changeTime < changes[id]) {
+ continue;
+ }
+
+ if (!this.isAddonSyncable(addons[id])) {
+ continue;
+ }
+
+ this._log.debug("Adding changed add-on from changes log: " + id);
+ let addon = addons[id];
+ changes[addon.guid] = changeTime.getTime() / 1000;
+ }
+
+ return changes;
+ },
+
+ /**
+ * Override start of sync function to refresh reconciler.
+ *
+ * Many functions in this class assume the reconciler is refreshed at the
+ * top of a sync. If this ever changes, those functions should be revisited.
+ *
+ * Technically speaking, we don't need to refresh the reconciler on every
+ * sync since it is installed as an AddonManager listener. However, add-ons
+ * are complicated and we force a full refresh, just in case the listeners
+ * missed something.
+ */
+ _syncStartup: function _syncStartup() {
+ // We refresh state before calling parent because syncStartup in the parent
+ // looks for changed IDs, which is dependent on add-on state being up to
+ // date.
+ this._refreshReconcilerState();
+
+ SyncEngine.prototype._syncStartup.call(this);
+ },
+
+ /**
+ * Override end of sync to perform a little housekeeping on the reconciler.
+ *
+ * We prune changes to prevent the reconciler state from growing without
+ * bound. Even if it grows unbounded, there would have to be many add-on
+ * changes (thousands) for it to slow things down significantly. This is
+ * highly unlikely to occur. Still, we exercise defense just in case.
+ */
+ _syncCleanup: function _syncCleanup() {
+ let ms = 1000 * this.lastSync - PRUNE_ADDON_CHANGES_THRESHOLD;
+ this._reconciler.pruneChangesBeforeDate(new Date(ms));
+
+ SyncEngine.prototype._syncCleanup.call(this);
+ },
+
+ /**
+ * Helper function to ensure reconciler is up to date.
+ *
+ * This will synchronously load the reconciler's state from the file
+ * system (if needed) and refresh the state of the reconciler.
+ */
+ _refreshReconcilerState: function _refreshReconcilerState() {
+ this._log.debug("Refreshing reconciler state");
+ let cb = Async.makeSpinningCallback();
+ this._reconciler.refreshGlobalState(cb);
+ cb.wait();
+ },
+
+ isAddonSyncable(addon, ignoreRepoCheck) {
+ return this._store.isAddonSyncable(addon, ignoreRepoCheck);
+ }
+};
+
+/**
+ * This is the primary interface between Sync and the Addons Manager.
+ *
+ * In addition to the core store APIs, we provide convenience functions to wrap
+ * Add-on Manager APIs with Sync-specific semantics.
+ */
+function AddonsStore(name, engine) {
+ Store.call(this, name, engine);
+}
+AddonsStore.prototype = {
+ __proto__: Store.prototype,
+
+ // Define the add-on types (.type) that we support.
+ _syncableTypes: ["extension", "theme"],
+
+ _extensionsPrefs: new Preferences("extensions."),
+
+ get reconciler() {
+ return this.engine._reconciler;
+ },
+
+ /**
+ * Override applyIncoming to filter out records we can't handle.
+ */
+ applyIncoming: function applyIncoming(record) {
+ // The fields we look at aren't present when the record is deleted.
+ if (!record.deleted) {
+ // Ignore records not belonging to our application ID because that is the
+ // current policy.
+ if (record.applicationID != Services.appinfo.ID) {
+ this._log.info("Ignoring incoming record from other App ID: " +
+ record.id);
+ return;
+ }
+
+ // Ignore records that aren't from the official add-on repository, as that
+ // is our current policy.
+ if (record.source != "amo") {
+ this._log.info("Ignoring unknown add-on source (" + record.source + ")" +
+ " for " + record.id);
+ return;
+ }
+ }
+
+ // Ignore incoming records for which an existing non-syncable addon
+ // exists.
+ let existingMeta = this.reconciler.addons[record.addonID];
+ if (existingMeta && !this.isAddonSyncable(existingMeta)) {
+ this._log.info("Ignoring incoming record for an existing but non-syncable addon", record.addonID);
+ return;
+ }
+
+ Store.prototype.applyIncoming.call(this, record);
+ },
+
+
+ /**
+ * Provides core Store API to create/install an add-on from a record.
+ */
+ create: function create(record) {
+ let cb = Async.makeSpinningCallback();
+ AddonUtils.installAddons([{
+ id: record.addonID,
+ syncGUID: record.id,
+ enabled: record.enabled,
+ requireSecureURI: this._extensionsPrefs.get("install.requireSecureOrigin", true),
+ }], cb);
+
+ // This will throw if there was an error. This will get caught by the sync
+ // engine and the record will try to be applied later.
+ let results = cb.wait();
+
+ if (results.skipped.includes(record.addonID)) {
+ this._log.info("Add-on skipped: " + record.addonID);
+ // Just early-return for skipped addons - we don't want to arrange to
+ // try again next time because the condition that caused up to skip
+ // will remain true for this addon forever.
+ return;
+ }
+
+ let addon;
+ for (let a of results.addons) {
+ if (a.id == record.addonID) {
+ addon = a;
+ break;
+ }
+ }
+
+ // This should never happen, but is present as a fail-safe.
+ if (!addon) {
+ throw new Error("Add-on not found after install: " + record.addonID);
+ }
+
+ this._log.info("Add-on installed: " + record.addonID);
+ },
+
+ /**
+ * Provides core Store API to remove/uninstall an add-on from a record.
+ */
+ remove: function remove(record) {
+ // If this is called, the payload is empty, so we have to find by GUID.
+ let addon = this.getAddonByGUID(record.id);
+ if (!addon) {
+ // We don't throw because if the add-on could not be found then we assume
+ // it has already been uninstalled and there is nothing for this function
+ // to do.
+ return;
+ }
+
+ this._log.info("Uninstalling add-on: " + addon.id);
+ let cb = Async.makeSpinningCallback();
+ AddonUtils.uninstallAddon(addon, cb);
+ cb.wait();
+ },
+
+ /**
+ * Provides core Store API to update an add-on from a record.
+ */
+ update: function update(record) {
+ let addon = this.getAddonByID(record.addonID);
+
+ // update() is called if !this.itemExists. And, since itemExists consults
+ // the reconciler only, we need to take care of some corner cases.
+ //
+ // First, the reconciler could know about an add-on that was uninstalled
+ // and no longer present in the add-ons manager.
+ if (!addon) {
+ this.create(record);
+ return;
+ }
+
+ // It's also possible that the add-on is non-restartless and has pending
+ // install/uninstall activity.
+ //
+ // We wouldn't get here if the incoming record was for a deletion. So,
+ // check for pending uninstall and cancel if necessary.
+ if (addon.pendingOperations & AddonManager.PENDING_UNINSTALL) {
+ addon.cancelUninstall();
+
+ // We continue with processing because there could be state or ID change.
+ }
+
+ let cb = Async.makeSpinningCallback();
+ this.updateUserDisabled(addon, !record.enabled, cb);
+ cb.wait();
+ },
+
+ /**
+ * Provide core Store API to determine if a record exists.
+ */
+ itemExists: function itemExists(guid) {
+ let addon = this.reconciler.getAddonStateFromSyncGUID(guid);
+
+ return !!addon;
+ },
+
+ /**
+ * Create an add-on record from its GUID.
+ *
+ * @param guid
+ * Add-on GUID (from extensions DB)
+ * @param collection
+ * Collection to add record to.
+ *
+ * @return AddonRecord instance
+ */
+ createRecord: function createRecord(guid, collection) {
+ let record = new AddonRecord(collection, guid);
+ record.applicationID = Services.appinfo.ID;
+
+ let addon = this.reconciler.getAddonStateFromSyncGUID(guid);
+
+ // If we don't know about this GUID or if it has been uninstalled, we mark
+ // the record as deleted.
+ if (!addon || !addon.installed) {
+ record.deleted = true;
+ return record;
+ }
+
+ record.modified = addon.modified.getTime() / 1000;
+
+ record.addonID = addon.id;
+ record.enabled = addon.enabled;
+
+ // This needs to be dynamic when add-ons don't come from AddonRepository.
+ record.source = "amo";
+
+ return record;
+ },
+
+ /**
+ * Changes the id of an add-on.
+ *
+ * This implements a core API of the store.
+ */
+ changeItemID: function changeItemID(oldID, newID) {
+ // We always update the GUID in the reconciler because it will be
+ // referenced later in the sync process.
+ let state = this.reconciler.getAddonStateFromSyncGUID(oldID);
+ if (state) {
+ state.guid = newID;
+ let cb = Async.makeSpinningCallback();
+ this.reconciler.saveState(null, cb);
+ cb.wait();
+ }
+
+ let addon = this.getAddonByGUID(oldID);
+ if (!addon) {
+ this._log.debug("Cannot change item ID (" + oldID + ") in Add-on " +
+ "Manager because old add-on not present: " + oldID);
+ return;
+ }
+
+ addon.syncGUID = newID;
+ },
+
+ /**
+ * Obtain the set of all syncable add-on Sync GUIDs.
+ *
+ * This implements a core Store API.
+ */
+ getAllIDs: function getAllIDs() {
+ let ids = {};
+
+ let addons = this.reconciler.addons;
+ for (let id in addons) {
+ let addon = addons[id];
+ if (this.isAddonSyncable(addon)) {
+ ids[addon.guid] = true;
+ }
+ }
+
+ return ids;
+ },
+
+ /**
+ * Wipe engine data.
+ *
+ * This uninstalls all syncable addons from the application. In case of
+ * error, it logs the error and keeps trying with other add-ons.
+ */
+ wipe: function wipe() {
+ this._log.info("Processing wipe.");
+
+ this.engine._refreshReconcilerState();
+
+ // We only wipe syncable add-ons. Wipe is a Sync feature not a security
+ // feature.
+ for (let guid in this.getAllIDs()) {
+ let addon = this.getAddonByGUID(guid);
+ if (!addon) {
+ this._log.debug("Ignoring add-on because it couldn't be obtained: " +
+ guid);
+ continue;
+ }
+
+ this._log.info("Uninstalling add-on as part of wipe: " + addon.id);
+ Utils.catch.call(this, () => addon.uninstall())();
+ }
+ },
+
+ /***************************************************************************
+ * Functions below are unique to this store and not part of the Store API *
+ ***************************************************************************/
+
+ /**
+ * Synchronously obtain an add-on from its public ID.
+ *
+ * @param id
+ * Add-on ID
+ * @return Addon or undefined if not found
+ */
+ getAddonByID: function getAddonByID(id) {
+ let cb = Async.makeSyncCallback();
+ AddonManager.getAddonByID(id, cb);
+ return Async.waitForSyncCallback(cb);
+ },
+
+ /**
+ * Synchronously obtain an add-on from its Sync GUID.
+ *
+ * @param guid
+ * Add-on Sync GUID
+ * @return DBAddonInternal or null
+ */
+ getAddonByGUID: function getAddonByGUID(guid) {
+ let cb = Async.makeSyncCallback();
+ AddonManager.getAddonBySyncGUID(guid, cb);
+ return Async.waitForSyncCallback(cb);
+ },
+
+ /**
+ * Determines whether an add-on is suitable for Sync.
+ *
+ * @param addon
+ * Addon instance
+ * @param ignoreRepoCheck
+ * Should we skip checking the Addons repository (primarially useful
+ * for testing and validation).
+ * @return Boolean indicating whether it is appropriate for Sync
+ */
+ isAddonSyncable: function isAddonSyncable(addon, ignoreRepoCheck = false) {
+ // Currently, we limit syncable add-ons to those that are:
+ // 1) In a well-defined set of types
+ // 2) Installed in the current profile
+ // 3) Not installed by a foreign entity (i.e. installed by the app)
+ // since they act like global extensions.
+ // 4) Is not a hotfix.
+ // 5) The addons XPIProvider doesn't veto it (i.e not being installed in
+ // the profile directory, or any other reasons it says the addon can't
+ // be synced)
+ // 6) Are installed from AMO
+
+ // We could represent the test as a complex boolean expression. We go the
+ // verbose route so the failure reason is logged.
+ if (!addon) {
+ this._log.debug("Null object passed to isAddonSyncable.");
+ return false;
+ }
+
+ if (this._syncableTypes.indexOf(addon.type) == -1) {
+ this._log.debug(addon.id + " not syncable: type not in whitelist: " +
+ addon.type);
+ return false;
+ }
+
+ if (!(addon.scope & AddonManager.SCOPE_PROFILE)) {
+ this._log.debug(addon.id + " not syncable: not installed in profile.");
+ return false;
+ }
+
+ // If the addon manager says it's not syncable, we skip it.
+ if (!addon.isSyncable) {
+ this._log.debug(addon.id + " not syncable: vetoed by the addon manager.");
+ return false;
+ }
+
+ // This may be too aggressive. If an add-on is downloaded from AMO and
+ // manually placed in the profile directory, foreignInstall will be set.
+ // Arguably, that add-on should be syncable.
+ // TODO Address the edge case and come up with more robust heuristics.
+ if (addon.foreignInstall) {
+ this._log.debug(addon.id + " not syncable: is foreign install.");
+ return false;
+ }
+
+ // Ignore hotfix extensions (bug 741670). The pref may not be defined.
+ // XXX - note that addon.isSyncable will be false for hotfix addons, so
+ // this check isn't strictly necessary - except for Sync tests which aren't
+ // setup to create a "real" hotfix addon. This can be removed once those
+ // tests are fixed (but keeping it doesn't hurt either)
+ if (this._extensionsPrefs.get("hotfix.id", null) == addon.id) {
+ this._log.debug(addon.id + " not syncable: is a hotfix.");
+ return false;
+ }
+
+ // If the AddonRepository's cache isn't enabled (which it typically isn't
+ // in tests), getCachedAddonByID always returns null - so skip the check
+ // in that case. We also provide a way to specifically opt-out of the check
+ // even if the cache is enabled, which is used by the validators.
+ if (ignoreRepoCheck || !AddonRepository.cacheEnabled) {
+ return true;
+ }
+
+ let cb = Async.makeSyncCallback();
+ AddonRepository.getCachedAddonByID(addon.id, cb);
+ let result = Async.waitForSyncCallback(cb);
+
+ if (!result) {
+ this._log.debug(addon.id + " not syncable: add-on not found in add-on " +
+ "repository.");
+ return false;
+ }
+
+ return this.isSourceURITrusted(result.sourceURI);
+ },
+
+ /**
+ * Determine whether an add-on's sourceURI field is trusted and the add-on
+ * can be installed.
+ *
+ * This function should only ever be called from isAddonSyncable(). It is
+ * exposed as a separate function to make testing easier.
+ *
+ * @param uri
+ * nsIURI instance to validate
+ * @return bool
+ */
+ isSourceURITrusted: function isSourceURITrusted(uri) {
+ // For security reasons, we currently limit synced add-ons to those
+ // installed from trusted hostname(s). We additionally require TLS with
+ // the add-ons site to help prevent forgeries.
+ let trustedHostnames = Svc.Prefs.get("addons.trustedSourceHostnames", "")
+ .split(",");
+
+ if (!uri) {
+ this._log.debug("Undefined argument to isSourceURITrusted().");
+ return false;
+ }
+
+ // Scheme is validated before the hostname because uri.host may not be
+ // populated for certain schemes. It appears to always be populated for
+ // https, so we avoid the potential NS_ERROR_FAILURE on field access.
+ if (uri.scheme != "https") {
+ this._log.debug("Source URI not HTTPS: " + uri.spec);
+ return false;
+ }
+
+ if (trustedHostnames.indexOf(uri.host) == -1) {
+ this._log.debug("Source hostname not trusted: " + uri.host);
+ return false;
+ }
+
+ return true;
+ },
+
+ /**
+ * Update the userDisabled flag on an add-on.
+ *
+ * This will enable or disable an add-on and call the supplied callback when
+ * the action is complete. If no action is needed, the callback gets called
+ * immediately.
+ *
+ * @param addon
+ * Addon instance to manipulate.
+ * @param value
+ * Boolean to which to set userDisabled on the passed Addon.
+ * @param callback
+ * Function to be called when action is complete. Will receive 2
+ * arguments, a truthy value that signifies error, and the Addon
+ * instance passed to this function.
+ */
+ updateUserDisabled: function updateUserDisabled(addon, value, callback) {
+ if (addon.userDisabled == value) {
+ callback(null, addon);
+ return;
+ }
+
+ // A pref allows changes to the enabled flag to be ignored.
+ if (Svc.Prefs.get("addons.ignoreUserEnabledChanges", false)) {
+ this._log.info("Ignoring enabled state change due to preference: " +
+ addon.id);
+ callback(null, addon);
+ return;
+ }
+
+ AddonUtils.updateUserDisabled(addon, value, callback);
+ },
+};
+
+/**
+ * The add-ons tracker keeps track of real-time changes to add-ons.
+ *
+ * It hooks up to the reconciler and receives notifications directly from it.
+ */
+function AddonsTracker(name, engine) {
+ Tracker.call(this, name, engine);
+}
+AddonsTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ get reconciler() {
+ return this.engine._reconciler;
+ },
+
+ get store() {
+ return this.engine._store;
+ },
+
+ /**
+ * This callback is executed whenever the AddonsReconciler sends out a change
+ * notification. See AddonsReconciler.addChangeListener().
+ */
+ changeListener: function changeHandler(date, change, addon) {
+ this._log.debug("changeListener invoked: " + change + " " + addon.id);
+ // Ignore changes that occur during sync.
+ if (this.ignoreAll) {
+ return;
+ }
+
+ if (!this.store.isAddonSyncable(addon)) {
+ this._log.debug("Ignoring change because add-on isn't syncable: " +
+ addon.id);
+ return;
+ }
+
+ this.addChangedID(addon.guid, date.getTime() / 1000);
+ this.score += SCORE_INCREMENT_XLARGE;
+ },
+
+ startTracking: function() {
+ if (this.engine.enabled) {
+ this.reconciler.startListening();
+ }
+
+ this.reconciler.addChangeListener(this);
+ },
+
+ stopTracking: function() {
+ this.reconciler.removeChangeListener(this);
+ this.reconciler.stopListening();
+ },
+};
+
+class AddonValidator extends CollectionValidator {
+ constructor(engine = null) {
+ super("addons", "id", [
+ "addonID",
+ "enabled",
+ "applicationID",
+ "source"
+ ]);
+ this.engine = engine;
+ }
+
+ getClientItems() {
+ return Promise.all([
+ new Promise(resolve =>
+ AddonManager.getAllAddons(resolve)),
+ new Promise(resolve =>
+ AddonManager.getAddonsWithOperationsByTypes(["extension", "theme"], resolve)),
+ ]).then(([installed, addonsWithPendingOperation]) => {
+ // Addons pending install won't be in the first list, but addons pending
+ // uninstall/enable/disable will be in both lists.
+ let all = new Map(installed.map(addon => [addon.id, addon]));
+ for (let addon of addonsWithPendingOperation) {
+ all.set(addon.id, addon);
+ }
+ // Convert to an array since Map.prototype.values returns an iterable
+ return [...all.values()];
+ });
+ }
+
+ normalizeClientItem(item) {
+ let enabled = !item.userDisabled;
+ if (item.pendingOperations & AddonManager.PENDING_ENABLE) {
+ enabled = true;
+ } else if (item.pendingOperations & AddonManager.PENDING_DISABLE) {
+ enabled = false;
+ }
+ return {
+ enabled,
+ id: item.syncGUID,
+ addonID: item.id,
+ applicationID: Services.appinfo.ID,
+ source: "amo", // check item.foreignInstall?
+ original: item
+ };
+ }
+
+ normalizeServerItem(item) {
+ let guid = this.engine._findDupe(item);
+ if (guid) {
+ item.id = guid;
+ }
+ return item;
+ }
+
+ clientUnderstands(item) {
+ return item.applicationID === Services.appinfo.ID;
+ }
+
+ syncedByClient(item) {
+ return !item.original.hidden &&
+ !item.original.isSystem &&
+ !(item.original.pendingOperations & AddonManager.PENDING_UNINSTALL) &&
+ this.engine.isAddonSyncable(item.original, true);
+ }
+}
diff --git a/services/sync/modules/engines/bookmarks.js b/services/sync/modules/engines/bookmarks.js
new file mode 100644
index 000000000..76a198a8b
--- /dev/null
+++ b/services/sync/modules/engines/bookmarks.js
@@ -0,0 +1,1378 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ['BookmarksEngine', "PlacesItem", "Bookmark",
+ "BookmarkFolder", "BookmarkQuery",
+ "Livemark", "BookmarkSeparator"];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://gre/modules/Task.jsm");
+Cu.import("resource://gre/modules/PlacesBackups.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "BookmarkValidator",
+ "resource://services-sync/bookmark_validator.js");
+XPCOMUtils.defineLazyGetter(this, "PlacesBundle", () => {
+ let bundleService = Cc["@mozilla.org/intl/stringbundle;1"]
+ .getService(Ci.nsIStringBundleService);
+ return bundleService.createBundle("chrome://places/locale/places.properties");
+});
+
+const ANNOS_TO_TRACK = [PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO,
+ PlacesSyncUtils.bookmarks.SIDEBAR_ANNO,
+ PlacesUtils.LMANNO_FEEDURI, PlacesUtils.LMANNO_SITEURI];
+
+const SERVICE_NOT_SUPPORTED = "Service not supported on this platform";
+const FOLDER_SORTINDEX = 1000000;
+const {
+ SOURCE_SYNC,
+ SOURCE_IMPORT,
+ SOURCE_IMPORT_REPLACE,
+} = Ci.nsINavBookmarksService;
+
+const SQLITE_MAX_VARIABLE_NUMBER = 999;
+
+const ORGANIZERQUERY_ANNO = "PlacesOrganizer/OrganizerQuery";
+const ALLBOOKMARKS_ANNO = "AllBookmarks";
+const MOBILE_ANNO = "MobileBookmarks";
+
+// The tracker ignores changes made by bookmark import and restore, and
+// changes made by Sync. We don't need to exclude `SOURCE_IMPORT`, but both
+// import and restore fire `bookmarks-restore-*` observer notifications, and
+// the tracker doesn't currently distinguish between the two.
+const IGNORED_SOURCES = [SOURCE_SYNC, SOURCE_IMPORT, SOURCE_IMPORT_REPLACE];
+
+// Returns the constructor for a bookmark record type.
+function getTypeObject(type) {
+ switch (type) {
+ case "bookmark":
+ case "microsummary":
+ return Bookmark;
+ case "query":
+ return BookmarkQuery;
+ case "folder":
+ return BookmarkFolder;
+ case "livemark":
+ return Livemark;
+ case "separator":
+ return BookmarkSeparator;
+ case "item":
+ return PlacesItem;
+ }
+ return null;
+}
+
+this.PlacesItem = function PlacesItem(collection, id, type) {
+ CryptoWrapper.call(this, collection, id);
+ this.type = type || "item";
+}
+PlacesItem.prototype = {
+ decrypt: function PlacesItem_decrypt(keyBundle) {
+ // Do the normal CryptoWrapper decrypt, but change types before returning
+ let clear = CryptoWrapper.prototype.decrypt.call(this, keyBundle);
+
+ // Convert the abstract places item to the actual object type
+ if (!this.deleted)
+ this.__proto__ = this.getTypeObject(this.type).prototype;
+
+ return clear;
+ },
+
+ getTypeObject: function PlacesItem_getTypeObject(type) {
+ let recordObj = getTypeObject(type);
+ if (!recordObj) {
+ throw new Error("Unknown places item object type: " + type);
+ }
+ return recordObj;
+ },
+
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Sync.Record.PlacesItem",
+
+ // Converts the record to a Sync bookmark object that can be passed to
+ // `PlacesSyncUtils.bookmarks.{insert, update}`.
+ toSyncBookmark() {
+ return {
+ kind: this.type,
+ syncId: this.id,
+ parentSyncId: this.parentid,
+ };
+ },
+
+ // Populates the record from a Sync bookmark object returned from
+ // `PlacesSyncUtils.bookmarks.fetch`.
+ fromSyncBookmark(item) {
+ this.parentid = item.parentSyncId;
+ this.parentName = item.parentTitle;
+ },
+};
+
+Utils.deferGetSet(PlacesItem,
+ "cleartext",
+ ["hasDupe", "parentid", "parentName", "type"]);
+
+this.Bookmark = function Bookmark(collection, id, type) {
+ PlacesItem.call(this, collection, id, type || "bookmark");
+}
+Bookmark.prototype = {
+ __proto__: PlacesItem.prototype,
+ _logName: "Sync.Record.Bookmark",
+
+ toSyncBookmark() {
+ let info = PlacesItem.prototype.toSyncBookmark.call(this);
+ info.title = this.title;
+ info.url = this.bmkUri;
+ info.description = this.description;
+ info.loadInSidebar = this.loadInSidebar;
+ info.tags = this.tags;
+ info.keyword = this.keyword;
+ return info;
+ },
+
+ fromSyncBookmark(item) {
+ PlacesItem.prototype.fromSyncBookmark.call(this, item);
+ this.title = item.title;
+ this.bmkUri = item.url.href;
+ this.description = item.description;
+ this.loadInSidebar = item.loadInSidebar;
+ this.tags = item.tags;
+ this.keyword = item.keyword;
+ },
+};
+
+Utils.deferGetSet(Bookmark,
+ "cleartext",
+ ["title", "bmkUri", "description",
+ "loadInSidebar", "tags", "keyword"]);
+
+this.BookmarkQuery = function BookmarkQuery(collection, id) {
+ Bookmark.call(this, collection, id, "query");
+}
+BookmarkQuery.prototype = {
+ __proto__: Bookmark.prototype,
+ _logName: "Sync.Record.BookmarkQuery",
+
+ toSyncBookmark() {
+ let info = Bookmark.prototype.toSyncBookmark.call(this);
+ info.folder = this.folderName;
+ info.query = this.queryId;
+ return info;
+ },
+
+ fromSyncBookmark(item) {
+ Bookmark.prototype.fromSyncBookmark.call(this, item);
+ this.folderName = item.folder;
+ this.queryId = item.query;
+ },
+};
+
+Utils.deferGetSet(BookmarkQuery,
+ "cleartext",
+ ["folderName", "queryId"]);
+
+this.BookmarkFolder = function BookmarkFolder(collection, id, type) {
+ PlacesItem.call(this, collection, id, type || "folder");
+}
+BookmarkFolder.prototype = {
+ __proto__: PlacesItem.prototype,
+ _logName: "Sync.Record.Folder",
+
+ toSyncBookmark() {
+ let info = PlacesItem.prototype.toSyncBookmark.call(this);
+ info.description = this.description;
+ info.title = this.title;
+ return info;
+ },
+
+ fromSyncBookmark(item) {
+ PlacesItem.prototype.fromSyncBookmark.call(this, item);
+ this.title = item.title;
+ this.description = item.description;
+ this.children = item.childSyncIds;
+ },
+};
+
+Utils.deferGetSet(BookmarkFolder, "cleartext", ["description", "title",
+ "children"]);
+
+this.Livemark = function Livemark(collection, id) {
+ BookmarkFolder.call(this, collection, id, "livemark");
+}
+Livemark.prototype = {
+ __proto__: BookmarkFolder.prototype,
+ _logName: "Sync.Record.Livemark",
+
+ toSyncBookmark() {
+ let info = BookmarkFolder.prototype.toSyncBookmark.call(this);
+ info.feed = this.feedUri;
+ info.site = this.siteUri;
+ return info;
+ },
+
+ fromSyncBookmark(item) {
+ BookmarkFolder.prototype.fromSyncBookmark.call(this, item);
+ this.feedUri = item.feed.href;
+ if (item.site) {
+ this.siteUri = item.site.href;
+ }
+ },
+};
+
+Utils.deferGetSet(Livemark, "cleartext", ["siteUri", "feedUri"]);
+
+this.BookmarkSeparator = function BookmarkSeparator(collection, id) {
+ PlacesItem.call(this, collection, id, "separator");
+}
+BookmarkSeparator.prototype = {
+ __proto__: PlacesItem.prototype,
+ _logName: "Sync.Record.Separator",
+
+ fromSyncBookmark(item) {
+ PlacesItem.prototype.fromSyncBookmark.call(this, item);
+ this.pos = item.index;
+ },
+};
+
+Utils.deferGetSet(BookmarkSeparator, "cleartext", "pos");
+
+this.BookmarksEngine = function BookmarksEngine(service) {
+ SyncEngine.call(this, "Bookmarks", service);
+}
+BookmarksEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _recordObj: PlacesItem,
+ _storeObj: BookmarksStore,
+ _trackerObj: BookmarksTracker,
+ version: 2,
+ _defaultSort: "index",
+
+ syncPriority: 4,
+ allowSkippedRecord: false,
+
+ // A diagnostic helper to get the string value for a bookmark's URL given
+ // its ID. Always returns a string - on error will return a string in the
+ // form of "<description of error>" as this is purely for, eg, logging.
+ // (This means hitting the DB directly and we don't bother using a cached
+ // statement - we should rarely hit this.)
+ _getStringUrlForId(id) {
+ let url;
+ try {
+ let stmt = this._store._getStmt(`
+ SELECT h.url
+ FROM moz_places h
+ JOIN moz_bookmarks b ON h.id = b.fk
+ WHERE b.id = :id`);
+ stmt.params.id = id;
+ let rows = Async.querySpinningly(stmt, ["url"]);
+ url = rows.length == 0 ? "<not found>" : rows[0].url;
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ if (ex instanceof Ci.mozIStorageError) {
+ url = `<failed: Storage error: ${ex.message} (${ex.result})>`;
+ } else {
+ url = `<failed: ${ex.toString()}>`;
+ }
+ }
+ return url;
+ },
+
+ _guidMapFailed: false,
+ _buildGUIDMap: function _buildGUIDMap() {
+ let store = this._store;
+ let guidMap = {};
+ let tree = Async.promiseSpinningly(PlacesUtils.promiseBookmarksTree("", {
+ includeItemIds: true
+ }));
+ function* walkBookmarksTree(tree, parent=null) {
+ if (tree) {
+ // Skip root node
+ if (parent) {
+ yield [tree, parent];
+ }
+ if (tree.children) {
+ for (let child of tree.children) {
+ store._sleep(0); // avoid jank while looping.
+ yield* walkBookmarksTree(child, tree);
+ }
+ }
+ }
+ }
+
+ function* walkBookmarksRoots(tree, rootIDs) {
+ for (let id of rootIDs) {
+ let bookmarkRoot = tree.children.find(child => child.id === id);
+ if (bookmarkRoot === null) {
+ continue;
+ }
+ yield* walkBookmarksTree(bookmarkRoot, tree);
+ }
+ }
+
+ let rootsToWalk = getChangeRootIds();
+
+ for (let [node, parent] of walkBookmarksRoots(tree, rootsToWalk)) {
+ let {guid, id, type: placeType} = node;
+ guid = PlacesSyncUtils.bookmarks.guidToSyncId(guid);
+ let key;
+ switch (placeType) {
+ case PlacesUtils.TYPE_X_MOZ_PLACE:
+ // Bookmark
+ let query = null;
+ if (node.annos && node.uri.startsWith("place:")) {
+ query = node.annos.find(({name}) =>
+ name === PlacesSyncUtils.bookmarks.SMART_BOOKMARKS_ANNO);
+ }
+ if (query && query.value) {
+ key = "q" + query.value;
+ } else {
+ key = "b" + node.uri + ":" + (node.title || "");
+ }
+ break;
+ case PlacesUtils.TYPE_X_MOZ_PLACE_CONTAINER:
+ // Folder
+ key = "f" + (node.title || "");
+ break;
+ case PlacesUtils.TYPE_X_MOZ_PLACE_SEPARATOR:
+ // Separator
+ key = "s" + node.index;
+ break;
+ default:
+ this._log.error("Unknown place type: '"+placeType+"'");
+ continue;
+ }
+
+ let parentName = parent.title || "";
+ if (guidMap[parentName] == null)
+ guidMap[parentName] = {};
+
+ // If the entry already exists, remember that there are explicit dupes.
+ let entry = new String(guid);
+ entry.hasDupe = guidMap[parentName][key] != null;
+
+ // Remember this item's GUID for its parent-name/key pair.
+ guidMap[parentName][key] = entry;
+ this._log.trace("Mapped: " + [parentName, key, entry, entry.hasDupe]);
+ }
+
+ return guidMap;
+ },
+
+ // Helper function to get a dupe GUID for an item.
+ _mapDupe: function _mapDupe(item) {
+ // Figure out if we have something to key with.
+ let key;
+ let altKey;
+ switch (item.type) {
+ case "query":
+ // Prior to Bug 610501, records didn't carry their Smart Bookmark
+ // anno, so we won't be able to dupe them correctly. This altKey
+ // hack should get them to dupe correctly.
+ if (item.queryId) {
+ key = "q" + item.queryId;
+ altKey = "b" + item.bmkUri + ":" + (item.title || "");
+ break;
+ }
+ // No queryID? Fall through to the regular bookmark case.
+ case "bookmark":
+ case "microsummary":
+ key = "b" + item.bmkUri + ":" + (item.title || "");
+ break;
+ case "folder":
+ case "livemark":
+ key = "f" + (item.title || "");
+ break;
+ case "separator":
+ key = "s" + item.pos;
+ break;
+ default:
+ return;
+ }
+
+ // Figure out if we have a map to use!
+ // This will throw in some circumstances. That's fine.
+ let guidMap = this._guidMap;
+
+ // Give the GUID if we have the matching pair.
+ let parentName = item.parentName || "";
+ this._log.trace("Finding mapping: " + parentName + ", " + key);
+ let parent = guidMap[parentName];
+
+ if (!parent) {
+ this._log.trace("No parent => no dupe.");
+ return undefined;
+ }
+
+ let dupe = parent[key];
+
+ if (dupe) {
+ this._log.trace("Mapped dupe: " + dupe);
+ return dupe;
+ }
+
+ if (altKey) {
+ dupe = parent[altKey];
+ if (dupe) {
+ this._log.trace("Mapped dupe using altKey " + altKey + ": " + dupe);
+ return dupe;
+ }
+ }
+
+ this._log.trace("No dupe found for key " + key + "/" + altKey + ".");
+ return undefined;
+ },
+
+ _syncStartup: function _syncStart() {
+ SyncEngine.prototype._syncStartup.call(this);
+
+ let cb = Async.makeSpinningCallback();
+ Task.spawn(function* () {
+ // For first-syncs, make a backup for the user to restore
+ if (this.lastSync == 0) {
+ this._log.debug("Bookmarks backup starting.");
+ yield PlacesBackups.create(null, true);
+ this._log.debug("Bookmarks backup done.");
+ }
+ }.bind(this)).then(
+ cb, ex => {
+ // Failure to create a backup is somewhat bad, but probably not bad
+ // enough to prevent syncing of bookmarks - so just log the error and
+ // continue.
+ this._log.warn("Error while backing up bookmarks, but continuing with sync", ex);
+ cb();
+ }
+ );
+
+ cb.wait();
+
+ this.__defineGetter__("_guidMap", function() {
+ // Create a mapping of folder titles and separator positions to GUID.
+ // We do this lazily so that we don't do any work unless we reconcile
+ // incoming items.
+ let guidMap;
+ try {
+ guidMap = this._buildGUIDMap();
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.warn("Error while building GUID map, skipping all other incoming items", ex);
+ throw {code: Engine.prototype.eEngineAbortApplyIncoming,
+ cause: ex};
+ }
+ delete this._guidMap;
+ return this._guidMap = guidMap;
+ });
+
+ this._store._childrenToOrder = {};
+ this._store.clearPendingDeletions();
+ },
+
+ _deletePending() {
+ // Delete pending items -- See the comment above BookmarkStore's deletePending
+ let newlyModified = Async.promiseSpinningly(this._store.deletePending());
+ let now = this._tracker._now();
+ this._log.debug("Deleted pending items", newlyModified);
+ for (let modifiedSyncID of newlyModified) {
+ if (!this._modified.has(modifiedSyncID)) {
+ this._modified.set(modifiedSyncID, { timestamp: now, deleted: false });
+ }
+ }
+ },
+
+ // We avoid reviving folders since reviving them properly would require
+ // reviving their children as well. Unfortunately, this is the wrong choice
+ // in the case of a bookmark restore where wipeServer failed -- if the
+ // server has the folder as deleted, we *would* want to reupload this folder.
+ // This is mitigated by the fact that we move any undeleted children to the
+ // grandparent when deleting the parent.
+ _shouldReviveRemotelyDeletedRecord(item) {
+ let kind = Async.promiseSpinningly(
+ PlacesSyncUtils.bookmarks.getKindForSyncId(item.id));
+ if (kind === PlacesSyncUtils.bookmarks.KINDS.FOLDER) {
+ return false;
+ }
+
+ // In addition to preventing the deletion of this record (handled by the caller),
+ // we need to mark the parent of this record for uploading next sync, in order
+ // to ensure its children array is accurate.
+ let modifiedTimestamp = this._modified.getModifiedTimestamp(item.id);
+ if (!modifiedTimestamp) {
+ // We only expect this to be called with items locally modified, so
+ // something strange is going on - play it safe and don't revive it.
+ this._log.error("_shouldReviveRemotelyDeletedRecord called on unmodified item: " + item.id);
+ return false;
+ }
+
+ let localID = this._store.idForGUID(item.id);
+ let localParentID = PlacesUtils.bookmarks.getFolderIdForItem(localID);
+ let localParentSyncID = this._store.GUIDForId(localParentID);
+
+ this._log.trace(`Reviving item "${item.id}" and marking parent ${localParentSyncID} as modified.`);
+
+ if (!this._modified.has(localParentSyncID)) {
+ this._modified.set(localParentSyncID, {
+ timestamp: modifiedTimestamp,
+ deleted: false
+ });
+ }
+ return true
+ },
+
+ _processIncoming: function (newitems) {
+ try {
+ SyncEngine.prototype._processIncoming.call(this, newitems);
+ } finally {
+ try {
+ this._deletePending();
+ } finally {
+ // Reorder children.
+ this._store._orderChildren();
+ delete this._store._childrenToOrder;
+ }
+ }
+ },
+
+ _syncFinish: function _syncFinish() {
+ SyncEngine.prototype._syncFinish.call(this);
+ this._tracker._ensureMobileQuery();
+ },
+
+ _syncCleanup: function _syncCleanup() {
+ SyncEngine.prototype._syncCleanup.call(this);
+ delete this._guidMap;
+ },
+
+ _createRecord: function _createRecord(id) {
+ // Create the record as usual, but mark it as having dupes if necessary.
+ let record = SyncEngine.prototype._createRecord.call(this, id);
+ let entry = this._mapDupe(record);
+ if (entry != null && entry.hasDupe) {
+ record.hasDupe = true;
+ }
+ return record;
+ },
+
+ _findDupe: function _findDupe(item) {
+ this._log.trace("Finding dupe for " + item.id +
+ " (already duped: " + item.hasDupe + ").");
+
+ // Don't bother finding a dupe if the incoming item has duplicates.
+ if (item.hasDupe) {
+ this._log.trace(item.id + " already a dupe: not finding one.");
+ return;
+ }
+ let mapped = this._mapDupe(item);
+ this._log.debug(item.id + " mapped to " + mapped);
+ // We must return a string, not an object, and the entries in the GUIDMap
+ // are created via "new String()" making them an object.
+ return mapped ? mapped.toString() : mapped;
+ },
+
+ pullAllChanges() {
+ return new BookmarksChangeset(this._store.getAllIDs());
+ },
+
+ pullNewChanges() {
+ let modifiedGUIDs = this._getModifiedGUIDs();
+ if (!modifiedGUIDs.length) {
+ return new BookmarksChangeset(this._tracker.changedIDs);
+ }
+
+ // We don't use `PlacesUtils.promiseDBConnection` here because
+ // `getChangedIDs` might be called while we're in a batch, meaning we
+ // won't see any changes until the batch finishes and the transaction
+ // commits.
+ let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase)
+ .DBConnection;
+
+ // Filter out tags, organizer queries, and other descendants that we're
+ // not tracking. We chunk `modifiedGUIDs` because SQLite limits the number
+ // of bound parameters per query.
+ for (let startIndex = 0;
+ startIndex < modifiedGUIDs.length;
+ startIndex += SQLITE_MAX_VARIABLE_NUMBER) {
+
+ let chunkLength = Math.min(SQLITE_MAX_VARIABLE_NUMBER,
+ modifiedGUIDs.length - startIndex);
+
+ let query = `
+ WITH RECURSIVE
+ modifiedGuids(guid) AS (
+ VALUES ${new Array(chunkLength).fill("(?)").join(", ")}
+ ),
+ syncedItems(id) AS (
+ VALUES ${getChangeRootIds().map(id => `(${id})`).join(", ")}
+ UNION ALL
+ SELECT b.id
+ FROM moz_bookmarks b
+ JOIN syncedItems s ON b.parent = s.id
+ )
+ SELECT b.guid
+ FROM modifiedGuids m
+ JOIN moz_bookmarks b ON b.guid = m.guid
+ LEFT JOIN syncedItems s ON b.id = s.id
+ WHERE s.id IS NULL
+ `;
+
+ let statement = db.createAsyncStatement(query);
+ try {
+ for (let i = 0; i < chunkLength; i++) {
+ statement.bindByIndex(i, modifiedGUIDs[startIndex + i]);
+ }
+ let results = Async.querySpinningly(statement, ["guid"]);
+ for (let { guid } of results) {
+ let syncID = PlacesSyncUtils.bookmarks.guidToSyncId(guid);
+ this._tracker.removeChangedID(syncID);
+ }
+ } finally {
+ statement.finalize();
+ }
+ }
+
+ return new BookmarksChangeset(this._tracker.changedIDs);
+ },
+
+ // Returns an array of Places GUIDs for all changed items. Ignores deletions,
+ // which won't exist in the DB and shouldn't be removed from the tracker.
+ _getModifiedGUIDs() {
+ let guids = [];
+ for (let syncID in this._tracker.changedIDs) {
+ if (this._tracker.changedIDs[syncID].deleted === true) {
+ // The `===` check also filters out old persisted timestamps,
+ // which won't have a `deleted` property.
+ continue;
+ }
+ let guid = PlacesSyncUtils.bookmarks.syncIdToGuid(syncID);
+ guids.push(guid);
+ }
+ return guids;
+ },
+
+ // Called when _findDupe returns a dupe item and the engine has decided to
+ // switch the existing item to the new incoming item.
+ _switchItemToDupe(localDupeGUID, incomingItem) {
+ // We unconditionally change the item's ID in case the engine knows of
+ // an item but doesn't expose it through itemExists. If the API
+ // contract were stronger, this could be changed.
+ this._log.debug("Switching local ID to incoming: " + localDupeGUID + " -> " +
+ incomingItem.id);
+ this._store.changeItemID(localDupeGUID, incomingItem.id);
+
+ // And mark the parent as being modified. Given we de-dupe based on the
+ // parent *name* it's possible the item having its GUID changed has a
+ // different parent from the incoming record.
+ // So we need to find the GUID of the local parent.
+ let now = this._tracker._now();
+ let localID = this._store.idForGUID(incomingItem.id);
+ let localParentID = PlacesUtils.bookmarks.getFolderIdForItem(localID);
+ let localParentGUID = this._store.GUIDForId(localParentID);
+ this._modified.set(localParentGUID, { modified: now, deleted: false });
+
+ // And we also add the parent as reflected in the incoming record as the
+ // de-dupe process might have used an existing item in a different folder.
+ // But only if the parent exists, otherwise we will upload a deleted item
+ // when it might actually be valid, just unknown to us. Note that this
+ // scenario will still leave us with inconsistent client and server states;
+ // the incoming record on the server references a parent that isn't the
+ // actual parent locally - see bug 1297955.
+ if (localParentGUID != incomingItem.parentid) {
+ let remoteParentID = this._store.idForGUID(incomingItem.parentid);
+ if (remoteParentID > 0) {
+ // The parent specified in the record does exist, so we are going to
+ // attempt a move when we come to applying the record. Mark the parent
+ // as being modified so we will later upload it with the new child
+ // reference.
+ this._modified.set(incomingItem.parentid, { modified: now, deleted: false });
+ } else {
+ // We aren't going to do a move as we don't have the parent (yet?).
+ // When applying the record we will add our special PARENT_ANNO
+ // annotation, so if it arrives in the future (either this Sync or a
+ // later one) it will be reparented.
+ this._log.debug(`Incoming duplicate item ${incomingItem.id} specifies ` +
+ `non-existing parent ${incomingItem.parentid}`);
+ }
+ }
+
+ // The local, duplicate ID is always deleted on the server - but for
+ // bookmarks it is a logical delete.
+ // Simply adding this (now non-existing) ID to the tracker is enough.
+ this._modified.set(localDupeGUID, { modified: now, deleted: true });
+ },
+ getValidator() {
+ return new BookmarkValidator();
+ }
+};
+
+function BookmarksStore(name, engine) {
+ Store.call(this, name, engine);
+ this._foldersToDelete = new Set();
+ this._atomsToDelete = new Set();
+ // Explicitly nullify our references to our cached services so we don't leak
+ Svc.Obs.add("places-shutdown", function() {
+ for (let query in this._stmts) {
+ let stmt = this._stmts[query];
+ stmt.finalize();
+ }
+ this._stmts = {};
+ }, this);
+}
+BookmarksStore.prototype = {
+ __proto__: Store.prototype,
+
+ itemExists: function BStore_itemExists(id) {
+ return this.idForGUID(id) > 0;
+ },
+
+ applyIncoming: function BStore_applyIncoming(record) {
+ this._log.debug("Applying record " + record.id);
+ let isSpecial = PlacesSyncUtils.bookmarks.ROOTS.includes(record.id);
+
+ if (record.deleted) {
+ if (isSpecial) {
+ this._log.warn("Ignoring deletion for special record " + record.id);
+ return;
+ }
+
+ // Don't bother with pre and post-processing for deletions.
+ Store.prototype.applyIncoming.call(this, record);
+ return;
+ }
+
+ // For special folders we're only interested in child ordering.
+ if (isSpecial && record.children) {
+ this._log.debug("Processing special node: " + record.id);
+ // Reorder children later
+ this._childrenToOrder[record.id] = record.children;
+ return;
+ }
+
+ // Skip malformed records. (Bug 806460.)
+ if (record.type == "query" &&
+ !record.bmkUri) {
+ this._log.warn("Skipping malformed query bookmark: " + record.id);
+ return;
+ }
+
+ // Figure out the local id of the parent GUID if available
+ let parentGUID = record.parentid;
+ if (!parentGUID) {
+ throw "Record " + record.id + " has invalid parentid: " + parentGUID;
+ }
+ this._log.debug("Remote parent is " + parentGUID);
+
+ // Do the normal processing of incoming records
+ Store.prototype.applyIncoming.call(this, record);
+
+ if (record.type == "folder" && record.children) {
+ this._childrenToOrder[record.id] = record.children;
+ }
+ },
+
+ create: function BStore_create(record) {
+ let info = record.toSyncBookmark();
+ // This can throw if we're inserting an invalid or incomplete bookmark.
+ // That's fine; the exception will be caught by `applyIncomingBatch`
+ // without aborting further processing.
+ let item = Async.promiseSpinningly(PlacesSyncUtils.bookmarks.insert(info));
+ if (item) {
+ this._log.debug(`Created ${item.kind} ${item.syncId} under ${
+ item.parentSyncId}`, item);
+ }
+ },
+
+ remove: function BStore_remove(record) {
+ if (PlacesSyncUtils.bookmarks.isRootSyncID(record.id)) {
+ this._log.warn("Refusing to remove special folder " + record.id);
+ return;
+ }
+ let recordKind = Async.promiseSpinningly(
+ PlacesSyncUtils.bookmarks.getKindForSyncId(record.id));
+ let isFolder = recordKind === PlacesSyncUtils.bookmarks.KINDS.FOLDER;
+ this._log.trace(`Buffering removal of item "${record.id}" of type "${recordKind}".`);
+ if (isFolder) {
+ this._foldersToDelete.add(record.id);
+ } else {
+ this._atomsToDelete.add(record.id);
+ }
+ },
+
+ update: function BStore_update(record) {
+ let info = record.toSyncBookmark();
+ let item = Async.promiseSpinningly(PlacesSyncUtils.bookmarks.update(info));
+ if (item) {
+ this._log.debug(`Updated ${item.kind} ${item.syncId} under ${
+ item.parentSyncId}`, item);
+ }
+ },
+
+ _orderChildren: function _orderChildren() {
+ let promises = Object.keys(this._childrenToOrder).map(syncID => {
+ let children = this._childrenToOrder[syncID];
+ return PlacesSyncUtils.bookmarks.order(syncID, children).catch(ex => {
+ this._log.debug(`Could not order children for ${syncID}`, ex);
+ });
+ });
+ Async.promiseSpinningly(Promise.all(promises));
+ },
+
+ // There's some complexity here around pending deletions. Our goals:
+ //
+ // - Don't delete any bookmarks a user has created but not explicitly deleted
+ // (This includes any bookmark that was not a child of the folder at the
+ // time the deletion was recorded, and also bookmarks restored from a backup).
+ // - Don't undelete any bookmark without ensuring the server structure
+ // includes it (see `BookmarkEngine.prototype._shouldReviveRemotelyDeletedRecord`)
+ //
+ // This leads the following approach:
+ //
+ // - Additions, moves, and updates are processed before deletions.
+ // - To do this, all deletion operations are buffered during a sync. Folders
+ // we plan on deleting have their sync id's stored in `this._foldersToDelete`,
+ // and non-folders we plan on deleting have their sync id's stored in
+ // `this._atomsToDelete`.
+ // - The exception to this is the moves that occur to fix the order of bookmark
+ // children, which are performed after we process deletions.
+ // - Non-folders are deleted before folder deletions, so that when we process
+ // folder deletions we know the correct state.
+ // - Remote deletions always win for folders, but do not result in recursive
+ // deletion of children. This is a hack because we're not able to distinguish
+ // between value changes and structural changes to folders, and we don't even
+ // have the old server record to compare to. See `BookmarkEngine`'s
+ // `_shouldReviveRemotelyDeletedRecord` method.
+ // - When a folder is deleted, its remaining children are moved in order to
+ // their closest living ancestor. If this is interrupted (unlikely, but
+ // possible given that we don't perform this operation in a transaction),
+ // we revive the folder.
+ // - Remote deletions can lose for non-folders, but only until we handle
+ // bookmark restores correctly (removing stale state from the server -- this
+ // is to say, if bug 1230011 is fixed, we should never revive bookmarks).
+
+ deletePending: Task.async(function* deletePending() {
+ yield this._deletePendingAtoms();
+ let guidsToUpdate = yield this._deletePendingFolders();
+ this.clearPendingDeletions();
+ return guidsToUpdate;
+ }),
+
+ clearPendingDeletions() {
+ this._foldersToDelete.clear();
+ this._atomsToDelete.clear();
+ },
+
+ _deleteAtom: Task.async(function* _deleteAtom(syncID) {
+ try {
+ let info = yield PlacesSyncUtils.bookmarks.remove(syncID, {
+ preventRemovalOfNonEmptyFolders: true
+ });
+ this._log.trace(`Removed item ${syncID} with type ${info.type}`);
+ } catch (ex) {
+ // Likely already removed.
+ this._log.trace(`Error removing ${syncID}`, ex);
+ }
+ }),
+
+ _deletePendingAtoms() {
+ return Promise.all(
+ [...this._atomsToDelete.values()]
+ .map(syncID => this._deleteAtom(syncID)));
+ },
+
+ // Returns an array of sync ids that need updates.
+ _deletePendingFolders: Task.async(function* _deletePendingFolders() {
+ // To avoid data loss, we don't want to just delete the folder outright,
+ // so we buffer folder deletions and process them at the end (now).
+ //
+ // At this point, any member in the folder that remains is either a folder
+ // pending deletion (which we'll get to in this function), or an item that
+ // should not be deleted. To avoid deleting these items, we first move them
+ // to the parent of the folder we're about to delete.
+ let needUpdate = new Set();
+ for (let syncId of this._foldersToDelete) {
+ let childSyncIds = yield PlacesSyncUtils.bookmarks.fetchChildSyncIds(syncId);
+ if (!childSyncIds.length) {
+ // No children -- just delete the folder.
+ yield this._deleteAtom(syncId)
+ continue;
+ }
+ // We could avoid some redundant work here by finding the nearest
+ // grandparent who isn't present in `this._toDelete`...
+
+ let grandparentSyncId = this.GUIDForId(
+ PlacesUtils.bookmarks.getFolderIdForItem(
+ this.idForGUID(PlacesSyncUtils.bookmarks.syncIdToGuid(syncId))));
+
+ this._log.trace(`Moving ${childSyncIds.length} children of "${syncId}" to ` +
+ `grandparent "${grandparentSyncId}" before deletion.`);
+
+ // Move children out of the parent and into the grandparent
+ yield Promise.all(childSyncIds.map(child => PlacesSyncUtils.bookmarks.update({
+ syncId: child,
+ parentSyncId: grandparentSyncId
+ })));
+
+ // Delete the (now empty) parent
+ try {
+ yield PlacesSyncUtils.bookmarks.remove(syncId, {
+ preventRemovalOfNonEmptyFolders: true
+ });
+ } catch (e) {
+ // We failed, probably because someone added something to this folder
+ // between when we got the children and now (or the database is corrupt,
+ // or something else happened...) This is unlikely, but possible. To
+ // avoid corruption in this case, we need to reupload the record to the
+ // server.
+ //
+ // (Ideally this whole operation would be done in a transaction, and this
+ // wouldn't be possible).
+ needUpdate.add(syncId);
+ }
+
+ // Add children (for parentid) and grandparent (for children list) to set
+ // of records needing an update, *unless* they're marked for deletion.
+ if (!this._foldersToDelete.has(grandparentSyncId)) {
+ needUpdate.add(grandparentSyncId);
+ }
+ for (let childSyncId of childSyncIds) {
+ if (!this._foldersToDelete.has(childSyncId)) {
+ needUpdate.add(childSyncId);
+ }
+ }
+ }
+ return [...needUpdate];
+ }),
+
+ changeItemID: function BStore_changeItemID(oldID, newID) {
+ this._log.debug("Changing GUID " + oldID + " to " + newID);
+
+ Async.promiseSpinningly(PlacesSyncUtils.bookmarks.changeGuid(oldID, newID));
+ },
+
+ // Create a record starting from the weave id (places guid)
+ createRecord: function createRecord(id, collection) {
+ let item = Async.promiseSpinningly(PlacesSyncUtils.bookmarks.fetch(id));
+ if (!item) { // deleted item
+ let record = new PlacesItem(collection, id);
+ record.deleted = true;
+ return record;
+ }
+
+ let recordObj = getTypeObject(item.kind);
+ if (!recordObj) {
+ this._log.warn("Unknown item type, cannot serialize: " + item.kind);
+ recordObj = PlacesItem;
+ }
+ let record = new recordObj(collection, id);
+ record.fromSyncBookmark(item);
+
+ record.sortindex = this._calculateIndex(record);
+
+ return record;
+ },
+
+ _stmts: {},
+ _getStmt: function(query) {
+ if (query in this._stmts) {
+ return this._stmts[query];
+ }
+
+ this._log.trace("Creating SQL statement: " + query);
+ let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase)
+ .DBConnection;
+ return this._stmts[query] = db.createAsyncStatement(query);
+ },
+
+ get _frecencyStm() {
+ return this._getStmt(
+ "SELECT frecency " +
+ "FROM moz_places " +
+ "WHERE url_hash = hash(:url) AND url = :url " +
+ "LIMIT 1");
+ },
+ _frecencyCols: ["frecency"],
+
+ GUIDForId: function GUIDForId(id) {
+ let guid = Async.promiseSpinningly(PlacesUtils.promiseItemGuid(id));
+ return PlacesSyncUtils.bookmarks.guidToSyncId(guid);
+ },
+
+ idForGUID: function idForGUID(guid) {
+ // guid might be a String object rather than a string.
+ guid = PlacesSyncUtils.bookmarks.syncIdToGuid(guid.toString());
+
+ return Async.promiseSpinningly(PlacesUtils.promiseItemId(guid).catch(
+ ex => -1));
+ },
+
+ _calculateIndex: function _calculateIndex(record) {
+ // Ensure folders have a very high sort index so they're not synced last.
+ if (record.type == "folder")
+ return FOLDER_SORTINDEX;
+
+ // For anything directly under the toolbar, give it a boost of more than an
+ // unvisited bookmark
+ let index = 0;
+ if (record.parentid == "toolbar")
+ index += 150;
+
+ // Add in the bookmark's frecency if we have something.
+ if (record.bmkUri != null) {
+ this._frecencyStm.params.url = record.bmkUri;
+ let result = Async.querySpinningly(this._frecencyStm, this._frecencyCols);
+ if (result.length)
+ index += result[0].frecency;
+ }
+
+ return index;
+ },
+
+ getAllIDs: function BStore_getAllIDs() {
+ let items = {};
+
+ let query = `
+ WITH RECURSIVE
+ changeRootContents(id) AS (
+ VALUES ${getChangeRootIds().map(id => `(${id})`).join(", ")}
+ UNION ALL
+ SELECT b.id
+ FROM moz_bookmarks b
+ JOIN changeRootContents c ON b.parent = c.id
+ )
+ SELECT guid
+ FROM changeRootContents
+ JOIN moz_bookmarks USING (id)
+ `;
+
+ let statement = this._getStmt(query);
+ let results = Async.querySpinningly(statement, ["guid"]);
+ for (let { guid } of results) {
+ let syncID = PlacesSyncUtils.bookmarks.guidToSyncId(guid);
+ items[syncID] = { modified: 0, deleted: false };
+ }
+
+ return items;
+ },
+
+ wipe: function BStore_wipe() {
+ this.clearPendingDeletions();
+ Async.promiseSpinningly(Task.spawn(function* () {
+ // Save a backup before clearing out all bookmarks.
+ yield PlacesBackups.create(null, true);
+ yield PlacesUtils.bookmarks.eraseEverything({
+ source: SOURCE_SYNC,
+ });
+ }));
+ }
+};
+
+function BookmarksTracker(name, engine) {
+ this._batchDepth = 0;
+ this._batchSawScoreIncrement = false;
+ Tracker.call(this, name, engine);
+
+ Svc.Obs.add("places-shutdown", this);
+}
+BookmarksTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ //`_ignore` checks the change source for each observer notification, so we
+ // don't want to let the engine ignore all changes during a sync.
+ get ignoreAll() {
+ return false;
+ },
+
+ // Define an empty setter so that the engine doesn't throw a `TypeError`
+ // setting a read-only property.
+ set ignoreAll(value) {},
+
+ startTracking: function() {
+ PlacesUtils.bookmarks.addObserver(this, true);
+ Svc.Obs.add("bookmarks-restore-begin", this);
+ Svc.Obs.add("bookmarks-restore-success", this);
+ Svc.Obs.add("bookmarks-restore-failed", this);
+ },
+
+ stopTracking: function() {
+ PlacesUtils.bookmarks.removeObserver(this);
+ Svc.Obs.remove("bookmarks-restore-begin", this);
+ Svc.Obs.remove("bookmarks-restore-success", this);
+ Svc.Obs.remove("bookmarks-restore-failed", this);
+ },
+
+ observe: function observe(subject, topic, data) {
+ Tracker.prototype.observe.call(this, subject, topic, data);
+
+ switch (topic) {
+ case "bookmarks-restore-begin":
+ this._log.debug("Ignoring changes from importing bookmarks.");
+ break;
+ case "bookmarks-restore-success":
+ this._log.debug("Tracking all items on successful import.");
+
+ this._log.debug("Restore succeeded: wiping server and other clients.");
+ this.engine.service.resetClient([this.name]);
+ this.engine.service.wipeServer([this.name]);
+ this.engine.service.clientsEngine.sendCommand("wipeEngine", [this.name]);
+ break;
+ case "bookmarks-restore-failed":
+ this._log.debug("Tracking all items on failed import.");
+ break;
+ }
+ },
+
+ QueryInterface: XPCOMUtils.generateQI([
+ Ci.nsINavBookmarkObserver,
+ Ci.nsINavBookmarkObserver_MOZILLA_1_9_1_ADDITIONS,
+ Ci.nsISupportsWeakReference
+ ]),
+
+ addChangedID(id, change) {
+ if (!id) {
+ this._log.warn("Attempted to add undefined ID to tracker");
+ return false;
+ }
+ if (this._ignored.includes(id)) {
+ return false;
+ }
+ let shouldSaveChange = false;
+ let currentChange = this.changedIDs[id];
+ if (currentChange) {
+ if (typeof currentChange == "number") {
+ // Allow raw timestamps for backward-compatibility with persisted
+ // changed IDs. The new format uses tuples to track deleted items.
+ shouldSaveChange = currentChange < change.modified;
+ } else {
+ shouldSaveChange = currentChange.modified < change.modified ||
+ currentChange.deleted != change.deleted;
+ }
+ } else {
+ shouldSaveChange = true;
+ }
+ if (shouldSaveChange) {
+ this._saveChangedID(id, change);
+ }
+ return true;
+ },
+
+ /**
+ * Add a bookmark GUID to be uploaded and bump up the sync score.
+ *
+ * @param itemId
+ * The Places item ID of the bookmark to upload.
+ * @param guid
+ * The Places GUID of the bookmark to upload.
+ * @param isTombstone
+ * Whether we're uploading a tombstone for a removed bookmark.
+ */
+ _add: function BMT__add(itemId, guid, isTombstone = false) {
+ let syncID = PlacesSyncUtils.bookmarks.guidToSyncId(guid);
+ let info = { modified: Date.now() / 1000, deleted: isTombstone };
+ if (this.addChangedID(syncID, info)) {
+ this._upScore();
+ }
+ },
+
+ /* Every add/remove/change will trigger a sync for MULTI_DEVICE (except in
+ a batch operation, where we do it at the end of the batch) */
+ _upScore: function BMT__upScore() {
+ if (this._batchDepth == 0) {
+ this.score += SCORE_INCREMENT_XLARGE;
+ } else {
+ this._batchSawScoreIncrement = true;
+ }
+ },
+
+ onItemAdded: function BMT_onItemAdded(itemId, folder, index,
+ itemType, uri, title, dateAdded,
+ guid, parentGuid, source) {
+ if (IGNORED_SOURCES.includes(source)) {
+ return;
+ }
+
+ this._log.trace("onItemAdded: " + itemId);
+ this._add(itemId, guid);
+ this._add(folder, parentGuid);
+ },
+
+ onItemRemoved: function (itemId, parentId, index, type, uri,
+ guid, parentGuid, source) {
+ if (IGNORED_SOURCES.includes(source)) {
+ return;
+ }
+
+ // Ignore changes to tags (folders under the tags folder).
+ if (parentId == PlacesUtils.tagsFolderId) {
+ return;
+ }
+
+ let grandParentId = -1;
+ try {
+ grandParentId = PlacesUtils.bookmarks.getFolderIdForItem(parentId);
+ } catch (ex) {
+ // `getFolderIdForItem` can throw if the item no longer exists, such as
+ // when we've removed a subtree using `removeFolderChildren`.
+ return;
+ }
+
+ // Ignore tag items (the actual instance of a tag for a bookmark).
+ if (grandParentId == PlacesUtils.tagsFolderId) {
+ return;
+ }
+
+ /**
+ * The above checks are incomplete: we can still write tombstones for
+ * items that we don't track, and upload extraneous roots.
+ *
+ * Consider the left pane root: it's a child of the Places root, and has
+ * children and grandchildren. `PlacesUIUtils` can create, delete, and
+ * recreate it as needed. We can't determine ancestors when the root or its
+ * children are deleted, because they've already been removed from the
+ * database when `onItemRemoved` is called. Likewise, we can't check their
+ * "exclude from backup" annos, because they've *also* been removed.
+ *
+ * So, we end up writing tombstones for the left pane queries and left
+ * pane root. For good measure, we'll also upload the Places root, because
+ * it's the parent of the left pane root.
+ *
+ * As a workaround, we can track the parent GUID and reconstruct the item's
+ * ancestry at sync time. This is complicated, and the previous behavior was
+ * already wrong, so we'll wait for bug 1258127 to fix this generally.
+ */
+ this._log.trace("onItemRemoved: " + itemId);
+ this._add(itemId, guid, /* isTombstone */ true);
+ this._add(parentId, parentGuid);
+ },
+
+ _ensureMobileQuery: function _ensureMobileQuery() {
+ let find = val =>
+ PlacesUtils.annotations.getItemsWithAnnotation(ORGANIZERQUERY_ANNO, {}).filter(
+ id => PlacesUtils.annotations.getItemAnnotation(id, ORGANIZERQUERY_ANNO) == val
+ );
+
+ // Don't continue if the Library isn't ready
+ let all = find(ALLBOOKMARKS_ANNO);
+ if (all.length == 0)
+ return;
+
+ let mobile = find(MOBILE_ANNO);
+ let queryURI = Utils.makeURI("place:folder=" + PlacesUtils.mobileFolderId);
+ let title = PlacesBundle.GetStringFromName("MobileBookmarksFolderTitle");
+
+ // Don't add OR remove the mobile bookmarks if there's nothing.
+ if (PlacesUtils.bookmarks.getIdForItemAt(PlacesUtils.mobileFolderId, 0) == -1) {
+ if (mobile.length != 0)
+ PlacesUtils.bookmarks.removeItem(mobile[0], SOURCE_SYNC);
+ }
+ // Add the mobile bookmarks query if it doesn't exist
+ else if (mobile.length == 0) {
+ let query = PlacesUtils.bookmarks.insertBookmark(all[0], queryURI, -1, title, /* guid */ null, SOURCE_SYNC);
+ PlacesUtils.annotations.setItemAnnotation(query, ORGANIZERQUERY_ANNO, MOBILE_ANNO, 0,
+ PlacesUtils.annotations.EXPIRE_NEVER, SOURCE_SYNC);
+ PlacesUtils.annotations.setItemAnnotation(query, PlacesUtils.EXCLUDE_FROM_BACKUP_ANNO, 1, 0,
+ PlacesUtils.annotations.EXPIRE_NEVER, SOURCE_SYNC);
+ }
+ // Make sure the existing query URL and title are correct
+ else {
+ if (!PlacesUtils.bookmarks.getBookmarkURI(mobile[0]).equals(queryURI)) {
+ PlacesUtils.bookmarks.changeBookmarkURI(mobile[0], queryURI,
+ SOURCE_SYNC);
+ }
+ let queryTitle = PlacesUtils.bookmarks.getItemTitle(mobile[0]);
+ if (queryTitle != title) {
+ PlacesUtils.bookmarks.setItemTitle(mobile[0], title, SOURCE_SYNC);
+ }
+ let rootTitle =
+ PlacesUtils.bookmarks.getItemTitle(PlacesUtils.mobileFolderId);
+ if (rootTitle != title) {
+ PlacesUtils.bookmarks.setItemTitle(PlacesUtils.mobileFolderId, title,
+ SOURCE_SYNC);
+ }
+ }
+ },
+
+ // This method is oddly structured, but the idea is to return as quickly as
+ // possible -- this handler gets called *every time* a bookmark changes, for
+ // *each change*.
+ onItemChanged: function BMT_onItemChanged(itemId, property, isAnno, value,
+ lastModified, itemType, parentId,
+ guid, parentGuid, oldValue,
+ source) {
+ if (IGNORED_SOURCES.includes(source)) {
+ return;
+ }
+
+ if (isAnno && (ANNOS_TO_TRACK.indexOf(property) == -1))
+ // Ignore annotations except for the ones that we sync.
+ return;
+
+ // Ignore favicon changes to avoid unnecessary churn.
+ if (property == "favicon")
+ return;
+
+ this._log.trace("onItemChanged: " + itemId +
+ (", " + property + (isAnno? " (anno)" : "")) +
+ (value ? (" = \"" + value + "\"") : ""));
+ this._add(itemId, guid);
+ },
+
+ onItemMoved: function BMT_onItemMoved(itemId, oldParent, oldIndex,
+ newParent, newIndex, itemType,
+ guid, oldParentGuid, newParentGuid,
+ source) {
+ if (IGNORED_SOURCES.includes(source)) {
+ return;
+ }
+
+ this._log.trace("onItemMoved: " + itemId);
+ this._add(oldParent, oldParentGuid);
+ if (oldParent != newParent) {
+ this._add(itemId, guid);
+ this._add(newParent, newParentGuid);
+ }
+
+ // Remove any position annotations now that the user moved the item
+ PlacesUtils.annotations.removeItemAnnotation(itemId,
+ PlacesSyncUtils.bookmarks.SYNC_PARENT_ANNO, SOURCE_SYNC);
+ },
+
+ onBeginUpdateBatch: function () {
+ ++this._batchDepth;
+ },
+ onEndUpdateBatch: function () {
+ if (--this._batchDepth === 0 && this._batchSawScoreIncrement) {
+ this.score += SCORE_INCREMENT_XLARGE;
+ this._batchSawScoreIncrement = false;
+ }
+ },
+ onItemVisited: function () {}
+};
+
+// Returns an array of root IDs to recursively query for synced bookmarks.
+// Items in other roots, including tags and organizer queries, will be
+// ignored.
+function getChangeRootIds() {
+ return [
+ PlacesUtils.bookmarksMenuFolderId,
+ PlacesUtils.toolbarFolderId,
+ PlacesUtils.unfiledBookmarksFolderId,
+ PlacesUtils.mobileFolderId,
+ ];
+}
+
+class BookmarksChangeset extends Changeset {
+ getModifiedTimestamp(id) {
+ let change = this.changes[id];
+ return change ? change.modified : Number.NaN;
+ }
+}
diff --git a/services/sync/modules/engines/clients.js b/services/sync/modules/engines/clients.js
new file mode 100644
index 000000000..3dd679570
--- /dev/null
+++ b/services/sync/modules/engines/clients.js
@@ -0,0 +1,782 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * How does the clients engine work?
+ *
+ * - We use 2 files - commands.json and commands-syncing.json.
+ *
+ * - At sync upload time, we attempt a rename of commands.json to
+ * commands-syncing.json, and ignore errors (helps for crash during sync!).
+ * - We load commands-syncing.json and stash the contents in
+ * _currentlySyncingCommands which lives for the duration of the upload process.
+ * - We use _currentlySyncingCommands to build the outgoing records
+ * - Immediately after successful upload, we delete commands-syncing.json from
+ * disk (and clear _currentlySyncingCommands). We reconcile our local records
+ * with what we just wrote in the server, and add failed IDs commands
+ * back in commands.json
+ * - Any time we need to "save" a command for future syncs, we load
+ * commands.json, update it, and write it back out.
+ */
+
+this.EXPORTED_SYMBOLS = [
+ "ClientEngine",
+ "ClientsRec"
+];
+
+var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-common/stringbundle.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://gre/modules/Services.jsm");
+
+XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts",
+ "resource://gre/modules/FxAccounts.jsm");
+
+const CLIENTS_TTL = 1814400; // 21 days
+const CLIENTS_TTL_REFRESH = 604800; // 7 days
+const STALE_CLIENT_REMOTE_AGE = 604800; // 7 days
+
+const SUPPORTED_PROTOCOL_VERSIONS = ["1.1", "1.5"];
+
+function hasDupeCommand(commands, action) {
+ if (!commands) {
+ return false;
+ }
+ return commands.some(other => other.command == action.command &&
+ Utils.deepEquals(other.args, action.args));
+}
+
+this.ClientsRec = function ClientsRec(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+ClientsRec.prototype = {
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Sync.Record.Clients",
+ ttl: CLIENTS_TTL
+};
+
+Utils.deferGetSet(ClientsRec,
+ "cleartext",
+ ["name", "type", "commands",
+ "version", "protocols",
+ "formfactor", "os", "appPackage", "application", "device",
+ "fxaDeviceId"]);
+
+
+this.ClientEngine = function ClientEngine(service) {
+ SyncEngine.call(this, "Clients", service);
+
+ // Reset the last sync timestamp on every startup so that we fetch all clients
+ this.resetLastSync();
+}
+ClientEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _storeObj: ClientStore,
+ _recordObj: ClientsRec,
+ _trackerObj: ClientsTracker,
+ allowSkippedRecord: false,
+
+ // Always sync client data as it controls other sync behavior
+ get enabled() {
+ return true;
+ },
+
+ get lastRecordUpload() {
+ return Svc.Prefs.get(this.name + ".lastRecordUpload", 0);
+ },
+ set lastRecordUpload(value) {
+ Svc.Prefs.set(this.name + ".lastRecordUpload", Math.floor(value));
+ },
+
+ get remoteClients() {
+ // return all non-stale clients for external consumption.
+ return Object.values(this._store._remoteClients).filter(v => !v.stale);
+ },
+
+ remoteClientExists(id) {
+ let client = this._store._remoteClients[id];
+ return !!(client && !client.stale);
+ },
+
+ // Aggregate some stats on the composition of clients on this account
+ get stats() {
+ let stats = {
+ hasMobile: this.localType == DEVICE_TYPE_MOBILE,
+ names: [this.localName],
+ numClients: 1,
+ };
+
+ for (let id in this._store._remoteClients) {
+ let {name, type, stale} = this._store._remoteClients[id];
+ if (!stale) {
+ stats.hasMobile = stats.hasMobile || type == DEVICE_TYPE_MOBILE;
+ stats.names.push(name);
+ stats.numClients++;
+ }
+ }
+
+ return stats;
+ },
+
+ /**
+ * Obtain information about device types.
+ *
+ * Returns a Map of device types to integer counts.
+ */
+ get deviceTypes() {
+ let counts = new Map();
+
+ counts.set(this.localType, 1);
+
+ for (let id in this._store._remoteClients) {
+ let record = this._store._remoteClients[id];
+ if (record.stale) {
+ continue; // pretend "stale" records don't exist.
+ }
+ let type = record.type;
+ if (!counts.has(type)) {
+ counts.set(type, 0);
+ }
+
+ counts.set(type, counts.get(type) + 1);
+ }
+
+ return counts;
+ },
+
+ get localID() {
+ // Generate a random GUID id we don't have one
+ let localID = Svc.Prefs.get("client.GUID", "");
+ return localID == "" ? this.localID = Utils.makeGUID() : localID;
+ },
+ set localID(value) {
+ Svc.Prefs.set("client.GUID", value);
+ },
+
+ get brandName() {
+ let brand = new StringBundle("chrome://branding/locale/brand.properties");
+ return brand.get("brandShortName");
+ },
+
+ get localName() {
+ let name = Utils.getDeviceName();
+ // If `getDeviceName` returns the default name, set the pref. FxA registers
+ // the device before syncing, so we don't need to update the registration
+ // in this case.
+ Svc.Prefs.set("client.name", name);
+ return name;
+ },
+ set localName(value) {
+ Svc.Prefs.set("client.name", value);
+ // Update the registration in the background.
+ fxAccounts.updateDeviceRegistration().catch(error => {
+ this._log.warn("failed to update fxa device registration", error);
+ });
+ },
+
+ get localType() {
+ return Utils.getDeviceType();
+ },
+ set localType(value) {
+ Svc.Prefs.set("client.type", value);
+ },
+
+ getClientName(id) {
+ if (id == this.localID) {
+ return this.localName;
+ }
+ let client = this._store._remoteClients[id];
+ return client ? client.name : "";
+ },
+
+ getClientFxaDeviceId(id) {
+ if (this._store._remoteClients[id]) {
+ return this._store._remoteClients[id].fxaDeviceId;
+ }
+ return null;
+ },
+
+ isMobile: function isMobile(id) {
+ if (this._store._remoteClients[id])
+ return this._store._remoteClients[id].type == DEVICE_TYPE_MOBILE;
+ return false;
+ },
+
+ _readCommands() {
+ let cb = Async.makeSpinningCallback();
+ Utils.jsonLoad("commands", this, commands => cb(null, commands));
+ return cb.wait() || {};
+ },
+
+ /**
+ * Low level function, do not use directly (use _addClientCommand instead).
+ */
+ _saveCommands(commands) {
+ let cb = Async.makeSpinningCallback();
+ Utils.jsonSave("commands", this, commands, error => {
+ if (error) {
+ this._log.error("Failed to save JSON outgoing commands", error);
+ }
+ cb();
+ });
+ cb.wait();
+ },
+
+ _prepareCommandsForUpload() {
+ let cb = Async.makeSpinningCallback();
+ Utils.jsonMove("commands", "commands-syncing", this).catch(() => {}) // Ignore errors
+ .then(() => {
+ Utils.jsonLoad("commands-syncing", this, commands => cb(null, commands));
+ });
+ return cb.wait() || {};
+ },
+
+ _deleteUploadedCommands() {
+ delete this._currentlySyncingCommands;
+ Async.promiseSpinningly(
+ Utils.jsonRemove("commands-syncing", this).catch(err => {
+ this._log.error("Failed to delete syncing-commands file", err);
+ })
+ );
+ },
+
+ _addClientCommand(clientId, command) {
+ const allCommands = this._readCommands();
+ const clientCommands = allCommands[clientId] || [];
+ if (hasDupeCommand(clientCommands, command)) {
+ return;
+ }
+ allCommands[clientId] = clientCommands.concat(command);
+ this._saveCommands(allCommands);
+ },
+
+ _syncStartup: function _syncStartup() {
+ // Reupload new client record periodically.
+ if (Date.now() / 1000 - this.lastRecordUpload > CLIENTS_TTL_REFRESH) {
+ this._tracker.addChangedID(this.localID);
+ this.lastRecordUpload = Date.now() / 1000;
+ }
+ SyncEngine.prototype._syncStartup.call(this);
+ },
+
+ _processIncoming() {
+ // Fetch all records from the server.
+ this.lastSync = 0;
+ this._incomingClients = {};
+ try {
+ SyncEngine.prototype._processIncoming.call(this);
+ // Since clients are synced unconditionally, any records in the local store
+ // that don't exist on the server must be for disconnected clients. Remove
+ // them, so that we don't upload records with commands for clients that will
+ // never see them. We also do this to filter out stale clients from the
+ // tabs collection, since showing their list of tabs is confusing.
+ for (let id in this._store._remoteClients) {
+ if (!this._incomingClients[id]) {
+ this._log.info(`Removing local state for deleted client ${id}`);
+ this._removeRemoteClient(id);
+ }
+ }
+ // Bug 1264498: Mobile clients don't remove themselves from the clients
+ // collection when the user disconnects Sync, so we mark as stale clients
+ // with the same name that haven't synced in over a week.
+ // (Note we can't simply delete them, or we re-apply them next sync - see
+ // bug 1287687)
+ delete this._incomingClients[this.localID];
+ let names = new Set([this.localName]);
+ for (let id in this._incomingClients) {
+ let record = this._store._remoteClients[id];
+ if (!names.has(record.name)) {
+ names.add(record.name);
+ continue;
+ }
+ let remoteAge = AsyncResource.serverTime - this._incomingClients[id];
+ if (remoteAge > STALE_CLIENT_REMOTE_AGE) {
+ this._log.info(`Hiding stale client ${id} with age ${remoteAge}`);
+ record.stale = true;
+ }
+ }
+ } finally {
+ this._incomingClients = null;
+ }
+ },
+
+ _uploadOutgoing() {
+ this._currentlySyncingCommands = this._prepareCommandsForUpload();
+ const clientWithPendingCommands = Object.keys(this._currentlySyncingCommands);
+ for (let clientId of clientWithPendingCommands) {
+ if (this._store._remoteClients[clientId] || this.localID == clientId) {
+ this._modified.set(clientId, 0);
+ }
+ }
+ SyncEngine.prototype._uploadOutgoing.call(this);
+ },
+
+ _onRecordsWritten(succeeded, failed) {
+ // Reconcile the status of the local records with what we just wrote on the
+ // server
+ for (let id of succeeded) {
+ const commandChanges = this._currentlySyncingCommands[id];
+ if (id == this.localID) {
+ if (this.localCommands) {
+ this.localCommands = this.localCommands.filter(command => !hasDupeCommand(commandChanges, command));
+ }
+ } else {
+ const clientRecord = this._store._remoteClients[id];
+ if (!commandChanges || !clientRecord) {
+ // should be impossible, else we wouldn't have been writing it.
+ this._log.warn("No command/No record changes for a client we uploaded");
+ continue;
+ }
+ // fixup the client record, so our copy of _remoteClients matches what we uploaded.
+ clientRecord.commands = this._store.createRecord(id);
+ // we could do better and pass the reference to the record we just uploaded,
+ // but this will do for now
+ }
+ }
+
+ // Re-add failed commands
+ for (let id of failed) {
+ const commandChanges = this._currentlySyncingCommands[id];
+ if (!commandChanges) {
+ continue;
+ }
+ this._addClientCommand(id, commandChanges);
+ }
+
+ this._deleteUploadedCommands();
+
+ // Notify other devices that their own client collection changed
+ const idsToNotify = succeeded.reduce((acc, id) => {
+ if (id == this.localID) {
+ return acc;
+ }
+ const fxaDeviceId = this.getClientFxaDeviceId(id);
+ return fxaDeviceId ? acc.concat(fxaDeviceId) : acc;
+ }, []);
+ if (idsToNotify.length > 0) {
+ this._notifyCollectionChanged(idsToNotify);
+ }
+ },
+
+ _notifyCollectionChanged(ids) {
+ const message = {
+ version: 1,
+ command: "sync:collection_changed",
+ data: {
+ collections: ["clients"]
+ }
+ };
+ fxAccounts.notifyDevices(ids, message, NOTIFY_TAB_SENT_TTL_SECS);
+ },
+
+ _syncFinish() {
+ // Record histograms for our device types, and also write them to a pref
+ // so non-histogram telemetry (eg, UITelemetry) has easy access to them.
+ for (let [deviceType, count] of this.deviceTypes) {
+ let hid;
+ let prefName = this.name + ".devices.";
+ switch (deviceType) {
+ case "desktop":
+ hid = "WEAVE_DEVICE_COUNT_DESKTOP";
+ prefName += "desktop";
+ break;
+ case "mobile":
+ hid = "WEAVE_DEVICE_COUNT_MOBILE";
+ prefName += "mobile";
+ break;
+ default:
+ this._log.warn(`Unexpected deviceType "${deviceType}" recording device telemetry.`);
+ continue;
+ }
+ Services.telemetry.getHistogramById(hid).add(count);
+ Svc.Prefs.set(prefName, count);
+ }
+ SyncEngine.prototype._syncFinish.call(this);
+ },
+
+ _reconcile: function _reconcile(item) {
+ // Every incoming record is reconciled, so we use this to track the
+ // contents of the collection on the server.
+ this._incomingClients[item.id] = item.modified;
+
+ if (!this._store.itemExists(item.id)) {
+ return true;
+ }
+ // Clients are synced unconditionally, so we'll always have new records.
+ // Unfortunately, this will cause the scheduler to use the immediate sync
+ // interval for the multi-device case, instead of the active interval. We
+ // work around this by updating the record during reconciliation, and
+ // returning false to indicate that the record doesn't need to be applied
+ // later.
+ this._store.update(item);
+ return false;
+ },
+
+ // Treat reset the same as wiping for locally cached clients
+ _resetClient() {
+ this._wipeClient();
+ },
+
+ _wipeClient: function _wipeClient() {
+ SyncEngine.prototype._resetClient.call(this);
+ delete this.localCommands;
+ this._store.wipe();
+ const logRemoveError = err => this._log.warn("Could not delete json file", err);
+ Async.promiseSpinningly(
+ Utils.jsonRemove("commands", this).catch(logRemoveError)
+ .then(Utils.jsonRemove("commands-syncing", this).catch(logRemoveError))
+ );
+ },
+
+ removeClientData: function removeClientData() {
+ let res = this.service.resource(this.engineURL + "/" + this.localID);
+ res.delete();
+ },
+
+ // Override the default behavior to delete bad records from the server.
+ handleHMACMismatch: function handleHMACMismatch(item, mayRetry) {
+ this._log.debug("Handling HMAC mismatch for " + item.id);
+
+ let base = SyncEngine.prototype.handleHMACMismatch.call(this, item, mayRetry);
+ if (base != SyncEngine.kRecoveryStrategy.error)
+ return base;
+
+ // It's a bad client record. Save it to be deleted at the end of the sync.
+ this._log.debug("Bad client record detected. Scheduling for deletion.");
+ this._deleteId(item.id);
+
+ // Neither try again nor error; we're going to delete it.
+ return SyncEngine.kRecoveryStrategy.ignore;
+ },
+
+ /**
+ * A hash of valid commands that the client knows about. The key is a command
+ * and the value is a hash containing information about the command such as
+ * number of arguments and description.
+ */
+ _commands: {
+ resetAll: { args: 0, desc: "Clear temporary local data for all engines" },
+ resetEngine: { args: 1, desc: "Clear temporary local data for engine" },
+ wipeAll: { args: 0, desc: "Delete all client data for all engines" },
+ wipeEngine: { args: 1, desc: "Delete all client data for engine" },
+ logout: { args: 0, desc: "Log out client" },
+ displayURI: { args: 3, desc: "Instruct a client to display a URI" },
+ },
+
+ /**
+ * Sends a command+args pair to a specific client.
+ *
+ * @param command Command string
+ * @param args Array of arguments/data for command
+ * @param clientId Client to send command to
+ */
+ _sendCommandToClient: function sendCommandToClient(command, args, clientId) {
+ this._log.trace("Sending " + command + " to " + clientId);
+
+ let client = this._store._remoteClients[clientId];
+ if (!client) {
+ throw new Error("Unknown remote client ID: '" + clientId + "'.");
+ }
+ if (client.stale) {
+ throw new Error("Stale remote client ID: '" + clientId + "'.");
+ }
+
+ let action = {
+ command: command,
+ args: args,
+ };
+
+ this._log.trace("Client " + clientId + " got a new action: " + [command, args]);
+ this._addClientCommand(clientId, action);
+ this._tracker.addChangedID(clientId);
+ },
+
+ /**
+ * Check if the local client has any remote commands and perform them.
+ *
+ * @return false to abort sync
+ */
+ processIncomingCommands: function processIncomingCommands() {
+ return this._notify("clients:process-commands", "", function() {
+ if (!this.localCommands) {
+ return true;
+ }
+
+ const clearedCommands = this._readCommands()[this.localID];
+ const commands = this.localCommands.filter(command => !hasDupeCommand(clearedCommands, command));
+
+ let URIsToDisplay = [];
+ // Process each command in order.
+ for (let rawCommand of commands) {
+ let {command, args} = rawCommand;
+ this._log.debug("Processing command: " + command + "(" + args + ")");
+
+ let engines = [args[0]];
+ switch (command) {
+ case "resetAll":
+ engines = null;
+ // Fallthrough
+ case "resetEngine":
+ this.service.resetClient(engines);
+ break;
+ case "wipeAll":
+ engines = null;
+ // Fallthrough
+ case "wipeEngine":
+ this.service.wipeClient(engines);
+ break;
+ case "logout":
+ this.service.logout();
+ return false;
+ case "displayURI":
+ let [uri, clientId, title] = args;
+ URIsToDisplay.push({ uri, clientId, title });
+ break;
+ default:
+ this._log.debug("Received an unknown command: " + command);
+ break;
+ }
+ // Add the command to the "cleared" commands list
+ this._addClientCommand(this.localID, rawCommand)
+ }
+ this._tracker.addChangedID(this.localID);
+
+ if (URIsToDisplay.length) {
+ this._handleDisplayURIs(URIsToDisplay);
+ }
+
+ return true;
+ })();
+ },
+
+ /**
+ * Validates and sends a command to a client or all clients.
+ *
+ * Calling this does not actually sync the command data to the server. If the
+ * client already has the command/args pair, it won't receive a duplicate
+ * command.
+ *
+ * @param command
+ * Command to invoke on remote clients
+ * @param args
+ * Array of arguments to give to the command
+ * @param clientId
+ * Client ID to send command to. If undefined, send to all remote
+ * clients.
+ */
+ sendCommand: function sendCommand(command, args, clientId) {
+ let commandData = this._commands[command];
+ // Don't send commands that we don't know about.
+ if (!commandData) {
+ this._log.error("Unknown command to send: " + command);
+ return;
+ }
+ // Don't send a command with the wrong number of arguments.
+ else if (!args || args.length != commandData.args) {
+ this._log.error("Expected " + commandData.args + " args for '" +
+ command + "', but got " + args);
+ return;
+ }
+
+ if (clientId) {
+ this._sendCommandToClient(command, args, clientId);
+ } else {
+ for (let [id, record] of Object.entries(this._store._remoteClients)) {
+ if (!record.stale) {
+ this._sendCommandToClient(command, args, id);
+ }
+ }
+ }
+ },
+
+ /**
+ * Send a URI to another client for display.
+ *
+ * A side effect is the score is increased dramatically to incur an
+ * immediate sync.
+ *
+ * If an unknown client ID is specified, sendCommand() will throw an
+ * Error object.
+ *
+ * @param uri
+ * URI (as a string) to send and display on the remote client
+ * @param clientId
+ * ID of client to send the command to. If not defined, will be sent
+ * to all remote clients.
+ * @param title
+ * Title of the page being sent.
+ */
+ sendURIToClientForDisplay: function sendURIToClientForDisplay(uri, clientId, title) {
+ this._log.info("Sending URI to client: " + uri + " -> " +
+ clientId + " (" + title + ")");
+ this.sendCommand("displayURI", [uri, this.localID, title], clientId);
+
+ this._tracker.score += SCORE_INCREMENT_XLARGE;
+ },
+
+ /**
+ * Handle a bunch of received 'displayURI' commands.
+ *
+ * Interested parties should observe the "weave:engine:clients:display-uris"
+ * topic. The callback will receive an array as the subject parameter
+ * containing objects with the following keys:
+ *
+ * uri URI (string) that is requested for display.
+ * clientId ID of client that sent the command.
+ * title Title of page that loaded URI (likely) corresponds to.
+ *
+ * The 'data' parameter to the callback will not be defined.
+ *
+ * @param uris
+ * An array containing URI objects to display
+ * @param uris[].uri
+ * String URI that was received
+ * @param uris[].clientId
+ * ID of client that sent URI
+ * @param uris[].title
+ * String title of page that URI corresponds to. Older clients may not
+ * send this.
+ */
+ _handleDisplayURIs: function _handleDisplayURIs(uris) {
+ Svc.Obs.notify("weave:engine:clients:display-uris", uris);
+ },
+
+ _removeRemoteClient(id) {
+ delete this._store._remoteClients[id];
+ this._tracker.removeChangedID(id);
+ },
+};
+
+function ClientStore(name, engine) {
+ Store.call(this, name, engine);
+}
+ClientStore.prototype = {
+ __proto__: Store.prototype,
+
+ _remoteClients: {},
+
+ create(record) {
+ this.update(record);
+ },
+
+ update: function update(record) {
+ if (record.id == this.engine.localID) {
+ // Only grab commands from the server; local name/type always wins
+ this.engine.localCommands = record.commands;
+ } else {
+ this._remoteClients[record.id] = record.cleartext;
+ }
+ },
+
+ createRecord: function createRecord(id, collection) {
+ let record = new ClientsRec(collection, id);
+
+ const commandsChanges = this.engine._currentlySyncingCommands ?
+ this.engine._currentlySyncingCommands[id] :
+ [];
+
+ // Package the individual components into a record for the local client
+ if (id == this.engine.localID) {
+ let cb = Async.makeSpinningCallback();
+ fxAccounts.getDeviceId().then(id => cb(null, id), cb);
+ try {
+ record.fxaDeviceId = cb.wait();
+ } catch(error) {
+ this._log.warn("failed to get fxa device id", error);
+ }
+ record.name = this.engine.localName;
+ record.type = this.engine.localType;
+ record.version = Services.appinfo.version;
+ record.protocols = SUPPORTED_PROTOCOL_VERSIONS;
+
+ // Substract the commands we recorded that we've already executed
+ if (commandsChanges && commandsChanges.length &&
+ this.engine.localCommands && this.engine.localCommands.length) {
+ record.commands = this.engine.localCommands.filter(command => !hasDupeCommand(commandsChanges, command));
+ }
+
+ // Optional fields.
+ record.os = Services.appinfo.OS; // "Darwin"
+ record.appPackage = Services.appinfo.ID;
+ record.application = this.engine.brandName // "Nightly"
+
+ // We can't compute these yet.
+ // record.device = ""; // Bug 1100723
+ // record.formfactor = ""; // Bug 1100722
+ } else {
+ record.cleartext = this._remoteClients[id];
+
+ // Add the commands we have to send
+ if (commandsChanges && commandsChanges.length) {
+ const recordCommands = record.cleartext.commands || [];
+ const newCommands = commandsChanges.filter(command => !hasDupeCommand(recordCommands, command));
+ record.cleartext.commands = recordCommands.concat(newCommands);
+ }
+
+ if (record.cleartext.stale) {
+ // It's almost certainly a logic error for us to upload a record we
+ // consider stale, so make log noise, but still remove the flag.
+ this._log.error(`Preparing to upload record ${id} that we consider stale`);
+ delete record.cleartext.stale;
+ }
+ }
+
+ return record;
+ },
+
+ itemExists(id) {
+ return id in this.getAllIDs();
+ },
+
+ getAllIDs: function getAllIDs() {
+ let ids = {};
+ ids[this.engine.localID] = true;
+ for (let id in this._remoteClients)
+ ids[id] = true;
+ return ids;
+ },
+
+ wipe: function wipe() {
+ this._remoteClients = {};
+ },
+};
+
+function ClientsTracker(name, engine) {
+ Tracker.call(this, name, engine);
+ Svc.Obs.add("weave:engine:start-tracking", this);
+ Svc.Obs.add("weave:engine:stop-tracking", this);
+}
+ClientsTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ _enabled: false,
+
+ observe: function observe(subject, topic, data) {
+ switch (topic) {
+ case "weave:engine:start-tracking":
+ if (!this._enabled) {
+ Svc.Prefs.observe("client.name", this);
+ this._enabled = true;
+ }
+ break;
+ case "weave:engine:stop-tracking":
+ if (this._enabled) {
+ Svc.Prefs.ignore("client.name", this);
+ this._enabled = false;
+ }
+ break;
+ case "nsPref:changed":
+ this._log.debug("client.name preference changed");
+ this.addChangedID(Svc.Prefs.get("client.GUID"));
+ this.score += SCORE_INCREMENT_XLARGE;
+ break;
+ }
+ }
+};
diff --git a/services/sync/modules/engines/extension-storage.js b/services/sync/modules/engines/extension-storage.js
new file mode 100644
index 000000000..f8f15b128
--- /dev/null
+++ b/services/sync/modules/engines/extension-storage.js
@@ -0,0 +1,277 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = ['ExtensionStorageEngine', 'EncryptionRemoteTransformer',
+ 'KeyRingEncryptionRemoteTransformer'];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/async.js");
+XPCOMUtils.defineLazyModuleGetter(this, "ExtensionStorageSync",
+ "resource://gre/modules/ExtensionStorageSync.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts",
+ "resource://gre/modules/FxAccounts.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "Task",
+ "resource://gre/modules/Task.jsm");
+
+/**
+ * The Engine that manages syncing for the web extension "storage"
+ * API, and in particular ext.storage.sync.
+ *
+ * ext.storage.sync is implemented using Kinto, so it has mechanisms
+ * for syncing that we do not need to integrate in the Firefox Sync
+ * framework, so this is something of a stub.
+ */
+this.ExtensionStorageEngine = function ExtensionStorageEngine(service) {
+ SyncEngine.call(this, "Extension-Storage", service);
+};
+ExtensionStorageEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _trackerObj: ExtensionStorageTracker,
+ // we don't need these since we implement our own sync logic
+ _storeObj: undefined,
+ _recordObj: undefined,
+
+ syncPriority: 10,
+ allowSkippedRecord: false,
+
+ _sync: function () {
+ return Async.promiseSpinningly(ExtensionStorageSync.syncAll());
+ },
+
+ get enabled() {
+ // By default, we sync extension storage if we sync addons. This
+ // lets us simplify the UX since users probably don't consider
+ // "extension preferences" a separate category of syncing.
+ // However, we also respect engine.extension-storage.force, which
+ // can be set to true or false, if a power user wants to customize
+ // the behavior despite the lack of UI.
+ const forced = Svc.Prefs.get("engine." + this.prefName + ".force", undefined);
+ if (forced !== undefined) {
+ return forced;
+ }
+ return Svc.Prefs.get("engine.addons", false);
+ },
+};
+
+function ExtensionStorageTracker(name, engine) {
+ Tracker.call(this, name, engine);
+}
+ExtensionStorageTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ startTracking: function () {
+ Svc.Obs.add("ext.storage.sync-changed", this);
+ },
+
+ stopTracking: function () {
+ Svc.Obs.remove("ext.storage.sync-changed", this);
+ },
+
+ observe: function (subject, topic, data) {
+ Tracker.prototype.observe.call(this, subject, topic, data);
+
+ if (this.ignoreAll) {
+ return;
+ }
+
+ if (topic !== "ext.storage.sync-changed") {
+ return;
+ }
+
+ // Single adds, removes and changes are not so important on their
+ // own, so let's just increment score a bit.
+ this.score += SCORE_INCREMENT_MEDIUM;
+ },
+
+ // Override a bunch of methods which don't do anything for us.
+ // This is a performance hack.
+ saveChangedIDs: function() {
+ },
+ loadChangedIDs: function() {
+ },
+ ignoreID: function() {
+ },
+ unignoreID: function() {
+ },
+ addChangedID: function() {
+ },
+ removeChangedID: function() {
+ },
+ clearChangedIDs: function() {
+ },
+};
+
+/**
+ * Utility function to enforce an order of fields when computing an HMAC.
+ */
+function ciphertextHMAC(keyBundle, id, IV, ciphertext) {
+ const hasher = keyBundle.sha256HMACHasher;
+ return Utils.bytesAsHex(Utils.digestUTF8(id + IV + ciphertext, hasher));
+}
+
+/**
+ * A "remote transformer" that the Kinto library will use to
+ * encrypt/decrypt records when syncing.
+ *
+ * This is an "abstract base class". Subclass this and override
+ * getKeys() to use it.
+ */
+class EncryptionRemoteTransformer {
+ encode(record) {
+ const self = this;
+ return Task.spawn(function* () {
+ const keyBundle = yield self.getKeys();
+ if (record.ciphertext) {
+ throw new Error("Attempt to reencrypt??");
+ }
+ let id = record.id;
+ if (!record.id) {
+ throw new Error("Record ID is missing or invalid");
+ }
+
+ let IV = Svc.Crypto.generateRandomIV();
+ let ciphertext = Svc.Crypto.encrypt(JSON.stringify(record),
+ keyBundle.encryptionKeyB64, IV);
+ let hmac = ciphertextHMAC(keyBundle, id, IV, ciphertext);
+ const encryptedResult = {ciphertext, IV, hmac, id};
+ if (record.hasOwnProperty("last_modified")) {
+ encryptedResult.last_modified = record.last_modified;
+ }
+ return encryptedResult;
+ });
+ }
+
+ decode(record) {
+ const self = this;
+ return Task.spawn(function* () {
+ if (!record.ciphertext) {
+ // This can happen for tombstones if a record is deleted.
+ if (record.deleted) {
+ return record;
+ }
+ throw new Error("No ciphertext: nothing to decrypt?");
+ }
+ const keyBundle = yield self.getKeys();
+ // Authenticate the encrypted blob with the expected HMAC
+ let computedHMAC = ciphertextHMAC(keyBundle, record.id, record.IV, record.ciphertext);
+
+ if (computedHMAC != record.hmac) {
+ Utils.throwHMACMismatch(record.hmac, computedHMAC);
+ }
+
+ // Handle invalid data here. Elsewhere we assume that cleartext is an object.
+ let cleartext = Svc.Crypto.decrypt(record.ciphertext,
+ keyBundle.encryptionKeyB64, record.IV);
+ let jsonResult = JSON.parse(cleartext);
+ if (!jsonResult || typeof jsonResult !== "object") {
+ throw new Error("Decryption failed: result is <" + jsonResult + ">, not an object.");
+ }
+
+ // Verify that the encrypted id matches the requested record's id.
+ // This should always be true, because we compute the HMAC over
+ // the original record's ID, and that was verified already (above).
+ if (jsonResult.id != record.id) {
+ throw new Error("Record id mismatch: " + jsonResult.id + " != " + record.id);
+ }
+
+ if (record.hasOwnProperty("last_modified")) {
+ jsonResult.last_modified = record.last_modified;
+ }
+
+ return jsonResult;
+ });
+ }
+
+ /**
+ * Retrieve keys to use during encryption.
+ *
+ * Returns a Promise<KeyBundle>.
+ */
+ getKeys() {
+ throw new Error("override getKeys in a subclass");
+ }
+}
+// You can inject this
+EncryptionRemoteTransformer.prototype._fxaService = fxAccounts;
+
+/**
+ * An EncryptionRemoteTransformer that provides a keybundle derived
+ * from the user's kB, suitable for encrypting a keyring.
+ */
+class KeyRingEncryptionRemoteTransformer extends EncryptionRemoteTransformer {
+ getKeys() {
+ const self = this;
+ return Task.spawn(function* () {
+ const user = yield self._fxaService.getSignedInUser();
+ // FIXME: we should permit this if the user is self-hosting
+ // their storage
+ if (!user) {
+ throw new Error("user isn't signed in to FxA; can't sync");
+ }
+
+ if (!user.kB) {
+ throw new Error("user doesn't have kB");
+ }
+
+ let kB = Utils.hexToBytes(user.kB);
+
+ let keyMaterial = CryptoUtils.hkdf(kB, undefined,
+ "identity.mozilla.com/picl/v1/chrome.storage.sync", 2*32);
+ let bundle = new BulkKeyBundle();
+ // [encryptionKey, hmacKey]
+ bundle.keyPair = [keyMaterial.slice(0, 32), keyMaterial.slice(32, 64)];
+ return bundle;
+ });
+ }
+ // Pass through the kbHash field from the unencrypted record. If
+ // encryption fails, we can use this to try to detect whether we are
+ // being compromised or if the record here was encoded with a
+ // different kB.
+ encode(record) {
+ const encodePromise = super.encode(record);
+ return Task.spawn(function* () {
+ const encoded = yield encodePromise;
+ encoded.kbHash = record.kbHash;
+ return encoded;
+ });
+ }
+
+ decode(record) {
+ const decodePromise = super.decode(record);
+ return Task.spawn(function* () {
+ try {
+ return yield decodePromise;
+ } catch (e) {
+ if (Utils.isHMACMismatch(e)) {
+ const currentKBHash = yield ExtensionStorageSync.getKBHash();
+ if (record.kbHash != currentKBHash) {
+ // Some other client encoded this with a kB that we don't
+ // have access to.
+ KeyRingEncryptionRemoteTransformer.throwOutdatedKB(currentKBHash, record.kbHash);
+ }
+ }
+ throw e;
+ }
+ });
+ }
+
+ // Generator and discriminator for KB-is-outdated exceptions.
+ static throwOutdatedKB(shouldBe, is) {
+ throw new Error(`kB hash on record is outdated: should be ${shouldBe}, is ${is}`);
+ }
+
+ static isOutdatedKB(exc) {
+ const kbMessage = "kB hash on record is outdated: ";
+ return exc && exc.message && exc.message.indexOf &&
+ (exc.message.indexOf(kbMessage) == 0);
+ }
+}
diff --git a/services/sync/modules/engines/forms.js b/services/sync/modules/engines/forms.js
new file mode 100644
index 000000000..43f79d4f7
--- /dev/null
+++ b/services/sync/modules/engines/forms.js
@@ -0,0 +1,305 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ['FormEngine', 'FormRec', 'FormValidator'];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/collection_validator.js");
+Cu.import("resource://gre/modules/Log.jsm");
+
+const FORMS_TTL = 3 * 365 * 24 * 60 * 60; // Three years in seconds.
+
+this.FormRec = function FormRec(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+FormRec.prototype = {
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Sync.Record.Form",
+ ttl: FORMS_TTL
+};
+
+Utils.deferGetSet(FormRec, "cleartext", ["name", "value"]);
+
+
+var FormWrapper = {
+ _log: Log.repository.getLogger("Sync.Engine.Forms"),
+
+ _getEntryCols: ["fieldname", "value"],
+ _guidCols: ["guid"],
+
+ _promiseSearch: function(terms, searchData) {
+ return new Promise(resolve => {
+ let results = [];
+ let callbacks = {
+ handleResult(result) {
+ results.push(result);
+ },
+ handleCompletion(reason) {
+ resolve(results);
+ }
+ };
+ Svc.FormHistory.search(terms, searchData, callbacks);
+ })
+ },
+
+ // Do a "sync" search by spinning the event loop until it completes.
+ _searchSpinningly: function(terms, searchData) {
+ return Async.promiseSpinningly(this._promiseSearch(terms, searchData));
+ },
+
+ _updateSpinningly: function(changes) {
+ if (!Svc.FormHistory.enabled) {
+ return; // update isn't going to do anything.
+ }
+ let cb = Async.makeSpinningCallback();
+ let callbacks = {
+ handleCompletion: function(reason) {
+ cb();
+ }
+ };
+ Svc.FormHistory.update(changes, callbacks);
+ return cb.wait();
+ },
+
+ getEntry: function (guid) {
+ let results = this._searchSpinningly(this._getEntryCols, {guid: guid});
+ if (!results.length) {
+ return null;
+ }
+ return {name: results[0].fieldname, value: results[0].value};
+ },
+
+ getGUID: function (name, value) {
+ // Query for the provided entry.
+ let query = { fieldname: name, value: value };
+ let results = this._searchSpinningly(this._guidCols, query);
+ return results.length ? results[0].guid : null;
+ },
+
+ hasGUID: function (guid) {
+ // We could probably use a count function here, but searchSpinningly exists...
+ return this._searchSpinningly(this._guidCols, {guid: guid}).length != 0;
+ },
+
+ replaceGUID: function (oldGUID, newGUID) {
+ let changes = {
+ op: "update",
+ guid: oldGUID,
+ newGuid: newGUID,
+ }
+ this._updateSpinningly(changes);
+ }
+
+};
+
+this.FormEngine = function FormEngine(service) {
+ SyncEngine.call(this, "Forms", service);
+}
+FormEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _storeObj: FormStore,
+ _trackerObj: FormTracker,
+ _recordObj: FormRec,
+ applyIncomingBatchSize: FORMS_STORE_BATCH_SIZE,
+
+ syncPriority: 6,
+
+ get prefName() {
+ return "history";
+ },
+
+ _findDupe: function _findDupe(item) {
+ return FormWrapper.getGUID(item.name, item.value);
+ }
+};
+
+function FormStore(name, engine) {
+ Store.call(this, name, engine);
+}
+FormStore.prototype = {
+ __proto__: Store.prototype,
+
+ _processChange: function (change) {
+ // If this._changes is defined, then we are applying a batch, so we
+ // can defer it.
+ if (this._changes) {
+ this._changes.push(change);
+ return;
+ }
+
+ // Otherwise we must handle the change synchronously, right now.
+ FormWrapper._updateSpinningly(change);
+ },
+
+ applyIncomingBatch: function (records) {
+ // We collect all the changes to be made then apply them all at once.
+ this._changes = [];
+ let failures = Store.prototype.applyIncomingBatch.call(this, records);
+ if (this._changes.length) {
+ FormWrapper._updateSpinningly(this._changes);
+ }
+ delete this._changes;
+ return failures;
+ },
+
+ getAllIDs: function () {
+ let results = FormWrapper._searchSpinningly(["guid"], [])
+ let guids = {};
+ for (let result of results) {
+ guids[result.guid] = true;
+ }
+ return guids;
+ },
+
+ changeItemID: function (oldID, newID) {
+ FormWrapper.replaceGUID(oldID, newID);
+ },
+
+ itemExists: function (id) {
+ return FormWrapper.hasGUID(id);
+ },
+
+ createRecord: function (id, collection) {
+ let record = new FormRec(collection, id);
+ let entry = FormWrapper.getEntry(id);
+ if (entry != null) {
+ record.name = entry.name;
+ record.value = entry.value;
+ } else {
+ record.deleted = true;
+ }
+ return record;
+ },
+
+ create: function (record) {
+ this._log.trace("Adding form record for " + record.name);
+ let change = {
+ op: "add",
+ fieldname: record.name,
+ value: record.value
+ };
+ this._processChange(change);
+ },
+
+ remove: function (record) {
+ this._log.trace("Removing form record: " + record.id);
+ let change = {
+ op: "remove",
+ guid: record.id
+ };
+ this._processChange(change);
+ },
+
+ update: function (record) {
+ this._log.trace("Ignoring form record update request!");
+ },
+
+ wipe: function () {
+ let change = {
+ op: "remove"
+ };
+ FormWrapper._updateSpinningly(change);
+ }
+};
+
+function FormTracker(name, engine) {
+ Tracker.call(this, name, engine);
+}
+FormTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ QueryInterface: XPCOMUtils.generateQI([
+ Ci.nsIObserver,
+ Ci.nsISupportsWeakReference]),
+
+ startTracking: function() {
+ Svc.Obs.add("satchel-storage-changed", this);
+ },
+
+ stopTracking: function() {
+ Svc.Obs.remove("satchel-storage-changed", this);
+ },
+
+ observe: function (subject, topic, data) {
+ Tracker.prototype.observe.call(this, subject, topic, data);
+ if (this.ignoreAll) {
+ return;
+ }
+ switch (topic) {
+ case "satchel-storage-changed":
+ if (data == "formhistory-add" || data == "formhistory-remove") {
+ let guid = subject.QueryInterface(Ci.nsISupportsString).toString();
+ this.trackEntry(guid);
+ }
+ break;
+ }
+ },
+
+ trackEntry: function (guid) {
+ this.addChangedID(guid);
+ this.score += SCORE_INCREMENT_MEDIUM;
+ },
+};
+
+
+class FormsProblemData extends CollectionProblemData {
+ getSummary() {
+ // We don't support syncing deleted form data, so "clientMissing" isn't a problem
+ return super.getSummary().filter(entry =>
+ entry.name !== "clientMissing");
+ }
+}
+
+class FormValidator extends CollectionValidator {
+ constructor() {
+ super("forms", "id", ["name", "value"]);
+ }
+
+ emptyProblemData() {
+ return new FormsProblemData();
+ }
+
+ getClientItems() {
+ return FormWrapper._promiseSearch(["guid", "fieldname", "value"], {});
+ }
+
+ normalizeClientItem(item) {
+ return {
+ id: item.guid,
+ guid: item.guid,
+ name: item.fieldname,
+ fieldname: item.fieldname,
+ value: item.value,
+ original: item,
+ };
+ }
+
+ normalizeServerItem(item) {
+ let res = Object.assign({
+ guid: item.id,
+ fieldname: item.name,
+ original: item,
+ }, item);
+ // Missing `name` or `value` causes the getGUID call to throw
+ if (item.name !== undefined && item.value !== undefined) {
+ let guid = FormWrapper.getGUID(item.name, item.value);
+ if (guid) {
+ res.guid = guid;
+ res.id = guid;
+ res.duped = true;
+ }
+ }
+
+ return res;
+ }
+} \ No newline at end of file
diff --git a/services/sync/modules/engines/history.js b/services/sync/modules/engines/history.js
new file mode 100644
index 000000000..307d484c1
--- /dev/null
+++ b/services/sync/modules/engines/history.js
@@ -0,0 +1,442 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ['HistoryEngine', 'HistoryRec'];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+var Cr = Components.results;
+
+const HISTORY_TTL = 5184000; // 60 days
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm", this);
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+
+this.HistoryRec = function HistoryRec(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+HistoryRec.prototype = {
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Sync.Record.History",
+ ttl: HISTORY_TTL
+};
+
+Utils.deferGetSet(HistoryRec, "cleartext", ["histUri", "title", "visits"]);
+
+
+this.HistoryEngine = function HistoryEngine(service) {
+ SyncEngine.call(this, "History", service);
+}
+HistoryEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _recordObj: HistoryRec,
+ _storeObj: HistoryStore,
+ _trackerObj: HistoryTracker,
+ downloadLimit: MAX_HISTORY_DOWNLOAD,
+ applyIncomingBatchSize: HISTORY_STORE_BATCH_SIZE,
+
+ syncPriority: 7,
+
+ _processIncoming: function (newitems) {
+ // We want to notify history observers that a batch operation is underway
+ // so they don't do lots of work for each incoming record.
+ let observers = PlacesUtils.history.getObservers();
+ function notifyHistoryObservers(notification) {
+ for (let observer of observers) {
+ try {
+ observer[notification]();
+ } catch (ex) { }
+ }
+ }
+ notifyHistoryObservers("onBeginUpdateBatch");
+ try {
+ return SyncEngine.prototype._processIncoming.call(this, newitems);
+ } finally {
+ notifyHistoryObservers("onEndUpdateBatch");
+ }
+ },
+};
+
+function HistoryStore(name, engine) {
+ Store.call(this, name, engine);
+
+ // Explicitly nullify our references to our cached services so we don't leak
+ Svc.Obs.add("places-shutdown", function() {
+ for (let query in this._stmts) {
+ let stmt = this._stmts;
+ stmt.finalize();
+ }
+ this._stmts = {};
+ }, this);
+}
+HistoryStore.prototype = {
+ __proto__: Store.prototype,
+
+ __asyncHistory: null,
+ get _asyncHistory() {
+ if (!this.__asyncHistory) {
+ this.__asyncHistory = Cc["@mozilla.org/browser/history;1"]
+ .getService(Ci.mozIAsyncHistory);
+ }
+ return this.__asyncHistory;
+ },
+
+ _stmts: {},
+ _getStmt: function(query) {
+ if (query in this._stmts) {
+ return this._stmts[query];
+ }
+
+ this._log.trace("Creating SQL statement: " + query);
+ let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase)
+ .DBConnection;
+ return this._stmts[query] = db.createAsyncStatement(query);
+ },
+
+ get _setGUIDStm() {
+ return this._getStmt(
+ "UPDATE moz_places " +
+ "SET guid = :guid " +
+ "WHERE url_hash = hash(:page_url) AND url = :page_url");
+ },
+
+ // Some helper functions to handle GUIDs
+ setGUID: function setGUID(uri, guid) {
+ uri = uri.spec ? uri.spec : uri;
+
+ if (!guid) {
+ guid = Utils.makeGUID();
+ }
+
+ let stmt = this._setGUIDStm;
+ stmt.params.guid = guid;
+ stmt.params.page_url = uri;
+ Async.querySpinningly(stmt);
+ return guid;
+ },
+
+ get _guidStm() {
+ return this._getStmt(
+ "SELECT guid " +
+ "FROM moz_places " +
+ "WHERE url_hash = hash(:page_url) AND url = :page_url");
+ },
+ _guidCols: ["guid"],
+
+ GUIDForUri: function GUIDForUri(uri, create) {
+ let stm = this._guidStm;
+ stm.params.page_url = uri.spec ? uri.spec : uri;
+
+ // Use the existing GUID if it exists
+ let result = Async.querySpinningly(stm, this._guidCols)[0];
+ if (result && result.guid)
+ return result.guid;
+
+ // Give the uri a GUID if it doesn't have one
+ if (create)
+ return this.setGUID(uri);
+ },
+
+ get _visitStm() {
+ return this._getStmt(`/* do not warn (bug 599936) */
+ SELECT visit_type type, visit_date date
+ FROM moz_historyvisits
+ JOIN moz_places h ON h.id = place_id
+ WHERE url_hash = hash(:url) AND url = :url
+ ORDER BY date DESC LIMIT 20`);
+ },
+ _visitCols: ["date", "type"],
+
+ get _urlStm() {
+ return this._getStmt(
+ "SELECT url, title, frecency " +
+ "FROM moz_places " +
+ "WHERE guid = :guid");
+ },
+ _urlCols: ["url", "title", "frecency"],
+
+ get _allUrlStm() {
+ return this._getStmt(
+ "SELECT url " +
+ "FROM moz_places " +
+ "WHERE last_visit_date > :cutoff_date " +
+ "ORDER BY frecency DESC " +
+ "LIMIT :max_results");
+ },
+ _allUrlCols: ["url"],
+
+ // See bug 320831 for why we use SQL here
+ _getVisits: function HistStore__getVisits(uri) {
+ this._visitStm.params.url = uri;
+ return Async.querySpinningly(this._visitStm, this._visitCols);
+ },
+
+ // See bug 468732 for why we use SQL here
+ _findURLByGUID: function HistStore__findURLByGUID(guid) {
+ this._urlStm.params.guid = guid;
+ return Async.querySpinningly(this._urlStm, this._urlCols)[0];
+ },
+
+ changeItemID: function HStore_changeItemID(oldID, newID) {
+ this.setGUID(this._findURLByGUID(oldID).url, newID);
+ },
+
+
+ getAllIDs: function HistStore_getAllIDs() {
+ // Only get places visited within the last 30 days (30*24*60*60*1000ms)
+ this._allUrlStm.params.cutoff_date = (Date.now() - 2592000000) * 1000;
+ this._allUrlStm.params.max_results = MAX_HISTORY_UPLOAD;
+
+ let urls = Async.querySpinningly(this._allUrlStm, this._allUrlCols);
+ let self = this;
+ return urls.reduce(function(ids, item) {
+ ids[self.GUIDForUri(item.url, true)] = item.url;
+ return ids;
+ }, {});
+ },
+
+ applyIncomingBatch: function applyIncomingBatch(records) {
+ let failed = [];
+
+ // Convert incoming records to mozIPlaceInfo objects. Some records can be
+ // ignored or handled directly, so we're rewriting the array in-place.
+ let i, k;
+ for (i = 0, k = 0; i < records.length; i++) {
+ let record = records[k] = records[i];
+ let shouldApply;
+
+ // This is still synchronous I/O for now.
+ try {
+ if (record.deleted) {
+ // Consider using nsIBrowserHistory::removePages() here.
+ this.remove(record);
+ // No further processing needed. Remove it from the list.
+ shouldApply = false;
+ } else {
+ shouldApply = this._recordToPlaceInfo(record);
+ }
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ failed.push(record.id);
+ shouldApply = false;
+ }
+
+ if (shouldApply) {
+ k += 1;
+ }
+ }
+ records.length = k; // truncate array
+
+ // Nothing to do.
+ if (!records.length) {
+ return failed;
+ }
+
+ let updatePlacesCallback = {
+ handleResult: function handleResult() {},
+ handleError: function handleError(resultCode, placeInfo) {
+ failed.push(placeInfo.guid);
+ },
+ handleCompletion: Async.makeSyncCallback()
+ };
+ this._asyncHistory.updatePlaces(records, updatePlacesCallback);
+ Async.waitForSyncCallback(updatePlacesCallback.handleCompletion);
+ return failed;
+ },
+
+ /**
+ * Converts a Sync history record to a mozIPlaceInfo.
+ *
+ * Throws if an invalid record is encountered (invalid URI, etc.),
+ * returns true if the record is to be applied, false otherwise
+ * (no visits to add, etc.),
+ */
+ _recordToPlaceInfo: function _recordToPlaceInfo(record) {
+ // Sort out invalid URIs and ones Places just simply doesn't want.
+ record.uri = Utils.makeURI(record.histUri);
+ if (!record.uri) {
+ this._log.warn("Attempted to process invalid URI, skipping.");
+ throw "Invalid URI in record";
+ }
+
+ if (!Utils.checkGUID(record.id)) {
+ this._log.warn("Encountered record with invalid GUID: " + record.id);
+ return false;
+ }
+ record.guid = record.id;
+
+ if (!PlacesUtils.history.canAddURI(record.uri)) {
+ this._log.trace("Ignoring record " + record.id + " with URI "
+ + record.uri.spec + ": can't add this URI.");
+ return false;
+ }
+
+ // We dupe visits by date and type. So an incoming visit that has
+ // the same timestamp and type as a local one won't get applied.
+ // To avoid creating new objects, we rewrite the query result so we
+ // can simply check for containment below.
+ let curVisits = this._getVisits(record.histUri);
+ let i, k;
+ for (i = 0; i < curVisits.length; i++) {
+ curVisits[i] = curVisits[i].date + "," + curVisits[i].type;
+ }
+
+ // Walk through the visits, make sure we have sound data, and eliminate
+ // dupes. The latter is done by rewriting the array in-place.
+ for (i = 0, k = 0; i < record.visits.length; i++) {
+ let visit = record.visits[k] = record.visits[i];
+
+ if (!visit.date || typeof visit.date != "number") {
+ this._log.warn("Encountered record with invalid visit date: "
+ + visit.date);
+ continue;
+ }
+
+ if (!visit.type ||
+ !Object.values(PlacesUtils.history.TRANSITIONS).includes(visit.type)) {
+ this._log.warn("Encountered record with invalid visit type: " +
+ visit.type + "; ignoring.");
+ continue;
+ }
+
+ // Dates need to be integers.
+ visit.date = Math.round(visit.date);
+
+ if (curVisits.indexOf(visit.date + "," + visit.type) != -1) {
+ // Visit is a dupe, don't increment 'k' so the element will be
+ // overwritten.
+ continue;
+ }
+
+ visit.visitDate = visit.date;
+ visit.transitionType = visit.type;
+ k += 1;
+ }
+ record.visits.length = k; // truncate array
+
+ // No update if there aren't any visits to apply.
+ // mozIAsyncHistory::updatePlaces() wants at least one visit.
+ // In any case, the only thing we could change would be the title
+ // and that shouldn't change without a visit.
+ if (!record.visits.length) {
+ this._log.trace("Ignoring record " + record.id + " with URI "
+ + record.uri.spec + ": no visits to add.");
+ return false;
+ }
+
+ return true;
+ },
+
+ remove: function HistStore_remove(record) {
+ let page = this._findURLByGUID(record.id);
+ if (page == null) {
+ this._log.debug("Page already removed: " + record.id);
+ return;
+ }
+
+ let uri = Utils.makeURI(page.url);
+ PlacesUtils.history.removePage(uri);
+ this._log.trace("Removed page: " + [record.id, page.url, page.title]);
+ },
+
+ itemExists: function HistStore_itemExists(id) {
+ return !!this._findURLByGUID(id);
+ },
+
+ createRecord: function createRecord(id, collection) {
+ let foo = this._findURLByGUID(id);
+ let record = new HistoryRec(collection, id);
+ if (foo) {
+ record.histUri = foo.url;
+ record.title = foo.title;
+ record.sortindex = foo.frecency;
+ record.visits = this._getVisits(record.histUri);
+ } else {
+ record.deleted = true;
+ }
+
+ return record;
+ },
+
+ wipe: function HistStore_wipe() {
+ let cb = Async.makeSyncCallback();
+ PlacesUtils.history.clear().then(result => {cb(null, result)}, err => {cb(err)});
+ return Async.waitForSyncCallback(cb);
+ }
+};
+
+function HistoryTracker(name, engine) {
+ Tracker.call(this, name, engine);
+}
+HistoryTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ startTracking: function() {
+ this._log.info("Adding Places observer.");
+ PlacesUtils.history.addObserver(this, true);
+ },
+
+ stopTracking: function() {
+ this._log.info("Removing Places observer.");
+ PlacesUtils.history.removeObserver(this);
+ },
+
+ QueryInterface: XPCOMUtils.generateQI([
+ Ci.nsINavHistoryObserver,
+ Ci.nsISupportsWeakReference
+ ]),
+
+ onDeleteAffectsGUID: function (uri, guid, reason, source, increment) {
+ if (this.ignoreAll || reason == Ci.nsINavHistoryObserver.REASON_EXPIRED) {
+ return;
+ }
+ this._log.trace(source + ": " + uri.spec + ", reason " + reason);
+ if (this.addChangedID(guid)) {
+ this.score += increment;
+ }
+ },
+
+ onDeleteVisits: function (uri, visitTime, guid, reason) {
+ this.onDeleteAffectsGUID(uri, guid, reason, "onDeleteVisits", SCORE_INCREMENT_SMALL);
+ },
+
+ onDeleteURI: function (uri, guid, reason) {
+ this.onDeleteAffectsGUID(uri, guid, reason, "onDeleteURI", SCORE_INCREMENT_XLARGE);
+ },
+
+ onVisit: function (uri, vid, time, session, referrer, trans, guid) {
+ if (this.ignoreAll) {
+ this._log.trace("ignoreAll: ignoring visit for " + guid);
+ return;
+ }
+
+ this._log.trace("onVisit: " + uri.spec);
+ if (this.addChangedID(guid)) {
+ this.score += SCORE_INCREMENT_SMALL;
+ }
+ },
+
+ onClearHistory: function () {
+ this._log.trace("onClearHistory");
+ // Note that we're going to trigger a sync, but none of the cleared
+ // pages are tracked, so the deletions will not be propagated.
+ // See Bug 578694.
+ this.score += SCORE_INCREMENT_XLARGE;
+ },
+
+ onBeginUpdateBatch: function () {},
+ onEndUpdateBatch: function () {},
+ onPageChanged: function () {},
+ onTitleChanged: function () {},
+ onBeforeDeleteURI: function () {},
+};
diff --git a/services/sync/modules/engines/passwords.js b/services/sync/modules/engines/passwords.js
new file mode 100644
index 000000000..51db49a0a
--- /dev/null
+++ b/services/sync/modules/engines/passwords.js
@@ -0,0 +1,371 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ['PasswordEngine', 'LoginRec', 'PasswordValidator'];
+
+var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/collection_validator.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/async.js");
+
+this.LoginRec = function LoginRec(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+LoginRec.prototype = {
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Sync.Record.Login",
+};
+
+Utils.deferGetSet(LoginRec, "cleartext", [
+ "hostname", "formSubmitURL",
+ "httpRealm", "username", "password", "usernameField", "passwordField",
+ "timeCreated", "timePasswordChanged",
+ ]);
+
+
+this.PasswordEngine = function PasswordEngine(service) {
+ SyncEngine.call(this, "Passwords", service);
+}
+PasswordEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _storeObj: PasswordStore,
+ _trackerObj: PasswordTracker,
+ _recordObj: LoginRec,
+
+ applyIncomingBatchSize: PASSWORDS_STORE_BATCH_SIZE,
+
+ syncPriority: 2,
+
+ _syncFinish: function () {
+ SyncEngine.prototype._syncFinish.call(this);
+
+ // Delete the Weave credentials from the server once.
+ if (!Svc.Prefs.get("deletePwdFxA", false)) {
+ try {
+ let ids = [];
+ for (let host of Utils.getSyncCredentialsHosts()) {
+ for (let info of Services.logins.findLogins({}, host, "", "")) {
+ ids.push(info.QueryInterface(Components.interfaces.nsILoginMetaInfo).guid);
+ }
+ }
+ if (ids.length) {
+ let coll = new Collection(this.engineURL, null, this.service);
+ coll.ids = ids;
+ let ret = coll.delete();
+ this._log.debug("Delete result: " + ret);
+ if (!ret.success && ret.status != 400) {
+ // A non-400 failure means try again next time.
+ return;
+ }
+ } else {
+ this._log.debug("Didn't find any passwords to delete");
+ }
+ // If there were no ids to delete, or we succeeded, or got a 400,
+ // record success.
+ Svc.Prefs.set("deletePwdFxA", true);
+ Svc.Prefs.reset("deletePwd"); // The old prefname we previously used.
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.debug("Password deletes failed", ex);
+ }
+ }
+ },
+
+ _findDupe: function (item) {
+ let login = this._store._nsLoginInfoFromRecord(item);
+ if (!login) {
+ return;
+ }
+
+ let logins = Services.logins.findLogins({}, login.hostname, login.formSubmitURL, login.httpRealm);
+
+ this._store._sleep(0); // Yield back to main thread after synchronous operation.
+
+ // Look for existing logins that match the hostname, but ignore the password.
+ for (let local of logins) {
+ if (login.matches(local, true) && local instanceof Ci.nsILoginMetaInfo) {
+ return local.guid;
+ }
+ }
+ },
+};
+
+function PasswordStore(name, engine) {
+ Store.call(this, name, engine);
+ this._nsLoginInfo = new Components.Constructor("@mozilla.org/login-manager/loginInfo;1", Ci.nsILoginInfo, "init");
+}
+PasswordStore.prototype = {
+ __proto__: Store.prototype,
+
+ _newPropertyBag: function () {
+ return Cc["@mozilla.org/hash-property-bag;1"].createInstance(Ci.nsIWritablePropertyBag2);
+ },
+
+ /**
+ * Return an instance of nsILoginInfo (and, implicitly, nsILoginMetaInfo).
+ */
+ _nsLoginInfoFromRecord: function (record) {
+ function nullUndefined(x) {
+ return (x == undefined) ? null : x;
+ }
+
+ if (record.formSubmitURL && record.httpRealm) {
+ this._log.warn("Record " + record.id + " has both formSubmitURL and httpRealm. Skipping.");
+ return null;
+ }
+
+ // Passing in "undefined" results in an empty string, which later
+ // counts as a value. Explicitly `|| null` these fields according to JS
+ // truthiness. Records with empty strings or null will be unmolested.
+ let info = new this._nsLoginInfo(record.hostname,
+ nullUndefined(record.formSubmitURL),
+ nullUndefined(record.httpRealm),
+ record.username,
+ record.password,
+ record.usernameField,
+ record.passwordField);
+
+ info.QueryInterface(Ci.nsILoginMetaInfo);
+ info.guid = record.id;
+ if (record.timeCreated) {
+ info.timeCreated = record.timeCreated;
+ }
+ if (record.timePasswordChanged) {
+ info.timePasswordChanged = record.timePasswordChanged;
+ }
+
+ return info;
+ },
+
+ _getLoginFromGUID: function (id) {
+ let prop = this._newPropertyBag();
+ prop.setPropertyAsAUTF8String("guid", id);
+
+ let logins = Services.logins.searchLogins({}, prop);
+ this._sleep(0); // Yield back to main thread after synchronous operation.
+
+ if (logins.length > 0) {
+ this._log.trace(logins.length + " items matching " + id + " found.");
+ return logins[0];
+ }
+
+ this._log.trace("No items matching " + id + " found. Ignoring");
+ return null;
+ },
+
+ getAllIDs: function () {
+ let items = {};
+ let logins = Services.logins.getAllLogins({});
+
+ for (let i = 0; i < logins.length; i++) {
+ // Skip over Weave password/passphrase entries.
+ let metaInfo = logins[i].QueryInterface(Ci.nsILoginMetaInfo);
+ if (Utils.getSyncCredentialsHosts().has(metaInfo.hostname)) {
+ continue;
+ }
+
+ items[metaInfo.guid] = metaInfo;
+ }
+
+ return items;
+ },
+
+ changeItemID: function (oldID, newID) {
+ this._log.trace("Changing item ID: " + oldID + " to " + newID);
+
+ let oldLogin = this._getLoginFromGUID(oldID);
+ if (!oldLogin) {
+ this._log.trace("Can't change item ID: item doesn't exist");
+ return;
+ }
+ if (this._getLoginFromGUID(newID)) {
+ this._log.trace("Can't change item ID: new ID already in use");
+ return;
+ }
+
+ let prop = this._newPropertyBag();
+ prop.setPropertyAsAUTF8String("guid", newID);
+
+ Services.logins.modifyLogin(oldLogin, prop);
+ },
+
+ itemExists: function (id) {
+ return !!this._getLoginFromGUID(id);
+ },
+
+ createRecord: function (id, collection) {
+ let record = new LoginRec(collection, id);
+ let login = this._getLoginFromGUID(id);
+
+ if (!login) {
+ record.deleted = true;
+ return record;
+ }
+
+ record.hostname = login.hostname;
+ record.formSubmitURL = login.formSubmitURL;
+ record.httpRealm = login.httpRealm;
+ record.username = login.username;
+ record.password = login.password;
+ record.usernameField = login.usernameField;
+ record.passwordField = login.passwordField;
+
+ // Optional fields.
+ login.QueryInterface(Ci.nsILoginMetaInfo);
+ record.timeCreated = login.timeCreated;
+ record.timePasswordChanged = login.timePasswordChanged;
+
+ return record;
+ },
+
+ create: function (record) {
+ let login = this._nsLoginInfoFromRecord(record);
+ if (!login) {
+ return;
+ }
+
+ this._log.debug("Adding login for " + record.hostname);
+ this._log.trace("httpRealm: " + JSON.stringify(login.httpRealm) + "; " +
+ "formSubmitURL: " + JSON.stringify(login.formSubmitURL));
+ try {
+ Services.logins.addLogin(login);
+ } catch(ex) {
+ this._log.debug(`Adding record ${record.id} resulted in exception`, ex);
+ }
+ },
+
+ remove: function (record) {
+ this._log.trace("Removing login " + record.id);
+
+ let loginItem = this._getLoginFromGUID(record.id);
+ if (!loginItem) {
+ this._log.trace("Asked to remove record that doesn't exist, ignoring");
+ return;
+ }
+
+ Services.logins.removeLogin(loginItem);
+ },
+
+ update: function (record) {
+ let loginItem = this._getLoginFromGUID(record.id);
+ if (!loginItem) {
+ this._log.debug("Skipping update for unknown item: " + record.hostname);
+ return;
+ }
+
+ this._log.debug("Updating " + record.hostname);
+ let newinfo = this._nsLoginInfoFromRecord(record);
+ if (!newinfo) {
+ return;
+ }
+
+ try {
+ Services.logins.modifyLogin(loginItem, newinfo);
+ } catch(ex) {
+ this._log.debug(`Modifying record ${record.id} resulted in exception; not modifying`, ex);
+ }
+ },
+
+ wipe: function () {
+ Services.logins.removeAllLogins();
+ },
+};
+
+function PasswordTracker(name, engine) {
+ Tracker.call(this, name, engine);
+ Svc.Obs.add("weave:engine:start-tracking", this);
+ Svc.Obs.add("weave:engine:stop-tracking", this);
+}
+PasswordTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ startTracking: function () {
+ Svc.Obs.add("passwordmgr-storage-changed", this);
+ },
+
+ stopTracking: function () {
+ Svc.Obs.remove("passwordmgr-storage-changed", this);
+ },
+
+ observe: function (subject, topic, data) {
+ Tracker.prototype.observe.call(this, subject, topic, data);
+
+ if (this.ignoreAll) {
+ return;
+ }
+
+ // A single add, remove or change or removing all items
+ // will trigger a sync for MULTI_DEVICE.
+ switch (data) {
+ case "modifyLogin":
+ subject = subject.QueryInterface(Ci.nsIArray).queryElementAt(1, Ci.nsILoginMetaInfo);
+ // Fall through.
+ case "addLogin":
+ case "removeLogin":
+ // Skip over Weave password/passphrase changes.
+ subject.QueryInterface(Ci.nsILoginMetaInfo).QueryInterface(Ci.nsILoginInfo);
+ if (Utils.getSyncCredentialsHosts().has(subject.hostname)) {
+ break;
+ }
+
+ this.score += SCORE_INCREMENT_XLARGE;
+ this._log.trace(data + ": " + subject.guid);
+ this.addChangedID(subject.guid);
+ break;
+ case "removeAllLogins":
+ this._log.trace(data);
+ this.score += SCORE_INCREMENT_XLARGE;
+ break;
+ }
+ },
+};
+
+class PasswordValidator extends CollectionValidator {
+ constructor() {
+ super("passwords", "id", [
+ "hostname",
+ "formSubmitURL",
+ "httpRealm",
+ "password",
+ "passwordField",
+ "username",
+ "usernameField",
+ ]);
+ }
+
+ getClientItems() {
+ let logins = Services.logins.getAllLogins({});
+ let syncHosts = Utils.getSyncCredentialsHosts()
+ let result = logins.map(l => l.QueryInterface(Ci.nsILoginMetaInfo))
+ .filter(l => !syncHosts.has(l.hostname));
+ return Promise.resolve(result);
+ }
+
+ normalizeClientItem(item) {
+ return {
+ id: item.guid,
+ guid: item.guid,
+ hostname: item.hostname,
+ formSubmitURL: item.formSubmitURL,
+ httpRealm: item.httpRealm,
+ password: item.password,
+ passwordField: item.passwordField,
+ username: item.username,
+ usernameField: item.usernameField,
+ original: item,
+ }
+ }
+
+ normalizeServerItem(item) {
+ return Object.assign({ guid: item.id }, item);
+ }
+}
+
+
diff --git a/services/sync/modules/engines/prefs.js b/services/sync/modules/engines/prefs.js
new file mode 100644
index 000000000..9ceeb9ac6
--- /dev/null
+++ b/services/sync/modules/engines/prefs.js
@@ -0,0 +1,273 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ['PrefsEngine', 'PrefRec'];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+const PREF_SYNC_PREFS_PREFIX = "services.sync.prefs.sync.";
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://gre/modules/LightweightThemeManager.jsm");
+Cu.import("resource://gre/modules/Preferences.jsm");
+
+const PREFS_GUID = CommonUtils.encodeBase64URL(Services.appinfo.ID);
+
+this.PrefRec = function PrefRec(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+PrefRec.prototype = {
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Sync.Record.Pref",
+};
+
+Utils.deferGetSet(PrefRec, "cleartext", ["value"]);
+
+
+this.PrefsEngine = function PrefsEngine(service) {
+ SyncEngine.call(this, "Prefs", service);
+}
+PrefsEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _storeObj: PrefStore,
+ _trackerObj: PrefTracker,
+ _recordObj: PrefRec,
+ version: 2,
+
+ syncPriority: 1,
+ allowSkippedRecord: false,
+
+ getChangedIDs: function () {
+ // No need for a proper timestamp (no conflict resolution needed).
+ let changedIDs = {};
+ if (this._tracker.modified)
+ changedIDs[PREFS_GUID] = 0;
+ return changedIDs;
+ },
+
+ _wipeClient: function () {
+ SyncEngine.prototype._wipeClient.call(this);
+ this.justWiped = true;
+ },
+
+ _reconcile: function (item) {
+ // Apply the incoming item if we don't care about the local data
+ if (this.justWiped) {
+ this.justWiped = false;
+ return true;
+ }
+ return SyncEngine.prototype._reconcile.call(this, item);
+ }
+};
+
+
+function PrefStore(name, engine) {
+ Store.call(this, name, engine);
+ Svc.Obs.add("profile-before-change", function () {
+ this.__prefs = null;
+ }, this);
+}
+PrefStore.prototype = {
+ __proto__: Store.prototype,
+
+ __prefs: null,
+ get _prefs() {
+ if (!this.__prefs) {
+ this.__prefs = new Preferences();
+ }
+ return this.__prefs;
+ },
+
+ _getSyncPrefs: function () {
+ let syncPrefs = Cc["@mozilla.org/preferences-service;1"]
+ .getService(Ci.nsIPrefService)
+ .getBranch(PREF_SYNC_PREFS_PREFIX)
+ .getChildList("", {});
+ // Also sync preferences that determine which prefs get synced.
+ let controlPrefs = syncPrefs.map(pref => PREF_SYNC_PREFS_PREFIX + pref);
+ return controlPrefs.concat(syncPrefs);
+ },
+
+ _isSynced: function (pref) {
+ return pref.startsWith(PREF_SYNC_PREFS_PREFIX) ||
+ this._prefs.get(PREF_SYNC_PREFS_PREFIX + pref, false);
+ },
+
+ _getAllPrefs: function () {
+ let values = {};
+ for (let pref of this._getSyncPrefs()) {
+ if (this._isSynced(pref)) {
+ // Missing and default prefs get the null value.
+ values[pref] = this._prefs.isSet(pref) ? this._prefs.get(pref, null) : null;
+ }
+ }
+ return values;
+ },
+
+ _updateLightWeightTheme (themeID) {
+ let themeObject = null;
+ if (themeID) {
+ themeObject = LightweightThemeManager.getUsedTheme(themeID);
+ }
+ LightweightThemeManager.currentTheme = themeObject;
+ },
+
+ _setAllPrefs: function (values) {
+ let selectedThemeIDPref = "lightweightThemes.selectedThemeID";
+ let selectedThemeIDBefore = this._prefs.get(selectedThemeIDPref, null);
+ let selectedThemeIDAfter = selectedThemeIDBefore;
+
+ // Update 'services.sync.prefs.sync.foo.pref' before 'foo.pref', otherwise
+ // _isSynced returns false when 'foo.pref' doesn't exist (e.g., on a new device).
+ let prefs = Object.keys(values).sort(a => -a.indexOf(PREF_SYNC_PREFS_PREFIX));
+ for (let pref of prefs) {
+ if (!this._isSynced(pref)) {
+ continue;
+ }
+
+ let value = values[pref];
+
+ switch (pref) {
+ // Some special prefs we don't want to set directly.
+ case selectedThemeIDPref:
+ selectedThemeIDAfter = value;
+ break;
+
+ // default is to just set the pref
+ default:
+ if (value == null) {
+ // Pref has gone missing. The best we can do is reset it.
+ this._prefs.reset(pref);
+ } else {
+ try {
+ this._prefs.set(pref, value);
+ } catch(ex) {
+ this._log.trace("Failed to set pref: " + pref + ": " + ex);
+ }
+ }
+ }
+ }
+
+ // Notify the lightweight theme manager if the selected theme has changed.
+ if (selectedThemeIDBefore != selectedThemeIDAfter) {
+ this._updateLightWeightTheme(selectedThemeIDAfter);
+ }
+ },
+
+ getAllIDs: function () {
+ /* We store all prefs in just one WBO, with just one GUID */
+ let allprefs = {};
+ allprefs[PREFS_GUID] = true;
+ return allprefs;
+ },
+
+ changeItemID: function (oldID, newID) {
+ this._log.trace("PrefStore GUID is constant!");
+ },
+
+ itemExists: function (id) {
+ return (id === PREFS_GUID);
+ },
+
+ createRecord: function (id, collection) {
+ let record = new PrefRec(collection, id);
+
+ if (id == PREFS_GUID) {
+ record.value = this._getAllPrefs();
+ } else {
+ record.deleted = true;
+ }
+
+ return record;
+ },
+
+ create: function (record) {
+ this._log.trace("Ignoring create request");
+ },
+
+ remove: function (record) {
+ this._log.trace("Ignoring remove request");
+ },
+
+ update: function (record) {
+ // Silently ignore pref updates that are for other apps.
+ if (record.id != PREFS_GUID)
+ return;
+
+ this._log.trace("Received pref updates, applying...");
+ this._setAllPrefs(record.value);
+ },
+
+ wipe: function () {
+ this._log.trace("Ignoring wipe request");
+ }
+};
+
+function PrefTracker(name, engine) {
+ Tracker.call(this, name, engine);
+ Svc.Obs.add("profile-before-change", this);
+ Svc.Obs.add("weave:engine:start-tracking", this);
+ Svc.Obs.add("weave:engine:stop-tracking", this);
+}
+PrefTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ get modified() {
+ return Svc.Prefs.get("engine.prefs.modified", false);
+ },
+ set modified(value) {
+ Svc.Prefs.set("engine.prefs.modified", value);
+ },
+
+ loadChangedIDs: function loadChangedIDs() {
+ // Don't read changed IDs from disk at start up.
+ },
+
+ clearChangedIDs: function clearChangedIDs() {
+ this.modified = false;
+ },
+
+ __prefs: null,
+ get _prefs() {
+ if (!this.__prefs) {
+ this.__prefs = new Preferences();
+ }
+ return this.__prefs;
+ },
+
+ startTracking: function () {
+ Services.prefs.addObserver("", this, false);
+ },
+
+ stopTracking: function () {
+ this.__prefs = null;
+ Services.prefs.removeObserver("", this);
+ },
+
+ observe: function (subject, topic, data) {
+ Tracker.prototype.observe.call(this, subject, topic, data);
+
+ switch (topic) {
+ case "profile-before-change":
+ this.stopTracking();
+ break;
+ case "nsPref:changed":
+ // Trigger a sync for MULTI-DEVICE for a change that determines
+ // which prefs are synced or a regular pref change.
+ if (data.indexOf(PREF_SYNC_PREFS_PREFIX) == 0 ||
+ this._prefs.get(PREF_SYNC_PREFS_PREFIX + data, false)) {
+ this.score += SCORE_INCREMENT_XLARGE;
+ this.modified = true;
+ this._log.trace("Preference " + data + " changed");
+ }
+ break;
+ }
+ }
+};
diff --git a/services/sync/modules/engines/tabs.js b/services/sync/modules/engines/tabs.js
new file mode 100644
index 000000000..45ece4a23
--- /dev/null
+++ b/services/sync/modules/engines/tabs.js
@@ -0,0 +1,393 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["TabEngine", "TabSetRecord"];
+
+var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+const TABS_TTL = 604800; // 7 days.
+const TAB_ENTRIES_LIMIT = 25; // How many URLs to include in tab history.
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/constants.js");
+
+XPCOMUtils.defineLazyModuleGetter(this, "PrivateBrowsingUtils",
+ "resource://gre/modules/PrivateBrowsingUtils.jsm");
+
+this.TabSetRecord = function TabSetRecord(collection, id) {
+ CryptoWrapper.call(this, collection, id);
+}
+TabSetRecord.prototype = {
+ __proto__: CryptoWrapper.prototype,
+ _logName: "Sync.Record.Tabs",
+ ttl: TABS_TTL,
+};
+
+Utils.deferGetSet(TabSetRecord, "cleartext", ["clientName", "tabs"]);
+
+
+this.TabEngine = function TabEngine(service) {
+ SyncEngine.call(this, "Tabs", service);
+
+ // Reset the client on every startup so that we fetch recent tabs.
+ this._resetClient();
+}
+TabEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _storeObj: TabStore,
+ _trackerObj: TabTracker,
+ _recordObj: TabSetRecord,
+ // A flag to indicate if we have synced in this session. This is to help
+ // consumers of remote tabs that may want to differentiate between "I've an
+ // empty tab list as I haven't yet synced" vs "I've an empty tab list
+ // as there really are no tabs"
+ hasSyncedThisSession: false,
+
+ syncPriority: 3,
+
+ getChangedIDs: function () {
+ // No need for a proper timestamp (no conflict resolution needed).
+ let changedIDs = {};
+ if (this._tracker.modified)
+ changedIDs[this.service.clientsEngine.localID] = 0;
+ return changedIDs;
+ },
+
+ // API for use by Sync UI code to give user choices of tabs to open.
+ getAllClients: function () {
+ return this._store._remoteClients;
+ },
+
+ getClientById: function (id) {
+ return this._store._remoteClients[id];
+ },
+
+ _resetClient: function () {
+ SyncEngine.prototype._resetClient.call(this);
+ this._store.wipe();
+ this._tracker.modified = true;
+ this.hasSyncedThisSession = false;
+ },
+
+ removeClientData: function () {
+ let url = this.engineURL + "/" + this.service.clientsEngine.localID;
+ this.service.resource(url).delete();
+ },
+
+ /**
+ * Return a Set of open URLs.
+ */
+ getOpenURLs: function () {
+ let urls = new Set();
+ for (let entry of this._store.getAllTabs()) {
+ urls.add(entry.urlHistory[0]);
+ }
+ return urls;
+ },
+
+ _reconcile: function (item) {
+ // Skip our own record.
+ // TabStore.itemExists tests only against our local client ID.
+ if (this._store.itemExists(item.id)) {
+ this._log.trace("Ignoring incoming tab item because of its id: " + item.id);
+ return false;
+ }
+
+ return SyncEngine.prototype._reconcile.call(this, item);
+ },
+
+ _syncFinish() {
+ this.hasSyncedThisSession = true;
+ return SyncEngine.prototype._syncFinish.call(this);
+ },
+};
+
+
+function TabStore(name, engine) {
+ Store.call(this, name, engine);
+}
+TabStore.prototype = {
+ __proto__: Store.prototype,
+
+ itemExists: function (id) {
+ return id == this.engine.service.clientsEngine.localID;
+ },
+
+ getWindowEnumerator: function () {
+ return Services.wm.getEnumerator("navigator:browser");
+ },
+
+ shouldSkipWindow: function (win) {
+ return win.closed ||
+ PrivateBrowsingUtils.isWindowPrivate(win);
+ },
+
+ getTabState: function (tab) {
+ return JSON.parse(Svc.Session.getTabState(tab));
+ },
+
+ getAllTabs: function (filter) {
+ let filteredUrls = new RegExp(Svc.Prefs.get("engine.tabs.filteredUrls"), "i");
+
+ let allTabs = [];
+
+ let winEnum = this.getWindowEnumerator();
+ while (winEnum.hasMoreElements()) {
+ let win = winEnum.getNext();
+ if (this.shouldSkipWindow(win)) {
+ continue;
+ }
+
+ for (let tab of win.gBrowser.tabs) {
+ let tabState = this.getTabState(tab);
+
+ // Make sure there are history entries to look at.
+ if (!tabState || !tabState.entries.length) {
+ continue;
+ }
+
+ let acceptable = !filter ? (url) => url :
+ (url) => url && !filteredUrls.test(url);
+
+ let entries = tabState.entries;
+ let index = tabState.index;
+ let current = entries[index - 1];
+
+ // We ignore the tab completely if the current entry url is
+ // not acceptable (we need something accurate to open).
+ if (!acceptable(current.url)) {
+ continue;
+ }
+
+ if (current.url.length >= (MAX_UPLOAD_BYTES - 1000)) {
+ this._log.trace("Skipping over-long URL.");
+ continue;
+ }
+
+ // The element at `index` is the current page. Previous URLs were
+ // previously visited URLs; subsequent URLs are in the 'forward' stack,
+ // which we can't represent in Sync, so we truncate here.
+ let candidates = (entries.length == index) ?
+ entries :
+ entries.slice(0, index);
+
+ let urls = candidates.map((entry) => entry.url)
+ .filter(acceptable)
+ .reverse(); // Because Sync puts current at index 0, and history after.
+
+ // Truncate if necessary.
+ if (urls.length > TAB_ENTRIES_LIMIT) {
+ urls.length = TAB_ENTRIES_LIMIT;
+ }
+
+ allTabs.push({
+ title: current.title || "",
+ urlHistory: urls,
+ icon: tabState.image ||
+ (tabState.attributes && tabState.attributes.image) ||
+ "",
+ lastUsed: Math.floor((tabState.lastAccessed || 0) / 1000),
+ });
+ }
+ }
+
+ return allTabs;
+ },
+
+ createRecord: function (id, collection) {
+ let record = new TabSetRecord(collection, id);
+ record.clientName = this.engine.service.clientsEngine.localName;
+
+ // Sort tabs in descending-used order to grab the most recently used
+ let tabs = this.getAllTabs(true).sort(function (a, b) {
+ return b.lastUsed - a.lastUsed;
+ });
+
+ // Figure out how many tabs we can pack into a payload. Starting with a 28KB
+ // payload, we can estimate various overheads from encryption/JSON/WBO.
+ let size = JSON.stringify(tabs).length;
+ let origLength = tabs.length;
+ const MAX_TAB_SIZE = 20000;
+ if (size > MAX_TAB_SIZE) {
+ // Estimate a little more than the direct fraction to maximize packing
+ let cutoff = Math.ceil(tabs.length * MAX_TAB_SIZE / size);
+ tabs = tabs.slice(0, cutoff + 1);
+
+ // Keep dropping off the last entry until the data fits
+ while (JSON.stringify(tabs).length > MAX_TAB_SIZE)
+ tabs.pop();
+ }
+
+ this._log.trace("Created tabs " + tabs.length + " of " + origLength);
+ tabs.forEach(function (tab) {
+ this._log.trace("Wrapping tab: " + JSON.stringify(tab));
+ }, this);
+
+ record.tabs = tabs;
+ return record;
+ },
+
+ getAllIDs: function () {
+ // Don't report any tabs if all windows are in private browsing for
+ // first syncs.
+ let ids = {};
+ let allWindowsArePrivate = false;
+ let wins = Services.wm.getEnumerator("navigator:browser");
+ while (wins.hasMoreElements()) {
+ if (PrivateBrowsingUtils.isWindowPrivate(wins.getNext())) {
+ // Ensure that at least there is a private window.
+ allWindowsArePrivate = true;
+ } else {
+ // If there is a not private windown then finish and continue.
+ allWindowsArePrivate = false;
+ break;
+ }
+ }
+
+ if (allWindowsArePrivate &&
+ !PrivateBrowsingUtils.permanentPrivateBrowsing) {
+ return ids;
+ }
+
+ ids[this.engine.service.clientsEngine.localID] = true;
+ return ids;
+ },
+
+ wipe: function () {
+ this._remoteClients = {};
+ },
+
+ create: function (record) {
+ this._log.debug("Adding remote tabs from " + record.clientName);
+ this._remoteClients[record.id] = Object.assign({}, record.cleartext, {
+ lastModified: record.modified
+ });
+ },
+
+ update: function (record) {
+ this._log.trace("Ignoring tab updates as local ones win");
+ },
+};
+
+
+function TabTracker(name, engine) {
+ Tracker.call(this, name, engine);
+ Svc.Obs.add("weave:engine:start-tracking", this);
+ Svc.Obs.add("weave:engine:stop-tracking", this);
+
+ // Make sure "this" pointer is always set correctly for event listeners.
+ this.onTab = Utils.bind2(this, this.onTab);
+ this._unregisterListeners = Utils.bind2(this, this._unregisterListeners);
+}
+TabTracker.prototype = {
+ __proto__: Tracker.prototype,
+
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsIObserver]),
+
+ loadChangedIDs: function () {
+ // Don't read changed IDs from disk at start up.
+ },
+
+ clearChangedIDs: function () {
+ this.modified = false;
+ },
+
+ _topics: ["pageshow", "TabOpen", "TabClose", "TabSelect"],
+
+ _registerListenersForWindow: function (window) {
+ this._log.trace("Registering tab listeners in window");
+ for (let topic of this._topics) {
+ window.addEventListener(topic, this.onTab, false);
+ }
+ window.addEventListener("unload", this._unregisterListeners, false);
+ // If it's got a tab browser we can listen for things like navigation.
+ if (window.gBrowser) {
+ window.gBrowser.addProgressListener(this);
+ }
+ },
+
+ _unregisterListeners: function (event) {
+ this._unregisterListenersForWindow(event.target);
+ },
+
+ _unregisterListenersForWindow: function (window) {
+ this._log.trace("Removing tab listeners in window");
+ window.removeEventListener("unload", this._unregisterListeners, false);
+ for (let topic of this._topics) {
+ window.removeEventListener(topic, this.onTab, false);
+ }
+ if (window.gBrowser) {
+ window.gBrowser.removeProgressListener(this);
+ }
+ },
+
+ startTracking: function () {
+ Svc.Obs.add("domwindowopened", this);
+ let wins = Services.wm.getEnumerator("navigator:browser");
+ while (wins.hasMoreElements()) {
+ this._registerListenersForWindow(wins.getNext());
+ }
+ },
+
+ stopTracking: function () {
+ Svc.Obs.remove("domwindowopened", this);
+ let wins = Services.wm.getEnumerator("navigator:browser");
+ while (wins.hasMoreElements()) {
+ this._unregisterListenersForWindow(wins.getNext());
+ }
+ },
+
+ observe: function (subject, topic, data) {
+ Tracker.prototype.observe.call(this, subject, topic, data);
+
+ switch (topic) {
+ case "domwindowopened":
+ let onLoad = () => {
+ subject.removeEventListener("load", onLoad, false);
+ // Only register after the window is done loading to avoid unloads.
+ this._registerListenersForWindow(subject);
+ };
+
+ // Add tab listeners now that a window has opened.
+ subject.addEventListener("load", onLoad, false);
+ break;
+ }
+ },
+
+ onTab: function (event) {
+ if (event.originalTarget.linkedBrowser) {
+ let browser = event.originalTarget.linkedBrowser;
+ if (PrivateBrowsingUtils.isBrowserPrivate(browser) &&
+ !PrivateBrowsingUtils.permanentPrivateBrowsing) {
+ this._log.trace("Ignoring tab event from private browsing.");
+ return;
+ }
+ }
+
+ this._log.trace("onTab event: " + event.type);
+ this.modified = true;
+
+ // For page shows, bump the score 10% of the time, emulating a partial
+ // score. We don't want to sync too frequently. For all other page
+ // events, always bump the score.
+ if (event.type != "pageshow" || Math.random() < .1) {
+ this.score += SCORE_INCREMENT_SMALL;
+ }
+ },
+
+ // web progress listeners.
+ onLocationChange: function (webProgress, request, location, flags) {
+ // We only care about top-level location changes which are not in the same
+ // document.
+ if (webProgress.isTopLevel &&
+ ((flags & Ci.nsIWebProgressListener.LOCATION_CHANGE_SAME_DOCUMENT) == 0)) {
+ this.modified = true;
+ }
+ },
+};
diff --git a/services/sync/modules/identity.js b/services/sync/modules/identity.js
new file mode 100644
index 000000000..b4da8c0bb
--- /dev/null
+++ b/services/sync/modules/identity.js
@@ -0,0 +1,605 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = ["IdentityManager"];
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/async.js");
+
+// Lazy import to prevent unnecessary load on startup.
+for (let symbol of ["BulkKeyBundle", "SyncKeyBundle"]) {
+ XPCOMUtils.defineLazyModuleGetter(this, symbol,
+ "resource://services-sync/keys.js",
+ symbol);
+}
+
+/**
+ * Manages "legacy" identity and authentication for Sync.
+ * See browserid_identity for the Firefox Accounts based identity manager.
+ *
+ * The following entities are managed:
+ *
+ * account - The main Sync/services account. This is typically an email
+ * address.
+ * username - A normalized version of your account. This is what's
+ * transmitted to the server.
+ * basic password - UTF-8 password used for authenticating when using HTTP
+ * basic authentication.
+ * sync key - The main encryption key used by Sync.
+ * sync key bundle - A representation of your sync key.
+ *
+ * When changes are made to entities that are stored in the password manager
+ * (basic password, sync key), those changes are merely staged. To commit them
+ * to the password manager, you'll need to call persistCredentials().
+ *
+ * This type also manages authenticating Sync's network requests. Sync's
+ * network code calls into getRESTRequestAuthenticator and
+ * getResourceAuthenticator (depending on the network layer being used). Each
+ * returns a function which can be used to add authentication information to an
+ * outgoing request.
+ *
+ * In theory, this type supports arbitrary identity and authentication
+ * mechanisms. You can add support for them by monkeypatching the global
+ * instance of this type. Specifically, you'll need to redefine the
+ * aforementioned network code functions to do whatever your authentication
+ * mechanism needs them to do. In addition, you may wish to install custom
+ * functions to support your API. Although, that is certainly not required.
+ * If you do monkeypatch, please be advised that Sync expects the core
+ * attributes to have values. You will need to carry at least account and
+ * username forward. If you do not wish to support one of the built-in
+ * authentication mechanisms, you'll probably want to redefine currentAuthState
+ * and any other function that involves the built-in functionality.
+ */
+this.IdentityManager = function IdentityManager() {
+ this._log = Log.repository.getLogger("Sync.Identity");
+ this._log.Level = Log.Level[Svc.Prefs.get("log.logger.identity")];
+
+ this._basicPassword = null;
+ this._basicPasswordAllowLookup = true;
+ this._basicPasswordUpdated = false;
+ this._syncKey = null;
+ this._syncKeyAllowLookup = true;
+ this._syncKeySet = false;
+ this._syncKeyBundle = null;
+}
+IdentityManager.prototype = {
+ _log: null,
+
+ _basicPassword: null,
+ _basicPasswordAllowLookup: true,
+ _basicPasswordUpdated: false,
+
+ _syncKey: null,
+ _syncKeyAllowLookup: true,
+ _syncKeySet: false,
+
+ _syncKeyBundle: null,
+
+ /**
+ * Initialize the identity provider.
+ */
+ initialize: function() {
+ // Nothing to do for this identity provider.
+ },
+
+ finalize: function() {
+ // Nothing to do for this identity provider.
+ },
+
+ /**
+ * Called whenever Service.logout() is called.
+ */
+ logout: function() {
+ // nothing to do for this identity provider.
+ },
+
+ /**
+ * Ensure the user is logged in. Returns a promise that resolves when
+ * the user is logged in, or is rejected if the login attempt has failed.
+ */
+ ensureLoggedIn: function() {
+ // nothing to do for this identity provider
+ return Promise.resolve();
+ },
+
+ get account() {
+ return Svc.Prefs.get("account", this.username);
+ },
+
+ /**
+ * Sets the active account name.
+ *
+ * This should almost always be called in favor of setting username, as
+ * username is derived from account.
+ *
+ * Changing the account name has the side-effect of wiping out stored
+ * credentials. Keep in mind that persistCredentials() will need to be called
+ * to flush the changes to disk.
+ *
+ * Set this value to null to clear out identity information.
+ */
+ set account(value) {
+ if (value) {
+ value = value.toLowerCase();
+ Svc.Prefs.set("account", value);
+ } else {
+ Svc.Prefs.reset("account");
+ }
+
+ this.username = this.usernameFromAccount(value);
+ },
+
+ get username() {
+ return Svc.Prefs.get("username", null);
+ },
+
+ /**
+ * Set the username value.
+ *
+ * Changing the username has the side-effect of wiping credentials.
+ */
+ set username(value) {
+ if (value) {
+ value = value.toLowerCase();
+
+ if (value == this.username) {
+ return;
+ }
+
+ Svc.Prefs.set("username", value);
+ } else {
+ Svc.Prefs.reset("username");
+ }
+
+ // If we change the username, we interpret this as a major change event
+ // and wipe out the credentials.
+ this._log.info("Username changed. Removing stored credentials.");
+ this.resetCredentials();
+ },
+
+ /**
+ * Resets/Drops all credentials we hold for the current user.
+ */
+ resetCredentials: function() {
+ this.basicPassword = null;
+ this.resetSyncKey();
+ },
+
+ /**
+ * Resets/Drops the sync key we hold for the current user.
+ */
+ resetSyncKey: function() {
+ this.syncKey = null;
+ // syncKeyBundle cleared as a result of setting syncKey.
+ },
+
+ /**
+ * Obtains the HTTP Basic auth password.
+ *
+ * Returns a string if set or null if it is not set.
+ */
+ get basicPassword() {
+ if (this._basicPasswordAllowLookup) {
+ // We need a username to find the credentials.
+ let username = this.username;
+ if (!username) {
+ return null;
+ }
+
+ for (let login of this._getLogins(PWDMGR_PASSWORD_REALM)) {
+ if (login.username.toLowerCase() == username) {
+ // It should already be UTF-8 encoded, but we don't take any chances.
+ this._basicPassword = Utils.encodeUTF8(login.password);
+ }
+ }
+
+ this._basicPasswordAllowLookup = false;
+ }
+
+ return this._basicPassword;
+ },
+
+ /**
+ * Set the HTTP basic password to use.
+ *
+ * Changes will not persist unless persistSyncCredentials() is called.
+ */
+ set basicPassword(value) {
+ // Wiping out value.
+ if (!value) {
+ this._log.info("Basic password has no value. Removing.");
+ this._basicPassword = null;
+ this._basicPasswordUpdated = true;
+ this._basicPasswordAllowLookup = false;
+ return;
+ }
+
+ let username = this.username;
+ if (!username) {
+ throw new Error("basicPassword cannot be set before username.");
+ }
+
+ this._log.info("Basic password being updated.");
+ this._basicPassword = Utils.encodeUTF8(value);
+ this._basicPasswordUpdated = true;
+ },
+
+ /**
+ * Obtain the Sync Key.
+ *
+ * This returns a 26 character "friendly" Base32 encoded string on success or
+ * null if no Sync Key could be found.
+ *
+ * If the Sync Key hasn't been set in this session, this will look in the
+ * password manager for the sync key.
+ */
+ get syncKey() {
+ if (this._syncKeyAllowLookup) {
+ let username = this.username;
+ if (!username) {
+ return null;
+ }
+
+ for (let login of this._getLogins(PWDMGR_PASSPHRASE_REALM)) {
+ if (login.username.toLowerCase() == username) {
+ this._syncKey = login.password;
+ }
+ }
+
+ this._syncKeyAllowLookup = false;
+ }
+
+ return this._syncKey;
+ },
+
+ /**
+ * Set the active Sync Key.
+ *
+ * If being set to null, the Sync Key and its derived SyncKeyBundle are
+ * removed. However, the Sync Key won't be deleted from the password manager
+ * until persistSyncCredentials() is called.
+ *
+ * If a value is provided, it should be a 26 or 32 character "friendly"
+ * Base32 string for which Utils.isPassphrase() returns true.
+ *
+ * A side-effect of setting the Sync Key is that a SyncKeyBundle is
+ * generated. For historical reasons, this will silently error out if the
+ * value is not a proper Sync Key (!Utils.isPassphrase()). This should be
+ * fixed in the future (once service.js is more sane) to throw if the passed
+ * value is not valid.
+ */
+ set syncKey(value) {
+ if (!value) {
+ this._log.info("Sync Key has no value. Deleting.");
+ this._syncKey = null;
+ this._syncKeyBundle = null;
+ this._syncKeyUpdated = true;
+ return;
+ }
+
+ if (!this.username) {
+ throw new Error("syncKey cannot be set before username.");
+ }
+
+ this._log.info("Sync Key being updated.");
+ this._syncKey = value;
+
+ // Clear any cached Sync Key Bundle and regenerate it.
+ this._syncKeyBundle = null;
+ let bundle = this.syncKeyBundle;
+
+ this._syncKeyUpdated = true;
+ },
+
+ /**
+ * Obtain the active SyncKeyBundle.
+ *
+ * This returns a SyncKeyBundle representing a key pair derived from the
+ * Sync Key on success. If no Sync Key is present or if the Sync Key is not
+ * valid, this returns null.
+ *
+ * The SyncKeyBundle should be treated as immutable.
+ */
+ get syncKeyBundle() {
+ // We can't obtain a bundle without a username set.
+ if (!this.username) {
+ this._log.warn("Attempted to obtain Sync Key Bundle with no username set!");
+ return null;
+ }
+
+ if (!this.syncKey) {
+ this._log.warn("Attempted to obtain Sync Key Bundle with no Sync Key " +
+ "set!");
+ return null;
+ }
+
+ if (!this._syncKeyBundle) {
+ try {
+ this._syncKeyBundle = new SyncKeyBundle(this.username, this.syncKey);
+ } catch (ex) {
+ this._log.warn("Failed to create sync bundle", ex);
+ return null;
+ }
+ }
+
+ return this._syncKeyBundle;
+ },
+
+ /**
+ * The current state of the auth credentials.
+ *
+ * This essentially validates that enough credentials are available to use
+ * Sync.
+ */
+ get currentAuthState() {
+ if (!this.username) {
+ return LOGIN_FAILED_NO_USERNAME;
+ }
+
+ if (Utils.mpLocked()) {
+ return STATUS_OK;
+ }
+
+ if (!this.basicPassword) {
+ return LOGIN_FAILED_NO_PASSWORD;
+ }
+
+ if (!this.syncKey) {
+ return LOGIN_FAILED_NO_PASSPHRASE;
+ }
+
+ // If we have a Sync Key but no bundle, bundle creation failed, which
+ // implies a bad Sync Key.
+ if (!this.syncKeyBundle) {
+ return LOGIN_FAILED_INVALID_PASSPHRASE;
+ }
+
+ return STATUS_OK;
+ },
+
+ /**
+ * Verify the current auth state, unlocking the master-password if necessary.
+ *
+ * Returns a promise that resolves with the current auth state after
+ * attempting to unlock.
+ */
+ unlockAndVerifyAuthState: function() {
+ // Try to fetch the passphrase - this will prompt for MP unlock as a
+ // side-effect...
+ try {
+ this.syncKey;
+ } catch (ex) {
+ this._log.debug("Fetching passphrase threw " + ex +
+ "; assuming master password locked.");
+ return Promise.resolve(MASTER_PASSWORD_LOCKED);
+ }
+ return Promise.resolve(STATUS_OK);
+ },
+
+ /**
+ * Persist credentials to password store.
+ *
+ * When credentials are updated, they are changed in memory only. This will
+ * need to be called to save them to the underlying password store.
+ *
+ * If the password store is locked (e.g. if the master password hasn't been
+ * entered), this could throw an exception.
+ */
+ persistCredentials: function persistCredentials(force) {
+ if (this._basicPasswordUpdated || force) {
+ if (this._basicPassword) {
+ this._setLogin(PWDMGR_PASSWORD_REALM, this.username,
+ this._basicPassword);
+ } else {
+ for (let login of this._getLogins(PWDMGR_PASSWORD_REALM)) {
+ Services.logins.removeLogin(login);
+ }
+ }
+
+ this._basicPasswordUpdated = false;
+ }
+
+ if (this._syncKeyUpdated || force) {
+ if (this._syncKey) {
+ this._setLogin(PWDMGR_PASSPHRASE_REALM, this.username, this._syncKey);
+ } else {
+ for (let login of this._getLogins(PWDMGR_PASSPHRASE_REALM)) {
+ Services.logins.removeLogin(login);
+ }
+ }
+
+ this._syncKeyUpdated = false;
+ }
+
+ },
+
+ /**
+ * Deletes the Sync Key from the system.
+ */
+ deleteSyncKey: function deleteSyncKey() {
+ this.syncKey = null;
+ this.persistCredentials();
+ },
+
+ hasBasicCredentials: function hasBasicCredentials() {
+ // Because JavaScript.
+ return this.username && this.basicPassword && true;
+ },
+
+ /**
+ * Pre-fetches any information that might help with migration away from this
+ * identity. Called after every sync and is really just an optimization that
+ * allows us to avoid a network request for when we actually need the
+ * migration info.
+ */
+ prefetchMigrationSentinel: function(service) {
+ // Try and fetch the migration sentinel - it will end up in the recordManager
+ // cache.
+ try {
+ service.recordManager.get(service.storageURL + "meta/fxa_credentials");
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.warn("Failed to pre-fetch the migration sentinel", ex);
+ }
+ },
+
+ /**
+ * Obtains the array of basic logins from nsiPasswordManager.
+ */
+ _getLogins: function _getLogins(realm) {
+ return Services.logins.findLogins({}, PWDMGR_HOST, null, realm);
+ },
+
+ /**
+ * Set a login in the password manager.
+ *
+ * This has the side-effect of deleting any other logins for the specified
+ * realm.
+ */
+ _setLogin: function _setLogin(realm, username, password) {
+ let exists = false;
+ for (let login of this._getLogins(realm)) {
+ if (login.username == username && login.password == password) {
+ exists = true;
+ } else {
+ this._log.debug("Pruning old login for " + username + " from " + realm);
+ Services.logins.removeLogin(login);
+ }
+ }
+
+ if (exists) {
+ return;
+ }
+
+ this._log.debug("Updating saved password for " + username + " in " +
+ realm);
+
+ let loginInfo = new Components.Constructor(
+ "@mozilla.org/login-manager/loginInfo;1", Ci.nsILoginInfo, "init");
+ let login = new loginInfo(PWDMGR_HOST, null, realm, username,
+ password, "", "");
+ Services.logins.addLogin(login);
+ },
+
+ /**
+ * Return credentials hosts for this identity only.
+ */
+ _getSyncCredentialsHosts: function() {
+ return Utils.getSyncCredentialsHostsLegacy();
+ },
+
+ /**
+ * Deletes Sync credentials from the password manager.
+ */
+ deleteSyncCredentials: function deleteSyncCredentials() {
+ for (let host of this._getSyncCredentialsHosts()) {
+ let logins = Services.logins.findLogins({}, host, "", "");
+ for (let login of logins) {
+ Services.logins.removeLogin(login);
+ }
+ }
+
+ // Wait until after store is updated in case it fails.
+ this._basicPassword = null;
+ this._basicPasswordAllowLookup = true;
+ this._basicPasswordUpdated = false;
+
+ this._syncKey = null;
+ // this._syncKeyBundle is nullified as part of _syncKey setter.
+ this._syncKeyAllowLookup = true;
+ this._syncKeyUpdated = false;
+ },
+
+ usernameFromAccount: function usernameFromAccount(value) {
+ // If we encounter characters not allowed by the API (as found for
+ // instance in an email address), hash the value.
+ if (value && value.match(/[^A-Z0-9._-]/i)) {
+ return Utils.sha1Base32(value.toLowerCase()).toLowerCase();
+ }
+
+ return value ? value.toLowerCase() : value;
+ },
+
+ /**
+ * Obtain a function to be used for adding auth to Resource HTTP requests.
+ */
+ getResourceAuthenticator: function getResourceAuthenticator() {
+ if (this.hasBasicCredentials()) {
+ return this._onResourceRequestBasic.bind(this);
+ }
+
+ return null;
+ },
+
+ /**
+ * Helper method to return an authenticator for basic Resource requests.
+ */
+ getBasicResourceAuthenticator:
+ function getBasicResourceAuthenticator(username, password) {
+
+ return function basicAuthenticator(resource) {
+ let value = "Basic " + btoa(username + ":" + password);
+ return {headers: {authorization: value}};
+ };
+ },
+
+ _onResourceRequestBasic: function _onResourceRequestBasic(resource) {
+ let value = "Basic " + btoa(this.username + ":" + this.basicPassword);
+ return {headers: {authorization: value}};
+ },
+
+ _onResourceRequestMAC: function _onResourceRequestMAC(resource, method) {
+ // TODO Get identifier and key from somewhere.
+ let identifier;
+ let key;
+ let result = Utils.computeHTTPMACSHA1(identifier, key, method, resource.uri);
+
+ return {headers: {authorization: result.header}};
+ },
+
+ /**
+ * Obtain a function to be used for adding auth to RESTRequest instances.
+ */
+ getRESTRequestAuthenticator: function getRESTRequestAuthenticator() {
+ if (this.hasBasicCredentials()) {
+ return this.onRESTRequestBasic.bind(this);
+ }
+
+ return null;
+ },
+
+ onRESTRequestBasic: function onRESTRequestBasic(request) {
+ let up = this.username + ":" + this.basicPassword;
+ request.setHeader("authorization", "Basic " + btoa(up));
+ },
+
+ createClusterManager: function(service) {
+ Cu.import("resource://services-sync/stages/cluster.js");
+ return new ClusterManager(service);
+ },
+
+ offerSyncOptions: function () {
+ // Do nothing for Sync 1.1.
+ return {accepted: true};
+ },
+
+ // Tell Sync what the login status should be if it saw a 401 fetching
+ // info/collections as part of login verification (typically immediately
+ // after login.)
+ // In our case it means an authoritative "password is incorrect".
+ loginStatusFromVerification404() {
+ return LOGIN_FAILED_LOGIN_REJECTED;
+ }
+
+};
diff --git a/services/sync/modules/jpakeclient.js b/services/sync/modules/jpakeclient.js
new file mode 100644
index 000000000..625dc91b6
--- /dev/null
+++ b/services/sync/modules/jpakeclient.js
@@ -0,0 +1,773 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["JPAKEClient", "SendCredentialsController"];
+
+var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/util.js");
+
+const REQUEST_TIMEOUT = 60; // 1 minute
+const KEYEXCHANGE_VERSION = 3;
+
+const JPAKE_SIGNERID_SENDER = "sender";
+const JPAKE_SIGNERID_RECEIVER = "receiver";
+const JPAKE_LENGTH_SECRET = 8;
+const JPAKE_LENGTH_CLIENTID = 256;
+const JPAKE_VERIFY_VALUE = "0123456789ABCDEF";
+
+
+/**
+ * Client to exchange encrypted data using the J-PAKE algorithm.
+ * The exchange between two clients of this type looks like this:
+ *
+ *
+ * Mobile Server Desktop
+ * ===================================================================
+ * |
+ * retrieve channel <---------------|
+ * generate random secret |
+ * show PIN = secret + channel | ask user for PIN
+ * upload Mobile's message 1 ------>|
+ * |----> retrieve Mobile's message 1
+ * |<----- upload Desktop's message 1
+ * retrieve Desktop's message 1 <---|
+ * upload Mobile's message 2 ------>|
+ * |----> retrieve Mobile's message 2
+ * | compute key
+ * |<----- upload Desktop's message 2
+ * retrieve Desktop's message 2 <---|
+ * compute key |
+ * encrypt known value ------------>|
+ * |-------> retrieve encrypted value
+ * | verify against local known value
+ *
+ * At this point Desktop knows whether the PIN was entered correctly.
+ * If it wasn't, Desktop deletes the session. If it was, the account
+ * setup can proceed. If Desktop doesn't yet have an account set up,
+ * it will keep the channel open and let the user connect to or
+ * create an account.
+ *
+ * | encrypt credentials
+ * |<------------- upload credentials
+ * retrieve credentials <-----------|
+ * verify HMAC |
+ * decrypt credentials |
+ * delete session ----------------->|
+ * start syncing |
+ *
+ *
+ * Create a client object like so:
+ *
+ * let client = new JPAKEClient(controller);
+ *
+ * The 'controller' object must implement the following methods:
+ *
+ * displayPIN(pin) -- Called when a PIN has been generated and is ready to
+ * be displayed to the user. Only called on the client where the pairing
+ * was initiated with 'receiveNoPIN()'.
+ *
+ * onPairingStart() -- Called when the pairing has started and messages are
+ * being sent back and forth over the channel. Only called on the client
+ * where the pairing was initiated with 'receiveNoPIN()'.
+ *
+ * onPaired() -- Called when the device pairing has been established and
+ * we're ready to send the credentials over. To do that, the controller
+ * must call 'sendAndComplete()' while the channel is active.
+ *
+ * onComplete(data) -- Called after transfer has been completed. On
+ * the sending side this is called with no parameter and as soon as the
+ * data has been uploaded. This does not mean the receiving side has
+ * actually retrieved them yet.
+ *
+ * onAbort(error) -- Called whenever an error is encountered. All errors lead
+ * to an abort and the process has to be started again on both sides.
+ *
+ * To start the data transfer on the receiving side, call
+ *
+ * client.receiveNoPIN();
+ *
+ * This will allocate a new channel on the server, generate a PIN, have it
+ * displayed and then do the transfer once the protocol has been completed
+ * with the sending side.
+ *
+ * To initiate the transfer from the sending side, call
+ *
+ * client.pairWithPIN(pin, true);
+ *
+ * Once the pairing has been established, the controller's 'onPaired()' method
+ * will be called. To then transmit the data, call
+ *
+ * client.sendAndComplete(data);
+ *
+ * To abort the process, call
+ *
+ * client.abort();
+ *
+ * Note that after completion or abort, the 'client' instance may not be reused.
+ * You will have to create a new one in case you'd like to restart the process.
+ */
+this.JPAKEClient = function JPAKEClient(controller) {
+ this.controller = controller;
+
+ this._log = Log.repository.getLogger("Sync.JPAKEClient");
+ this._log.level = Log.Level[Svc.Prefs.get(
+ "log.logger.service.jpakeclient", "Debug")];
+
+ this._serverURL = Svc.Prefs.get("jpake.serverURL");
+ this._pollInterval = Svc.Prefs.get("jpake.pollInterval");
+ this._maxTries = Svc.Prefs.get("jpake.maxTries");
+ if (this._serverURL.slice(-1) != "/") {
+ this._serverURL += "/";
+ }
+
+ this._jpake = Cc["@mozilla.org/services-crypto/sync-jpake;1"]
+ .createInstance(Ci.nsISyncJPAKE);
+
+ this._setClientID();
+}
+JPAKEClient.prototype = {
+
+ _chain: Async.chain,
+
+ /*
+ * Public API
+ */
+
+ /**
+ * Initiate pairing and receive data without providing a PIN. The PIN will
+ * be generated and passed on to the controller to be displayed to the user.
+ *
+ * This is typically called on mobile devices where typing is tedious.
+ */
+ receiveNoPIN: function receiveNoPIN() {
+ this._my_signerid = JPAKE_SIGNERID_RECEIVER;
+ this._their_signerid = JPAKE_SIGNERID_SENDER;
+
+ this._secret = this._createSecret();
+
+ // Allow a large number of tries first while we wait for the PIN
+ // to be entered on the other device.
+ this._maxTries = Svc.Prefs.get("jpake.firstMsgMaxTries");
+ this._chain(this._getChannel,
+ this._computeStepOne,
+ this._putStep,
+ this._getStep,
+ function(callback) {
+ // We fetched the first response from the other client.
+ // Notify controller of the pairing starting.
+ Utils.nextTick(this.controller.onPairingStart,
+ this.controller);
+
+ // Now we can switch back to the smaller timeout.
+ this._maxTries = Svc.Prefs.get("jpake.maxTries");
+ callback();
+ },
+ this._computeStepTwo,
+ this._putStep,
+ this._getStep,
+ this._computeFinal,
+ this._computeKeyVerification,
+ this._putStep,
+ function(callback) {
+ // Allow longer time-out for the last message.
+ this._maxTries = Svc.Prefs.get("jpake.lastMsgMaxTries");
+ callback();
+ },
+ this._getStep,
+ this._decryptData,
+ this._complete)();
+ },
+
+ /**
+ * Initiate pairing based on the PIN entered by the user.
+ *
+ * This is typically called on desktop devices where typing is easier than
+ * on mobile.
+ *
+ * @param pin
+ * 12 character string (in human-friendly base32) containing the PIN
+ * entered by the user.
+ * @param expectDelay
+ * Flag that indicates that a significant delay between the pairing
+ * and the sending should be expected. v2 and earlier of the protocol
+ * did not allow for this and the pairing to a v2 or earlier client
+ * will be aborted if this flag is 'true'.
+ */
+ pairWithPIN: function pairWithPIN(pin, expectDelay) {
+ this._my_signerid = JPAKE_SIGNERID_SENDER;
+ this._their_signerid = JPAKE_SIGNERID_RECEIVER;
+
+ this._channel = pin.slice(JPAKE_LENGTH_SECRET);
+ this._channelURL = this._serverURL + this._channel;
+ this._secret = pin.slice(0, JPAKE_LENGTH_SECRET);
+
+ this._chain(this._computeStepOne,
+ this._getStep,
+ function (callback) {
+ // Ensure that the other client can deal with a delay for
+ // the last message if that's requested by the caller.
+ if (!expectDelay) {
+ return callback();
+ }
+ if (!this._incoming.version || this._incoming.version < 3) {
+ return this.abort(JPAKE_ERROR_DELAYUNSUPPORTED);
+ }
+ return callback();
+ },
+ this._putStep,
+ this._computeStepTwo,
+ this._getStep,
+ this._putStep,
+ this._computeFinal,
+ this._getStep,
+ this._verifyPairing)();
+ },
+
+ /**
+ * Send data after a successful pairing.
+ *
+ * @param obj
+ * Object containing the data to send. It will be serialized as JSON.
+ */
+ sendAndComplete: function sendAndComplete(obj) {
+ if (!this._paired || this._finished) {
+ this._log.error("Can't send data, no active pairing!");
+ throw "No active pairing!";
+ }
+ this._data = JSON.stringify(obj);
+ this._chain(this._encryptData,
+ this._putStep,
+ this._complete)();
+ },
+
+ /**
+ * Abort the current pairing. The channel on the server will be deleted
+ * if the abort wasn't due to a network or server error. The controller's
+ * 'onAbort()' method is notified in all cases.
+ *
+ * @param error [optional]
+ * Error constant indicating the reason for the abort. Defaults to
+ * user abort.
+ */
+ abort: function abort(error) {
+ this._log.debug("Aborting...");
+ this._finished = true;
+ let self = this;
+
+ // Default to "user aborted".
+ if (!error) {
+ error = JPAKE_ERROR_USERABORT;
+ }
+
+ if (error == JPAKE_ERROR_CHANNEL ||
+ error == JPAKE_ERROR_NETWORK ||
+ error == JPAKE_ERROR_NODATA) {
+ Utils.nextTick(function() { this.controller.onAbort(error); }, this);
+ } else {
+ this._reportFailure(error, function() { self.controller.onAbort(error); });
+ }
+ },
+
+ /*
+ * Utilities
+ */
+
+ _setClientID: function _setClientID() {
+ let rng = Cc["@mozilla.org/security/random-generator;1"]
+ .createInstance(Ci.nsIRandomGenerator);
+ let bytes = rng.generateRandomBytes(JPAKE_LENGTH_CLIENTID / 2);
+ this._clientID = bytes.map(byte => ("0" + byte.toString(16)).slice(-2)).join("");
+ },
+
+ _createSecret: function _createSecret() {
+ // 0-9a-z without 1,l,o,0
+ const key = "23456789abcdefghijkmnpqrstuvwxyz";
+ let rng = Cc["@mozilla.org/security/random-generator;1"]
+ .createInstance(Ci.nsIRandomGenerator);
+ let bytes = rng.generateRandomBytes(JPAKE_LENGTH_SECRET);
+ return bytes.map(byte => key[Math.floor(byte * key.length / 256)]).join("");
+ },
+
+ _newRequest: function _newRequest(uri) {
+ let request = new RESTRequest(uri);
+ request.setHeader("X-KeyExchange-Id", this._clientID);
+ request.timeout = REQUEST_TIMEOUT;
+ return request;
+ },
+
+ /*
+ * Steps of J-PAKE procedure
+ */
+
+ _getChannel: function _getChannel(callback) {
+ this._log.trace("Requesting channel.");
+ let request = this._newRequest(this._serverURL + "new_channel");
+ request.get(Utils.bind2(this, function handleChannel(error) {
+ if (this._finished) {
+ return;
+ }
+
+ if (error) {
+ this._log.error("Error acquiring channel ID. " + error);
+ this.abort(JPAKE_ERROR_CHANNEL);
+ return;
+ }
+ if (request.response.status != 200) {
+ this._log.error("Error acquiring channel ID. Server responded with HTTP "
+ + request.response.status);
+ this.abort(JPAKE_ERROR_CHANNEL);
+ return;
+ }
+
+ try {
+ this._channel = JSON.parse(request.response.body);
+ } catch (ex) {
+ this._log.error("Server responded with invalid JSON.");
+ this.abort(JPAKE_ERROR_CHANNEL);
+ return;
+ }
+ this._log.debug("Using channel " + this._channel);
+ this._channelURL = this._serverURL + this._channel;
+
+ // Don't block on UI code.
+ let pin = this._secret + this._channel;
+ Utils.nextTick(function() { this.controller.displayPIN(pin); }, this);
+ callback();
+ }));
+ },
+
+ // Generic handler for uploading data.
+ _putStep: function _putStep(callback) {
+ this._log.trace("Uploading message " + this._outgoing.type);
+ let request = this._newRequest(this._channelURL);
+ if (this._their_etag) {
+ request.setHeader("If-Match", this._their_etag);
+ } else {
+ request.setHeader("If-None-Match", "*");
+ }
+ request.put(this._outgoing, Utils.bind2(this, function (error) {
+ if (this._finished) {
+ return;
+ }
+
+ if (error) {
+ this._log.error("Error uploading data. " + error);
+ this.abort(JPAKE_ERROR_NETWORK);
+ return;
+ }
+ if (request.response.status != 200) {
+ this._log.error("Could not upload data. Server responded with HTTP "
+ + request.response.status);
+ this.abort(JPAKE_ERROR_SERVER);
+ return;
+ }
+ // There's no point in returning early here since the next step will
+ // always be a GET so let's pause for twice the poll interval.
+ this._my_etag = request.response.headers["etag"];
+ Utils.namedTimer(function () { callback(); }, this._pollInterval * 2,
+ this, "_pollTimer");
+ }));
+ },
+
+ // Generic handler for polling for and retrieving data.
+ _pollTries: 0,
+ _getStep: function _getStep(callback) {
+ this._log.trace("Retrieving next message.");
+ let request = this._newRequest(this._channelURL);
+ if (this._my_etag) {
+ request.setHeader("If-None-Match", this._my_etag);
+ }
+
+ request.get(Utils.bind2(this, function (error) {
+ if (this._finished) {
+ return;
+ }
+
+ if (error) {
+ this._log.error("Error fetching data. " + error);
+ this.abort(JPAKE_ERROR_NETWORK);
+ return;
+ }
+
+ if (request.response.status == 304) {
+ this._log.trace("Channel hasn't been updated yet. Will try again later.");
+ if (this._pollTries >= this._maxTries) {
+ this._log.error("Tried for " + this._pollTries + " times, aborting.");
+ this.abort(JPAKE_ERROR_TIMEOUT);
+ return;
+ }
+ this._pollTries += 1;
+ Utils.namedTimer(function() { this._getStep(callback); },
+ this._pollInterval, this, "_pollTimer");
+ return;
+ }
+ this._pollTries = 0;
+
+ if (request.response.status == 404) {
+ this._log.error("No data found in the channel.");
+ this.abort(JPAKE_ERROR_NODATA);
+ return;
+ }
+ if (request.response.status != 200) {
+ this._log.error("Could not retrieve data. Server responded with HTTP "
+ + request.response.status);
+ this.abort(JPAKE_ERROR_SERVER);
+ return;
+ }
+
+ this._their_etag = request.response.headers["etag"];
+ if (!this._their_etag) {
+ this._log.error("Server did not supply ETag for message: "
+ + request.response.body);
+ this.abort(JPAKE_ERROR_SERVER);
+ return;
+ }
+
+ try {
+ this._incoming = JSON.parse(request.response.body);
+ } catch (ex) {
+ this._log.error("Server responded with invalid JSON.");
+ this.abort(JPAKE_ERROR_INVALID);
+ return;
+ }
+ this._log.trace("Fetched message " + this._incoming.type);
+ callback();
+ }));
+ },
+
+ _reportFailure: function _reportFailure(reason, callback) {
+ this._log.debug("Reporting failure to server.");
+ let request = this._newRequest(this._serverURL + "report");
+ request.setHeader("X-KeyExchange-Cid", this._channel);
+ request.setHeader("X-KeyExchange-Log", reason);
+ request.post("", Utils.bind2(this, function (error) {
+ if (error) {
+ this._log.warn("Report failed: " + error);
+ } else if (request.response.status != 200) {
+ this._log.warn("Report failed. Server responded with HTTP "
+ + request.response.status);
+ }
+
+ // Do not block on errors, we're done or aborted by now anyway.
+ callback();
+ }));
+ },
+
+ _computeStepOne: function _computeStepOne(callback) {
+ this._log.trace("Computing round 1.");
+ let gx1 = {};
+ let gv1 = {};
+ let r1 = {};
+ let gx2 = {};
+ let gv2 = {};
+ let r2 = {};
+ try {
+ this._jpake.round1(this._my_signerid, gx1, gv1, r1, gx2, gv2, r2);
+ } catch (ex) {
+ this._log.error("JPAKE round 1 threw: " + ex);
+ this.abort(JPAKE_ERROR_INTERNAL);
+ return;
+ }
+ let one = {gx1: gx1.value,
+ gx2: gx2.value,
+ zkp_x1: {gr: gv1.value, b: r1.value, id: this._my_signerid},
+ zkp_x2: {gr: gv2.value, b: r2.value, id: this._my_signerid}};
+ this._outgoing = {type: this._my_signerid + "1",
+ version: KEYEXCHANGE_VERSION,
+ payload: one};
+ this._log.trace("Generated message " + this._outgoing.type);
+ callback();
+ },
+
+ _computeStepTwo: function _computeStepTwo(callback) {
+ this._log.trace("Computing round 2.");
+ if (this._incoming.type != this._their_signerid + "1") {
+ this._log.error("Invalid round 1 message: "
+ + JSON.stringify(this._incoming));
+ this.abort(JPAKE_ERROR_WRONGMESSAGE);
+ return;
+ }
+
+ let step1 = this._incoming.payload;
+ if (!step1 || !step1.zkp_x1 || step1.zkp_x1.id != this._their_signerid
+ || !step1.zkp_x2 || step1.zkp_x2.id != this._their_signerid) {
+ this._log.error("Invalid round 1 payload: " + JSON.stringify(step1));
+ this.abort(JPAKE_ERROR_WRONGMESSAGE);
+ return;
+ }
+
+ let A = {};
+ let gvA = {};
+ let rA = {};
+
+ try {
+ this._jpake.round2(this._their_signerid, this._secret,
+ step1.gx1, step1.zkp_x1.gr, step1.zkp_x1.b,
+ step1.gx2, step1.zkp_x2.gr, step1.zkp_x2.b,
+ A, gvA, rA);
+ } catch (ex) {
+ this._log.error("JPAKE round 2 threw: " + ex);
+ this.abort(JPAKE_ERROR_INTERNAL);
+ return;
+ }
+ let two = {A: A.value,
+ zkp_A: {gr: gvA.value, b: rA.value, id: this._my_signerid}};
+ this._outgoing = {type: this._my_signerid + "2",
+ version: KEYEXCHANGE_VERSION,
+ payload: two};
+ this._log.trace("Generated message " + this._outgoing.type);
+ callback();
+ },
+
+ _computeFinal: function _computeFinal(callback) {
+ if (this._incoming.type != this._their_signerid + "2") {
+ this._log.error("Invalid round 2 message: "
+ + JSON.stringify(this._incoming));
+ this.abort(JPAKE_ERROR_WRONGMESSAGE);
+ return;
+ }
+
+ let step2 = this._incoming.payload;
+ if (!step2 || !step2.zkp_A || step2.zkp_A.id != this._their_signerid) {
+ this._log.error("Invalid round 2 payload: " + JSON.stringify(step1));
+ this.abort(JPAKE_ERROR_WRONGMESSAGE);
+ return;
+ }
+
+ let aes256Key = {};
+ let hmac256Key = {};
+
+ try {
+ this._jpake.final(step2.A, step2.zkp_A.gr, step2.zkp_A.b, HMAC_INPUT,
+ aes256Key, hmac256Key);
+ } catch (ex) {
+ this._log.error("JPAKE final round threw: " + ex);
+ this.abort(JPAKE_ERROR_INTERNAL);
+ return;
+ }
+
+ this._crypto_key = aes256Key.value;
+ let hmac_key = Utils.makeHMACKey(Utils.safeAtoB(hmac256Key.value));
+ this._hmac_hasher = Utils.makeHMACHasher(Ci.nsICryptoHMAC.SHA256, hmac_key);
+
+ callback();
+ },
+
+ _computeKeyVerification: function _computeKeyVerification(callback) {
+ this._log.trace("Encrypting key verification value.");
+ let iv, ciphertext;
+ try {
+ iv = Svc.Crypto.generateRandomIV();
+ ciphertext = Svc.Crypto.encrypt(JPAKE_VERIFY_VALUE,
+ this._crypto_key, iv);
+ } catch (ex) {
+ this._log.error("Failed to encrypt key verification value.");
+ this.abort(JPAKE_ERROR_INTERNAL);
+ return;
+ }
+ this._outgoing = {type: this._my_signerid + "3",
+ version: KEYEXCHANGE_VERSION,
+ payload: {ciphertext: ciphertext, IV: iv}};
+ this._log.trace("Generated message " + this._outgoing.type);
+ callback();
+ },
+
+ _verifyPairing: function _verifyPairing(callback) {
+ this._log.trace("Verifying their key.");
+ if (this._incoming.type != this._their_signerid + "3") {
+ this._log.error("Invalid round 3 data: " +
+ JSON.stringify(this._incoming));
+ this.abort(JPAKE_ERROR_WRONGMESSAGE);
+ return;
+ }
+ let step3 = this._incoming.payload;
+ let ciphertext;
+ try {
+ ciphertext = Svc.Crypto.encrypt(JPAKE_VERIFY_VALUE,
+ this._crypto_key, step3.IV);
+ if (ciphertext != step3.ciphertext) {
+ throw "Key mismatch!";
+ }
+ } catch (ex) {
+ this._log.error("Keys don't match!");
+ this.abort(JPAKE_ERROR_KEYMISMATCH);
+ return;
+ }
+
+ this._log.debug("Verified pairing!");
+ this._paired = true;
+ Utils.nextTick(function () { this.controller.onPaired(); }, this);
+ callback();
+ },
+
+ _encryptData: function _encryptData(callback) {
+ this._log.trace("Encrypting data.");
+ let iv, ciphertext, hmac;
+ try {
+ iv = Svc.Crypto.generateRandomIV();
+ ciphertext = Svc.Crypto.encrypt(this._data, this._crypto_key, iv);
+ hmac = Utils.bytesAsHex(Utils.digestUTF8(ciphertext, this._hmac_hasher));
+ } catch (ex) {
+ this._log.error("Failed to encrypt data.");
+ this.abort(JPAKE_ERROR_INTERNAL);
+ return;
+ }
+ this._outgoing = {type: this._my_signerid + "3",
+ version: KEYEXCHANGE_VERSION,
+ payload: {ciphertext: ciphertext, IV: iv, hmac: hmac}};
+ this._log.trace("Generated message " + this._outgoing.type);
+ callback();
+ },
+
+ _decryptData: function _decryptData(callback) {
+ this._log.trace("Verifying their key.");
+ if (this._incoming.type != this._their_signerid + "3") {
+ this._log.error("Invalid round 3 data: "
+ + JSON.stringify(this._incoming));
+ this.abort(JPAKE_ERROR_WRONGMESSAGE);
+ return;
+ }
+ let step3 = this._incoming.payload;
+ try {
+ let hmac = Utils.bytesAsHex(
+ Utils.digestUTF8(step3.ciphertext, this._hmac_hasher));
+ if (hmac != step3.hmac) {
+ throw "HMAC validation failed!";
+ }
+ } catch (ex) {
+ this._log.error("HMAC validation failed.");
+ this.abort(JPAKE_ERROR_KEYMISMATCH);
+ return;
+ }
+
+ this._log.trace("Decrypting data.");
+ let cleartext;
+ try {
+ cleartext = Svc.Crypto.decrypt(step3.ciphertext, this._crypto_key,
+ step3.IV);
+ } catch (ex) {
+ this._log.error("Failed to decrypt data.");
+ this.abort(JPAKE_ERROR_INTERNAL);
+ return;
+ }
+
+ try {
+ this._newData = JSON.parse(cleartext);
+ } catch (ex) {
+ this._log.error("Invalid data data: " + JSON.stringify(cleartext));
+ this.abort(JPAKE_ERROR_INVALID);
+ return;
+ }
+
+ this._log.trace("Decrypted data.");
+ callback();
+ },
+
+ _complete: function _complete() {
+ this._log.debug("Exchange completed.");
+ this._finished = true;
+ Utils.nextTick(function () { this.controller.onComplete(this._newData); },
+ this);
+ }
+
+};
+
+
+/**
+ * Send credentials over an active J-PAKE channel.
+ *
+ * This object is designed to take over as the JPAKEClient controller,
+ * presumably replacing one that is UI-based which would either cause
+ * DOM objects to leak or the JPAKEClient to be GC'ed when the DOM
+ * context disappears. This object stays alive for the duration of the
+ * transfer by being strong-ref'ed as an nsIObserver.
+ *
+ * Credentials are sent after the first sync has been completed
+ * (successfully or not.)
+ *
+ * Usage:
+ *
+ * jpakeclient.controller = new SendCredentialsController(jpakeclient,
+ * service);
+ *
+ */
+this.SendCredentialsController =
+ function SendCredentialsController(jpakeclient, service) {
+ this._log = Log.repository.getLogger("Sync.SendCredentialsController");
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.service.main")];
+
+ this._log.trace("Loading.");
+ this.jpakeclient = jpakeclient;
+ this.service = service;
+
+ // Register ourselves as observers the first Sync finishing (either
+ // successfully or unsuccessfully, we don't care) or for removing
+ // this device's sync configuration, in case that happens while we
+ // haven't finished the first sync yet.
+ Services.obs.addObserver(this, "weave:service:sync:finish", false);
+ Services.obs.addObserver(this, "weave:service:sync:error", false);
+ Services.obs.addObserver(this, "weave:service:start-over", false);
+}
+SendCredentialsController.prototype = {
+
+ unload: function unload() {
+ this._log.trace("Unloading.");
+ try {
+ Services.obs.removeObserver(this, "weave:service:sync:finish");
+ Services.obs.removeObserver(this, "weave:service:sync:error");
+ Services.obs.removeObserver(this, "weave:service:start-over");
+ } catch (ex) {
+ // Ignore.
+ }
+ },
+
+ observe: function observe(subject, topic, data) {
+ switch (topic) {
+ case "weave:service:sync:finish":
+ case "weave:service:sync:error":
+ Utils.nextTick(this.sendCredentials, this);
+ break;
+ case "weave:service:start-over":
+ // This will call onAbort which will call unload().
+ this.jpakeclient.abort();
+ break;
+ }
+ },
+
+ sendCredentials: function sendCredentials() {
+ this._log.trace("Sending credentials.");
+ let credentials = {account: this.service.identity.account,
+ password: this.service.identity.basicPassword,
+ synckey: this.service.identity.syncKey,
+ serverURL: this.service.serverURL};
+ this.jpakeclient.sendAndComplete(credentials);
+ },
+
+ // JPAKEClient controller API
+
+ onComplete: function onComplete() {
+ this._log.debug("Exchange was completed successfully!");
+ this.unload();
+
+ // Schedule a Sync for soonish to fetch the data uploaded by the
+ // device with which we just paired.
+ this.service.scheduler.scheduleNextSync(this.service.scheduler.activeInterval);
+ },
+
+ onAbort: function onAbort(error) {
+ // It doesn't really matter why we aborted, but the channel is closed
+ // for sure, so we won't be able to do anything with it.
+ this._log.debug("Exchange was aborted with error: " + error);
+ this.unload();
+ },
+
+ // Irrelevant methods for this controller:
+ displayPIN: function displayPIN() {},
+ onPairingStart: function onPairingStart() {},
+ onPaired: function onPaired() {},
+};
diff --git a/services/sync/modules/keys.js b/services/sync/modules/keys.js
new file mode 100644
index 000000000..b93de7f31
--- /dev/null
+++ b/services/sync/modules/keys.js
@@ -0,0 +1,214 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "BulkKeyBundle",
+ "SyncKeyBundle"
+];
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/util.js");
+
+/**
+ * Represents a pair of keys.
+ *
+ * Each key stored in a key bundle is 256 bits. One key is used for symmetric
+ * encryption. The other is used for HMAC.
+ *
+ * A KeyBundle by itself is just an anonymous pair of keys. Other types
+ * deriving from this one add semantics, such as associated collections or
+ * generating a key bundle via HKDF from another key.
+ */
+function KeyBundle() {
+ this._encrypt = null;
+ this._encryptB64 = null;
+ this._hmac = null;
+ this._hmacB64 = null;
+ this._hmacObj = null;
+ this._sha256HMACHasher = null;
+}
+KeyBundle.prototype = {
+ _encrypt: null,
+ _encryptB64: null,
+ _hmac: null,
+ _hmacB64: null,
+ _hmacObj: null,
+ _sha256HMACHasher: null,
+
+ equals: function equals(bundle) {
+ return bundle &&
+ (bundle.hmacKey == this.hmacKey) &&
+ (bundle.encryptionKey == this.encryptionKey);
+ },
+
+ /*
+ * Accessors for the two keys.
+ */
+ get encryptionKey() {
+ return this._encrypt;
+ },
+
+ set encryptionKey(value) {
+ if (!value || typeof value != "string") {
+ throw new Error("Encryption key can only be set to string values.");
+ }
+
+ if (value.length < 16) {
+ throw new Error("Encryption key must be at least 128 bits long.");
+ }
+
+ this._encrypt = value;
+ this._encryptB64 = btoa(value);
+ },
+
+ get encryptionKeyB64() {
+ return this._encryptB64;
+ },
+
+ get hmacKey() {
+ return this._hmac;
+ },
+
+ set hmacKey(value) {
+ if (!value || typeof value != "string") {
+ throw new Error("HMAC key can only be set to string values.");
+ }
+
+ if (value.length < 16) {
+ throw new Error("HMAC key must be at least 128 bits long.");
+ }
+
+ this._hmac = value;
+ this._hmacB64 = btoa(value);
+ this._hmacObj = value ? Utils.makeHMACKey(value) : null;
+ this._sha256HMACHasher = value ? Utils.makeHMACHasher(
+ Ci.nsICryptoHMAC.SHA256, this._hmacObj) : null;
+ },
+
+ get hmacKeyB64() {
+ return this._hmacB64;
+ },
+
+ get hmacKeyObject() {
+ return this._hmacObj;
+ },
+
+ get sha256HMACHasher() {
+ return this._sha256HMACHasher;
+ },
+
+ /**
+ * Populate this key pair with 2 new, randomly generated keys.
+ */
+ generateRandom: function generateRandom() {
+ let generatedHMAC = Svc.Crypto.generateRandomKey();
+ let generatedEncr = Svc.Crypto.generateRandomKey();
+ this.keyPairB64 = [generatedEncr, generatedHMAC];
+ },
+
+};
+
+/**
+ * Represents a KeyBundle associated with a collection.
+ *
+ * This is just a KeyBundle with a collection attached.
+ */
+this.BulkKeyBundle = function BulkKeyBundle(collection) {
+ let log = Log.repository.getLogger("Sync.BulkKeyBundle");
+ log.info("BulkKeyBundle being created for " + collection);
+ KeyBundle.call(this);
+
+ this._collection = collection;
+}
+
+BulkKeyBundle.prototype = {
+ __proto__: KeyBundle.prototype,
+
+ get collection() {
+ return this._collection;
+ },
+
+ /**
+ * Obtain the key pair in this key bundle.
+ *
+ * The returned keys are represented as raw byte strings.
+ */
+ get keyPair() {
+ return [this.encryptionKey, this.hmacKey];
+ },
+
+ set keyPair(value) {
+ if (!Array.isArray(value) || value.length != 2) {
+ throw new Error("BulkKeyBundle.keyPair value must be array of 2 keys.");
+ }
+
+ this.encryptionKey = value[0];
+ this.hmacKey = value[1];
+ },
+
+ get keyPairB64() {
+ return [this.encryptionKeyB64, this.hmacKeyB64];
+ },
+
+ set keyPairB64(value) {
+ if (!Array.isArray(value) || value.length != 2) {
+ throw new Error("BulkKeyBundle.keyPairB64 value must be an array of 2 " +
+ "keys.");
+ }
+
+ this.encryptionKey = Utils.safeAtoB(value[0]);
+ this.hmacKey = Utils.safeAtoB(value[1]);
+ },
+};
+
+/**
+ * Represents a key pair derived from a Sync Key via HKDF.
+ *
+ * Instances of this type should be considered immutable. You create an
+ * instance by specifying the username and 26 character "friendly" Base32
+ * encoded Sync Key. The Sync Key is derived at instance creation time.
+ *
+ * If the username or Sync Key is invalid, an Error will be thrown.
+ */
+this.SyncKeyBundle = function SyncKeyBundle(username, syncKey) {
+ let log = Log.repository.getLogger("Sync.SyncKeyBundle");
+ log.info("SyncKeyBundle being created.");
+ KeyBundle.call(this);
+
+ this.generateFromKey(username, syncKey);
+}
+SyncKeyBundle.prototype = {
+ __proto__: KeyBundle.prototype,
+
+ /*
+ * If we've got a string, hash it into keys and store them.
+ */
+ generateFromKey: function generateFromKey(username, syncKey) {
+ if (!username || (typeof username != "string")) {
+ throw new Error("Sync Key cannot be generated from non-string username.");
+ }
+
+ if (!syncKey || (typeof syncKey != "string")) {
+ throw new Error("Sync Key cannot be generated from non-string key.");
+ }
+
+ if (!Utils.isPassphrase(syncKey)) {
+ throw new Error("Provided key is not a passphrase, cannot derive Sync " +
+ "Key Bundle.");
+ }
+
+ // Expand the base32 Sync Key to an AES 256 and 256 bit HMAC key.
+ let prk = Utils.decodeKeyBase32(syncKey);
+ let info = HMAC_INPUT + username;
+ let okm = Utils.hkdfExpand(prk, info, 32 * 2);
+ this.encryptionKey = okm.slice(0, 32);
+ this.hmacKey = okm.slice(32, 64);
+ },
+};
+
diff --git a/services/sync/modules/main.js b/services/sync/modules/main.js
new file mode 100644
index 000000000..af3399e7a
--- /dev/null
+++ b/services/sync/modules/main.js
@@ -0,0 +1,30 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ['Weave'];
+
+this.Weave = {};
+Components.utils.import("resource://services-sync/constants.js", Weave);
+var lazies = {
+ "jpakeclient.js": ["JPAKEClient", "SendCredentialsController"],
+ "service.js": ["Service"],
+ "status.js": ["Status"],
+ "util.js": ['Utils', 'Svc']
+};
+
+function lazyImport(module, dest, props) {
+ function getter(prop) {
+ return function() {
+ let ns = {};
+ Components.utils.import(module, ns);
+ delete dest[prop];
+ return dest[prop] = ns[prop];
+ };
+ }
+ props.forEach(function (prop) { dest.__defineGetter__(prop, getter(prop)); });
+}
+
+for (let mod in lazies) {
+ lazyImport("resource://services-sync/" + mod, Weave, lazies[mod]);
+}
diff --git a/services/sync/modules/policies.js b/services/sync/modules/policies.js
new file mode 100644
index 000000000..a3933426d
--- /dev/null
+++ b/services/sync/modules/policies.js
@@ -0,0 +1,983 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = [
+ "ErrorHandler",
+ "SyncScheduler",
+];
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/logmanager.js");
+Cu.import("resource://services-common/async.js");
+
+XPCOMUtils.defineLazyModuleGetter(this, "Status",
+ "resource://services-sync/status.js");
+XPCOMUtils.defineLazyModuleGetter(this, "AddonManager",
+ "resource://gre/modules/AddonManager.jsm");
+
+// Get the value for an interval that's stored in preferences. To save users
+// from themselves (and us from them!) the minimum time they can specify
+// is 60s.
+function getThrottledIntervalPreference(prefName) {
+ return Math.max(Svc.Prefs.get(prefName), 60) * 1000;
+}
+
+this.SyncScheduler = function SyncScheduler(service) {
+ this.service = service;
+ this.init();
+}
+SyncScheduler.prototype = {
+ _log: Log.repository.getLogger("Sync.SyncScheduler"),
+
+ _fatalLoginStatus: [LOGIN_FAILED_NO_USERNAME,
+ LOGIN_FAILED_NO_PASSWORD,
+ LOGIN_FAILED_NO_PASSPHRASE,
+ LOGIN_FAILED_INVALID_PASSPHRASE,
+ LOGIN_FAILED_LOGIN_REJECTED],
+
+ /**
+ * The nsITimer object that schedules the next sync. See scheduleNextSync().
+ */
+ syncTimer: null,
+
+ setDefaults: function setDefaults() {
+ this._log.trace("Setting SyncScheduler policy values to defaults.");
+
+ let service = Cc["@mozilla.org/weave/service;1"]
+ .getService(Ci.nsISupports)
+ .wrappedJSObject;
+
+ let part = service.fxAccountsEnabled ? "fxa" : "sync11";
+ let prefSDInterval = "scheduler." + part + ".singleDeviceInterval";
+ this.singleDeviceInterval = getThrottledIntervalPreference(prefSDInterval);
+
+ this.idleInterval = getThrottledIntervalPreference("scheduler.idleInterval");
+ this.activeInterval = getThrottledIntervalPreference("scheduler.activeInterval");
+ this.immediateInterval = getThrottledIntervalPreference("scheduler.immediateInterval");
+ this.eolInterval = getThrottledIntervalPreference("scheduler.eolInterval");
+
+ // A user is non-idle on startup by default.
+ this.idle = false;
+
+ this.hasIncomingItems = false;
+
+ this.clearSyncTriggers();
+ },
+
+ // nextSync is in milliseconds, but prefs can't hold that much
+ get nextSync() {
+ return Svc.Prefs.get("nextSync", 0) * 1000;
+ },
+ set nextSync(value) {
+ Svc.Prefs.set("nextSync", Math.floor(value / 1000));
+ },
+
+ get syncInterval() {
+ return Svc.Prefs.get("syncInterval", this.singleDeviceInterval);
+ },
+ set syncInterval(value) {
+ Svc.Prefs.set("syncInterval", value);
+ },
+
+ get syncThreshold() {
+ return Svc.Prefs.get("syncThreshold", SINGLE_USER_THRESHOLD);
+ },
+ set syncThreshold(value) {
+ Svc.Prefs.set("syncThreshold", value);
+ },
+
+ get globalScore() {
+ return Svc.Prefs.get("globalScore", 0);
+ },
+ set globalScore(value) {
+ Svc.Prefs.set("globalScore", value);
+ },
+
+ get numClients() {
+ return Svc.Prefs.get("numClients", 0);
+ },
+ set numClients(value) {
+ Svc.Prefs.set("numClients", value);
+ },
+
+ init: function init() {
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.service.main")];
+ this.setDefaults();
+ Svc.Obs.add("weave:engine:score:updated", this);
+ Svc.Obs.add("network:offline-status-changed", this);
+ Svc.Obs.add("weave:service:sync:start", this);
+ Svc.Obs.add("weave:service:sync:finish", this);
+ Svc.Obs.add("weave:engine:sync:finish", this);
+ Svc.Obs.add("weave:engine:sync:error", this);
+ Svc.Obs.add("weave:service:login:error", this);
+ Svc.Obs.add("weave:service:logout:finish", this);
+ Svc.Obs.add("weave:service:sync:error", this);
+ Svc.Obs.add("weave:service:backoff:interval", this);
+ Svc.Obs.add("weave:service:ready", this);
+ Svc.Obs.add("weave:engine:sync:applied", this);
+ Svc.Obs.add("weave:service:setup-complete", this);
+ Svc.Obs.add("weave:service:start-over", this);
+ Svc.Obs.add("FxA:hawk:backoff:interval", this);
+
+ if (Status.checkSetup() == STATUS_OK) {
+ Svc.Obs.add("wake_notification", this);
+ Svc.Idle.addIdleObserver(this, Svc.Prefs.get("scheduler.idleTime"));
+ }
+ },
+
+ observe: function observe(subject, topic, data) {
+ this._log.trace("Handling " + topic);
+ switch(topic) {
+ case "weave:engine:score:updated":
+ if (Status.login == LOGIN_SUCCEEDED) {
+ Utils.namedTimer(this.calculateScore, SCORE_UPDATE_DELAY, this,
+ "_scoreTimer");
+ }
+ break;
+ case "network:offline-status-changed":
+ // Whether online or offline, we'll reschedule syncs
+ this._log.trace("Network offline status change: " + data);
+ this.checkSyncStatus();
+ break;
+ case "weave:service:sync:start":
+ // Clear out any potentially pending syncs now that we're syncing
+ this.clearSyncTriggers();
+
+ // reset backoff info, if the server tells us to continue backing off,
+ // we'll handle that later
+ Status.resetBackoff();
+
+ this.globalScore = 0;
+ break;
+ case "weave:service:sync:finish":
+ this.nextSync = 0;
+ this.adjustSyncInterval();
+
+ if (Status.service == SYNC_FAILED_PARTIAL && this.requiresBackoff) {
+ this.requiresBackoff = false;
+ this.handleSyncError();
+ return;
+ }
+
+ let sync_interval;
+ this._syncErrors = 0;
+ if (Status.sync == NO_SYNC_NODE_FOUND) {
+ this._log.trace("Scheduling a sync at interval NO_SYNC_NODE_FOUND.");
+ sync_interval = NO_SYNC_NODE_INTERVAL;
+ }
+ this.scheduleNextSync(sync_interval);
+ break;
+ case "weave:engine:sync:finish":
+ if (data == "clients") {
+ // Update the client mode because it might change what we sync.
+ this.updateClientMode();
+ }
+ break;
+ case "weave:engine:sync:error":
+ // `subject` is the exception thrown by an engine's sync() method.
+ let exception = subject;
+ if (exception.status >= 500 && exception.status <= 504) {
+ this.requiresBackoff = true;
+ }
+ break;
+ case "weave:service:login:error":
+ this.clearSyncTriggers();
+
+ if (Status.login == MASTER_PASSWORD_LOCKED) {
+ // Try again later, just as if we threw an error... only without the
+ // error count.
+ this._log.debug("Couldn't log in: master password is locked.");
+ this._log.trace("Scheduling a sync at MASTER_PASSWORD_LOCKED_RETRY_INTERVAL");
+ this.scheduleAtInterval(MASTER_PASSWORD_LOCKED_RETRY_INTERVAL);
+ } else if (this._fatalLoginStatus.indexOf(Status.login) == -1) {
+ // Not a fatal login error, just an intermittent network or server
+ // issue. Keep on syncin'.
+ this.checkSyncStatus();
+ }
+ break;
+ case "weave:service:logout:finish":
+ // Start or cancel the sync timer depending on if
+ // logged in or logged out
+ this.checkSyncStatus();
+ break;
+ case "weave:service:sync:error":
+ // There may be multiple clients but if the sync fails, client mode
+ // should still be updated so that the next sync has a correct interval.
+ this.updateClientMode();
+ this.adjustSyncInterval();
+ this.nextSync = 0;
+ this.handleSyncError();
+ break;
+ case "FxA:hawk:backoff:interval":
+ case "weave:service:backoff:interval":
+ let requested_interval = subject * 1000;
+ this._log.debug("Got backoff notification: " + requested_interval + "ms");
+ // Leave up to 25% more time for the back off.
+ let interval = requested_interval * (1 + Math.random() * 0.25);
+ Status.backoffInterval = interval;
+ Status.minimumNextSync = Date.now() + requested_interval;
+ this._log.debug("Fuzzed minimum next sync: " + Status.minimumNextSync);
+ break;
+ case "weave:service:ready":
+ // Applications can specify this preference if they want autoconnect
+ // to happen after a fixed delay.
+ let delay = Svc.Prefs.get("autoconnectDelay");
+ if (delay) {
+ this.delayedAutoConnect(delay);
+ }
+ break;
+ case "weave:engine:sync:applied":
+ let numItems = subject.succeeded;
+ this._log.trace("Engine " + data + " successfully applied " + numItems +
+ " items.");
+ if (numItems) {
+ this.hasIncomingItems = true;
+ }
+ break;
+ case "weave:service:setup-complete":
+ Services.prefs.savePrefFile(null);
+ Svc.Idle.addIdleObserver(this, Svc.Prefs.get("scheduler.idleTime"));
+ Svc.Obs.add("wake_notification", this);
+ break;
+ case "weave:service:start-over":
+ this.setDefaults();
+ try {
+ Svc.Idle.removeIdleObserver(this, Svc.Prefs.get("scheduler.idleTime"));
+ } catch (ex) {
+ if (ex.result != Cr.NS_ERROR_FAILURE) {
+ throw ex;
+ }
+ // In all likelihood we didn't have an idle observer registered yet.
+ // It's all good.
+ }
+ break;
+ case "idle":
+ this._log.trace("We're idle.");
+ this.idle = true;
+ // Adjust the interval for future syncs. This won't actually have any
+ // effect until the next pending sync (which will happen soon since we
+ // were just active.)
+ this.adjustSyncInterval();
+ break;
+ case "active":
+ this._log.trace("Received notification that we're back from idle.");
+ this.idle = false;
+ Utils.namedTimer(function onBack() {
+ if (this.idle) {
+ this._log.trace("... and we're idle again. " +
+ "Ignoring spurious back notification.");
+ return;
+ }
+
+ this._log.trace("Genuine return from idle. Syncing.");
+ // Trigger a sync if we have multiple clients.
+ if (this.numClients > 1) {
+ this.scheduleNextSync(0);
+ }
+ }, IDLE_OBSERVER_BACK_DELAY, this, "idleDebouncerTimer");
+ break;
+ case "wake_notification":
+ this._log.debug("Woke from sleep.");
+ Utils.nextTick(() => {
+ // Trigger a sync if we have multiple clients. We give it 5 seconds
+ // incase the network is still in the process of coming back up.
+ if (this.numClients > 1) {
+ this._log.debug("More than 1 client. Will sync in 5s.");
+ this.scheduleNextSync(5000);
+ }
+ });
+ break;
+ }
+ },
+
+ adjustSyncInterval: function adjustSyncInterval() {
+ if (Status.eol) {
+ this._log.debug("Server status is EOL; using eolInterval.");
+ this.syncInterval = this.eolInterval;
+ return;
+ }
+
+ if (this.numClients <= 1) {
+ this._log.trace("Adjusting syncInterval to singleDeviceInterval.");
+ this.syncInterval = this.singleDeviceInterval;
+ return;
+ }
+
+ // Only MULTI_DEVICE clients will enter this if statement
+ // since SINGLE_USER clients will be handled above.
+ if (this.idle) {
+ this._log.trace("Adjusting syncInterval to idleInterval.");
+ this.syncInterval = this.idleInterval;
+ return;
+ }
+
+ if (this.hasIncomingItems) {
+ this._log.trace("Adjusting syncInterval to immediateInterval.");
+ this.hasIncomingItems = false;
+ this.syncInterval = this.immediateInterval;
+ } else {
+ this._log.trace("Adjusting syncInterval to activeInterval.");
+ this.syncInterval = this.activeInterval;
+ }
+ },
+
+ calculateScore: function calculateScore() {
+ let engines = [this.service.clientsEngine].concat(this.service.engineManager.getEnabled());
+ for (let i = 0;i < engines.length;i++) {
+ this._log.trace(engines[i].name + ": score: " + engines[i].score);
+ this.globalScore += engines[i].score;
+ engines[i]._tracker.resetScore();
+ }
+
+ this._log.trace("Global score updated: " + this.globalScore);
+ this.checkSyncStatus();
+ },
+
+ /**
+ * Process the locally stored clients list to figure out what mode to be in
+ */
+ updateClientMode: function updateClientMode() {
+ // Nothing to do if it's the same amount
+ let numClients = this.service.clientsEngine.stats.numClients;
+ if (this.numClients == numClients)
+ return;
+
+ this._log.debug("Client count: " + this.numClients + " -> " + numClients);
+ this.numClients = numClients;
+
+ if (numClients <= 1) {
+ this._log.trace("Adjusting syncThreshold to SINGLE_USER_THRESHOLD");
+ this.syncThreshold = SINGLE_USER_THRESHOLD;
+ } else {
+ this._log.trace("Adjusting syncThreshold to MULTI_DEVICE_THRESHOLD");
+ this.syncThreshold = MULTI_DEVICE_THRESHOLD;
+ }
+ this.adjustSyncInterval();
+ },
+
+ /**
+ * Check if we should be syncing and schedule the next sync, if it's not scheduled
+ */
+ checkSyncStatus: function checkSyncStatus() {
+ // Should we be syncing now, if not, cancel any sync timers and return
+ // if we're in backoff, we'll schedule the next sync.
+ let ignore = [kSyncBackoffNotMet, kSyncMasterPasswordLocked];
+ let skip = this.service._checkSync(ignore);
+ this._log.trace("_checkSync returned \"" + skip + "\".");
+ if (skip) {
+ this.clearSyncTriggers();
+ return;
+ }
+
+ // Only set the wait time to 0 if we need to sync right away
+ let wait;
+ if (this.globalScore > this.syncThreshold) {
+ this._log.debug("Global Score threshold hit, triggering sync.");
+ wait = 0;
+ }
+ this.scheduleNextSync(wait);
+ },
+
+ /**
+ * Call sync() if Master Password is not locked.
+ *
+ * Otherwise, reschedule a sync for later.
+ */
+ syncIfMPUnlocked: function syncIfMPUnlocked() {
+ // No point if we got kicked out by the master password dialog.
+ if (Status.login == MASTER_PASSWORD_LOCKED &&
+ Utils.mpLocked()) {
+ this._log.debug("Not initiating sync: Login status is " + Status.login);
+
+ // If we're not syncing now, we need to schedule the next one.
+ this._log.trace("Scheduling a sync at MASTER_PASSWORD_LOCKED_RETRY_INTERVAL");
+ this.scheduleAtInterval(MASTER_PASSWORD_LOCKED_RETRY_INTERVAL);
+ return;
+ }
+
+ Utils.nextTick(this.service.sync, this.service);
+ },
+
+ /**
+ * Set a timer for the next sync
+ */
+ scheduleNextSync: function scheduleNextSync(interval) {
+ // If no interval was specified, use the current sync interval.
+ if (interval == null) {
+ interval = this.syncInterval;
+ }
+
+ // Ensure the interval is set to no less than the backoff.
+ if (Status.backoffInterval && interval < Status.backoffInterval) {
+ this._log.trace("Requested interval " + interval +
+ " ms is smaller than the backoff interval. " +
+ "Using backoff interval " +
+ Status.backoffInterval + " ms instead.");
+ interval = Status.backoffInterval;
+ }
+
+ if (this.nextSync != 0) {
+ // There's already a sync scheduled. Don't reschedule if there's already
+ // a timer scheduled for sooner than requested.
+ let currentInterval = this.nextSync - Date.now();
+ this._log.trace("There's already a sync scheduled in " +
+ currentInterval + " ms.");
+ if (currentInterval < interval && this.syncTimer) {
+ this._log.trace("Ignoring scheduling request for next sync in " +
+ interval + " ms.");
+ return;
+ }
+ }
+
+ // Start the sync right away if we're already late.
+ if (interval <= 0) {
+ this._log.trace("Requested sync should happen right away.");
+ this.syncIfMPUnlocked();
+ return;
+ }
+
+ this._log.debug("Next sync in " + interval + " ms.");
+ Utils.namedTimer(this.syncIfMPUnlocked, interval, this, "syncTimer");
+
+ // Save the next sync time in-case sync is disabled (logout/offline/etc.)
+ this.nextSync = Date.now() + interval;
+ },
+
+
+ /**
+ * Incorporates the backoff/retry logic used in error handling and elective
+ * non-syncing.
+ */
+ scheduleAtInterval: function scheduleAtInterval(minimumInterval) {
+ let interval = Utils.calculateBackoff(this._syncErrors,
+ MINIMUM_BACKOFF_INTERVAL,
+ Status.backoffInterval);
+ if (minimumInterval) {
+ interval = Math.max(minimumInterval, interval);
+ }
+
+ this._log.debug("Starting client-initiated backoff. Next sync in " +
+ interval + " ms.");
+ this.scheduleNextSync(interval);
+ },
+
+ /**
+ * Automatically start syncing after the given delay (in seconds).
+ *
+ * Applications can define the `services.sync.autoconnectDelay` preference
+ * to have this called automatically during start-up with the pref value as
+ * the argument. Alternatively, they can call it themselves to control when
+ * Sync should first start to sync.
+ */
+ delayedAutoConnect: function delayedAutoConnect(delay) {
+ if (this.service._checkSetup() == STATUS_OK) {
+ Utils.namedTimer(this.autoConnect, delay * 1000, this, "_autoTimer");
+ }
+ },
+
+ autoConnect: function autoConnect() {
+ if (this.service._checkSetup() == STATUS_OK && !this.service._checkSync()) {
+ // Schedule a sync based on when a previous sync was scheduled.
+ // scheduleNextSync() will do the right thing if that time lies in
+ // the past.
+ this.scheduleNextSync(this.nextSync - Date.now());
+ }
+
+ // Once autoConnect is called we no longer need _autoTimer.
+ if (this._autoTimer) {
+ this._autoTimer.clear();
+ }
+ },
+
+ _syncErrors: 0,
+ /**
+ * Deal with sync errors appropriately
+ */
+ handleSyncError: function handleSyncError() {
+ this._log.trace("In handleSyncError. Error count: " + this._syncErrors);
+ this._syncErrors++;
+
+ // Do nothing on the first couple of failures, if we're not in
+ // backoff due to 5xx errors.
+ if (!Status.enforceBackoff) {
+ if (this._syncErrors < MAX_ERROR_COUNT_BEFORE_BACKOFF) {
+ this.scheduleNextSync();
+ return;
+ }
+ this._log.debug("Sync error count has exceeded " +
+ MAX_ERROR_COUNT_BEFORE_BACKOFF + "; enforcing backoff.");
+ Status.enforceBackoff = true;
+ }
+
+ this.scheduleAtInterval();
+ },
+
+
+ /**
+ * Remove any timers/observers that might trigger a sync
+ */
+ clearSyncTriggers: function clearSyncTriggers() {
+ this._log.debug("Clearing sync triggers and the global score.");
+ this.globalScore = this.nextSync = 0;
+
+ // Clear out any scheduled syncs
+ if (this.syncTimer)
+ this.syncTimer.clear();
+ },
+
+};
+
+this.ErrorHandler = function ErrorHandler(service) {
+ this.service = service;
+ this.init();
+}
+ErrorHandler.prototype = {
+ MINIMUM_ALERT_INTERVAL_MSEC: 604800000, // One week.
+
+ /**
+ * Flag that turns on error reporting for all errors, incl. network errors.
+ */
+ dontIgnoreErrors: false,
+
+ /**
+ * Flag that indicates if we have already reported a prolonged failure.
+ * Once set, we don't report it again, meaning this error is only reported
+ * one per run.
+ */
+ didReportProlongedError: false,
+
+ init: function init() {
+ Svc.Obs.add("weave:engine:sync:applied", this);
+ Svc.Obs.add("weave:engine:sync:error", this);
+ Svc.Obs.add("weave:service:login:error", this);
+ Svc.Obs.add("weave:service:sync:error", this);
+ Svc.Obs.add("weave:service:sync:finish", this);
+
+ this.initLogs();
+ },
+
+ initLogs: function initLogs() {
+ this._log = Log.repository.getLogger("Sync.ErrorHandler");
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.service.main")];
+
+ let root = Log.repository.getLogger("Sync");
+ root.level = Log.Level[Svc.Prefs.get("log.rootLogger")];
+
+ let logs = ["Sync", "FirefoxAccounts", "Hawk", "Common.TokenServerClient",
+ "Sync.SyncMigration", "browserwindow.syncui",
+ "Services.Common.RESTRequest", "Services.Common.RESTRequest",
+ "BookmarkSyncUtils"
+ ];
+
+ this._logManager = new LogManager(Svc.Prefs, logs, "sync");
+ },
+
+ observe: function observe(subject, topic, data) {
+ this._log.trace("Handling " + topic);
+ switch(topic) {
+ case "weave:engine:sync:applied":
+ if (subject.newFailed) {
+ // An engine isn't able to apply one or more incoming records.
+ // We don't fail hard on this, but it usually indicates a bug,
+ // so for now treat it as sync error (c.f. Service._syncEngine())
+ Status.engines = [data, ENGINE_APPLY_FAIL];
+ this._log.debug(data + " failed to apply some records.");
+ }
+ break;
+ case "weave:engine:sync:error": {
+ let exception = subject; // exception thrown by engine's sync() method
+ let engine_name = data; // engine name that threw the exception
+
+ this.checkServerError(exception);
+
+ Status.engines = [engine_name, exception.failureCode || ENGINE_UNKNOWN_FAIL];
+ if (Async.isShutdownException(exception)) {
+ this._log.debug(engine_name + " was interrupted due to the application shutting down");
+ } else {
+ this._log.debug(engine_name + " failed", exception);
+ Services.telemetry.getKeyedHistogramById("WEAVE_ENGINE_SYNC_ERRORS")
+ .add(engine_name);
+ }
+ break;
+ }
+ case "weave:service:login:error":
+ this._log.error("Sync encountered a login error");
+ this.resetFileLog();
+
+ if (this.shouldReportError()) {
+ this.notifyOnNextTick("weave:ui:login:error");
+ } else {
+ this.notifyOnNextTick("weave:ui:clear-error");
+ }
+
+ this.dontIgnoreErrors = false;
+ break;
+ case "weave:service:sync:error": {
+ if (Status.sync == CREDENTIALS_CHANGED) {
+ this.service.logout();
+ }
+
+ let exception = subject;
+ if (Async.isShutdownException(exception)) {
+ // If we are shutting down we just log the fact, attempt to flush
+ // the log file and get out of here!
+ this._log.error("Sync was interrupted due to the application shutting down");
+ this.resetFileLog();
+ break;
+ }
+
+ // Not a shutdown related exception...
+ this._log.error("Sync encountered an error", exception);
+ this.resetFileLog();
+
+ if (this.shouldReportError()) {
+ this.notifyOnNextTick("weave:ui:sync:error");
+ } else {
+ this.notifyOnNextTick("weave:ui:sync:finish");
+ }
+
+ this.dontIgnoreErrors = false;
+ break;
+ }
+ case "weave:service:sync:finish":
+ this._log.trace("Status.service is " + Status.service);
+
+ // Check both of these status codes: in the event of a failure in one
+ // engine, Status.service will be SYNC_FAILED_PARTIAL despite
+ // Status.sync being SYNC_SUCCEEDED.
+ // *facepalm*
+ if (Status.sync == SYNC_SUCCEEDED &&
+ Status.service == STATUS_OK) {
+ // Great. Let's clear our mid-sync 401 note.
+ this._log.trace("Clearing lastSyncReassigned.");
+ Svc.Prefs.reset("lastSyncReassigned");
+ }
+
+ if (Status.service == SYNC_FAILED_PARTIAL) {
+ this._log.error("Some engines did not sync correctly.");
+ this.resetFileLog();
+
+ if (this.shouldReportError()) {
+ this.dontIgnoreErrors = false;
+ this.notifyOnNextTick("weave:ui:sync:error");
+ break;
+ }
+ } else {
+ this.resetFileLog();
+ }
+ this.dontIgnoreErrors = false;
+ this.notifyOnNextTick("weave:ui:sync:finish");
+ break;
+ }
+ },
+
+ notifyOnNextTick: function notifyOnNextTick(topic) {
+ Utils.nextTick(function() {
+ this._log.trace("Notifying " + topic +
+ ". Status.login is " + Status.login +
+ ". Status.sync is " + Status.sync);
+ Svc.Obs.notify(topic);
+ }, this);
+ },
+
+ /**
+ * Trigger a sync and don't muffle any errors, particularly network errors.
+ */
+ syncAndReportErrors: function syncAndReportErrors() {
+ this._log.debug("Beginning user-triggered sync.");
+
+ this.dontIgnoreErrors = true;
+ Utils.nextTick(this.service.sync, this.service);
+ },
+
+ _dumpAddons: function _dumpAddons() {
+ // Just dump the items that sync may be concerned with. Specifically,
+ // active extensions that are not hidden.
+ let addonPromise = new Promise(resolve => {
+ try {
+ AddonManager.getAddonsByTypes(["extension"], resolve);
+ } catch (e) {
+ this._log.warn("Failed to dump addons", e)
+ resolve([])
+ }
+ });
+
+ return addonPromise.then(addons => {
+ let relevantAddons = addons.filter(x => x.isActive && !x.hidden);
+ this._log.debug("Addons installed", relevantAddons.length);
+ for (let addon of relevantAddons) {
+ this._log.debug(" - ${name}, version ${version}, id ${id}", addon);
+ }
+ });
+ },
+
+ /**
+ * Generate a log file for the sync that just completed
+ * and refresh the input & output streams.
+ */
+ resetFileLog: function resetFileLog() {
+ let onComplete = logType => {
+ Svc.Obs.notify("weave:service:reset-file-log");
+ this._log.trace("Notified: " + Date.now());
+ if (logType == this._logManager.ERROR_LOG_WRITTEN) {
+ Cu.reportError("Sync encountered an error - see about:sync-log for the log file.");
+ }
+ };
+
+ // If we're writing an error log, dump extensions that may be causing problems.
+ let beforeResetLog;
+ if (this._logManager.sawError) {
+ beforeResetLog = this._dumpAddons();
+ } else {
+ beforeResetLog = Promise.resolve();
+ }
+ // Note we do not return the promise here - the caller doesn't need to wait
+ // for this to complete.
+ beforeResetLog
+ .then(() => this._logManager.resetFileLog())
+ .then(onComplete, onComplete);
+ },
+
+ /**
+ * Translates server error codes to meaningful strings.
+ *
+ * @param code
+ * server error code as an integer
+ */
+ errorStr: function errorStr(code) {
+ switch (code.toString()) {
+ case "1":
+ return "illegal-method";
+ case "2":
+ return "invalid-captcha";
+ case "3":
+ return "invalid-username";
+ case "4":
+ return "cannot-overwrite-resource";
+ case "5":
+ return "userid-mismatch";
+ case "6":
+ return "json-parse-failure";
+ case "7":
+ return "invalid-password";
+ case "8":
+ return "invalid-record";
+ case "9":
+ return "weak-password";
+ default:
+ return "generic-server-error";
+ }
+ },
+
+ // A function to indicate if Sync errors should be "reported" - which in this
+ // context really means "should be notify observers of an error" - but note
+ // that since bug 1180587, no one is going to surface an error to the user.
+ shouldReportError: function shouldReportError() {
+ if (Status.login == MASTER_PASSWORD_LOCKED) {
+ this._log.trace("shouldReportError: false (master password locked).");
+ return false;
+ }
+
+ if (this.dontIgnoreErrors) {
+ return true;
+ }
+
+ if (Status.login == LOGIN_FAILED_LOGIN_REJECTED) {
+ // An explicit LOGIN_REJECTED state is always reported (bug 1081158)
+ this._log.trace("shouldReportError: true (login was rejected)");
+ return true;
+ }
+
+ let lastSync = Svc.Prefs.get("lastSync");
+ if (lastSync && ((Date.now() - Date.parse(lastSync)) >
+ Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 1000)) {
+ Status.sync = PROLONGED_SYNC_FAILURE;
+ if (this.didReportProlongedError) {
+ this._log.trace("shouldReportError: false (prolonged sync failure, but" +
+ " we've already reported it).");
+ return false;
+ }
+ this._log.trace("shouldReportError: true (first prolonged sync failure).");
+ this.didReportProlongedError = true;
+ return true;
+ }
+
+ // We got a 401 mid-sync. Wait for the next sync before actually handling
+ // an error. This assumes that we'll get a 401 again on a login fetch in
+ // order to report the error.
+ if (!this.service.clusterURL) {
+ this._log.trace("shouldReportError: false (no cluster URL; " +
+ "possible node reassignment).");
+ return false;
+ }
+
+
+ let result = ([Status.login, Status.sync].indexOf(SERVER_MAINTENANCE) == -1 &&
+ [Status.login, Status.sync].indexOf(LOGIN_FAILED_NETWORK_ERROR) == -1);
+ this._log.trace("shouldReportError: ${result} due to login=${login}, sync=${sync}",
+ {result, login: Status.login, sync: Status.sync});
+ return result;
+ },
+
+ get currentAlertMode() {
+ return Svc.Prefs.get("errorhandler.alert.mode");
+ },
+
+ set currentAlertMode(str) {
+ return Svc.Prefs.set("errorhandler.alert.mode", str);
+ },
+
+ get earliestNextAlert() {
+ return Svc.Prefs.get("errorhandler.alert.earliestNext", 0) * 1000;
+ },
+
+ set earliestNextAlert(msec) {
+ return Svc.Prefs.set("errorhandler.alert.earliestNext", msec / 1000);
+ },
+
+ clearServerAlerts: function () {
+ // If we have any outstanding alerts, apparently they're no longer relevant.
+ Svc.Prefs.resetBranch("errorhandler.alert");
+ },
+
+ /**
+ * X-Weave-Alert headers can include a JSON object:
+ *
+ * {
+ * "code": // One of "hard-eol", "soft-eol".
+ * "url": // For "Learn more" link.
+ * "message": // Logged in Sync logs.
+ * }
+ */
+ handleServerAlert: function (xwa) {
+ if (!xwa.code) {
+ this._log.warn("Got structured X-Weave-Alert, but no alert code.");
+ return;
+ }
+
+ switch (xwa.code) {
+ // Gently and occasionally notify the user that this service will be
+ // shutting down.
+ case "soft-eol":
+ // Fall through.
+
+ // Tell the user that this service has shut down, and drop our syncing
+ // frequency dramatically.
+ case "hard-eol":
+ // Note that both of these alerts should be subservient to future "sign
+ // in with your Firefox Account" storage alerts.
+ if ((this.currentAlertMode != xwa.code) ||
+ (this.earliestNextAlert < Date.now())) {
+ Utils.nextTick(function() {
+ Svc.Obs.notify("weave:eol", xwa);
+ }, this);
+ this._log.error("X-Weave-Alert: " + xwa.code + ": " + xwa.message);
+ this.earliestNextAlert = Date.now() + this.MINIMUM_ALERT_INTERVAL_MSEC;
+ this.currentAlertMode = xwa.code;
+ }
+ break;
+ default:
+ this._log.debug("Got unexpected X-Weave-Alert code: " + xwa.code);
+ }
+ },
+
+ /**
+ * Handle HTTP response results or exceptions and set the appropriate
+ * Status.* bits.
+ *
+ * This method also looks for "side-channel" warnings.
+ */
+ checkServerError: function (resp) {
+ switch (resp.status) {
+ case 200:
+ case 404:
+ case 513:
+ let xwa = resp.headers['x-weave-alert'];
+
+ // Only process machine-readable alerts.
+ if (!xwa || !xwa.startsWith("{")) {
+ this.clearServerAlerts();
+ return;
+ }
+
+ try {
+ xwa = JSON.parse(xwa);
+ } catch (ex) {
+ this._log.warn("Malformed X-Weave-Alert from server: " + xwa);
+ return;
+ }
+
+ this.handleServerAlert(xwa);
+ break;
+
+ case 400:
+ if (resp == RESPONSE_OVER_QUOTA) {
+ Status.sync = OVER_QUOTA;
+ }
+ break;
+
+ case 401:
+ this.service.logout();
+ this._log.info("Got 401 response; resetting clusterURL.");
+ this.service.clusterURL = null;
+
+ let delay = 0;
+ if (Svc.Prefs.get("lastSyncReassigned")) {
+ // We got a 401 in the middle of the previous sync, and we just got
+ // another. Login must have succeeded in order for us to get here, so
+ // the password should be correct.
+ // This is likely to be an intermittent server issue, so back off and
+ // give it time to recover.
+ this._log.warn("Last sync also failed for 401. Delaying next sync.");
+ delay = MINIMUM_BACKOFF_INTERVAL;
+ } else {
+ this._log.debug("New mid-sync 401 failure. Making a note.");
+ Svc.Prefs.set("lastSyncReassigned", true);
+ }
+ this._log.info("Attempting to schedule another sync.");
+ this.service.scheduler.scheduleNextSync(delay);
+ break;
+
+ case 500:
+ case 502:
+ case 503:
+ case 504:
+ Status.enforceBackoff = true;
+ if (resp.status == 503 && resp.headers["retry-after"]) {
+ let retryAfter = resp.headers["retry-after"];
+ this._log.debug("Got Retry-After: " + retryAfter);
+ if (this.service.isLoggedIn) {
+ Status.sync = SERVER_MAINTENANCE;
+ } else {
+ Status.login = SERVER_MAINTENANCE;
+ }
+ Svc.Obs.notify("weave:service:backoff:interval",
+ parseInt(retryAfter, 10));
+ }
+ break;
+ }
+
+ switch (resp.result) {
+ case Cr.NS_ERROR_UNKNOWN_HOST:
+ case Cr.NS_ERROR_CONNECTION_REFUSED:
+ case Cr.NS_ERROR_NET_TIMEOUT:
+ case Cr.NS_ERROR_NET_RESET:
+ case Cr.NS_ERROR_NET_INTERRUPT:
+ case Cr.NS_ERROR_PROXY_CONNECTION_REFUSED:
+ // The constant says it's about login, but in fact it just
+ // indicates general network error.
+ if (this.service.isLoggedIn) {
+ Status.sync = LOGIN_FAILED_NETWORK_ERROR;
+ } else {
+ Status.login = LOGIN_FAILED_NETWORK_ERROR;
+ }
+ break;
+ }
+ },
+};
diff --git a/services/sync/modules/record.js b/services/sync/modules/record.js
new file mode 100644
index 000000000..02f7f281a
--- /dev/null
+++ b/services/sync/modules/record.js
@@ -0,0 +1,1039 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = [
+ "WBORecord",
+ "RecordManager",
+ "CryptoWrapper",
+ "CollectionKeyManager",
+ "Collection",
+];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cr = Components.results;
+var Cu = Components.utils;
+
+const CRYPTO_COLLECTION = "crypto";
+const KEYS_WBO = "keys";
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/async.js");
+
+this.WBORecord = function WBORecord(collection, id) {
+ this.data = {};
+ this.payload = {};
+ this.collection = collection; // Optional.
+ this.id = id; // Optional.
+}
+WBORecord.prototype = {
+ _logName: "Sync.Record.WBO",
+
+ get sortindex() {
+ if (this.data.sortindex)
+ return this.data.sortindex;
+ return 0;
+ },
+
+ // Get thyself from your URI, then deserialize.
+ // Set thine 'response' field.
+ fetch: function fetch(resource) {
+ if (!resource instanceof Resource) {
+ throw new Error("First argument must be a Resource instance.");
+ }
+
+ let r = resource.get();
+ if (r.success) {
+ this.deserialize(r); // Warning! Muffles exceptions!
+ }
+ this.response = r;
+ return this;
+ },
+
+ upload: function upload(resource) {
+ if (!resource instanceof Resource) {
+ throw new Error("First argument must be a Resource instance.");
+ }
+
+ return resource.put(this);
+ },
+
+ // Take a base URI string, with trailing slash, and return the URI of this
+ // WBO based on collection and ID.
+ uri: function(base) {
+ if (this.collection && this.id) {
+ let url = Utils.makeURI(base + this.collection + "/" + this.id);
+ url.QueryInterface(Ci.nsIURL);
+ return url;
+ }
+ return null;
+ },
+
+ deserialize: function deserialize(json) {
+ this.data = json.constructor.toString() == String ? JSON.parse(json) : json;
+
+ try {
+ // The payload is likely to be JSON, but if not, keep it as a string
+ this.payload = JSON.parse(this.payload);
+ } catch(ex) {}
+ },
+
+ toJSON: function toJSON() {
+ // Copy fields from data to be stringified, making sure payload is a string
+ let obj = {};
+ for (let [key, val] of Object.entries(this.data))
+ obj[key] = key == "payload" ? JSON.stringify(val) : val;
+ if (this.ttl)
+ obj.ttl = this.ttl;
+ return obj;
+ },
+
+ toString: function toString() {
+ return "{ " +
+ "id: " + this.id + " " +
+ "index: " + this.sortindex + " " +
+ "modified: " + this.modified + " " +
+ "ttl: " + this.ttl + " " +
+ "payload: " + JSON.stringify(this.payload) +
+ " }";
+ }
+};
+
+Utils.deferGetSet(WBORecord, "data", ["id", "modified", "sortindex", "payload"]);
+
+this.CryptoWrapper = function CryptoWrapper(collection, id) {
+ this.cleartext = {};
+ WBORecord.call(this, collection, id);
+ this.ciphertext = null;
+ this.id = id;
+}
+CryptoWrapper.prototype = {
+ __proto__: WBORecord.prototype,
+ _logName: "Sync.Record.CryptoWrapper",
+
+ ciphertextHMAC: function ciphertextHMAC(keyBundle) {
+ let hasher = keyBundle.sha256HMACHasher;
+ if (!hasher) {
+ throw "Cannot compute HMAC without an HMAC key.";
+ }
+
+ return Utils.bytesAsHex(Utils.digestUTF8(this.ciphertext, hasher));
+ },
+
+ /*
+ * Don't directly use the sync key. Instead, grab a key for this
+ * collection, which is decrypted with the sync key.
+ *
+ * Cache those keys; invalidate the cache if the time on the keys collection
+ * changes, or other auth events occur.
+ *
+ * Optional key bundle overrides the collection key lookup.
+ */
+ encrypt: function encrypt(keyBundle) {
+ if (!keyBundle) {
+ throw new Error("A key bundle must be supplied to encrypt.");
+ }
+
+ this.IV = Svc.Crypto.generateRandomIV();
+ this.ciphertext = Svc.Crypto.encrypt(JSON.stringify(this.cleartext),
+ keyBundle.encryptionKeyB64, this.IV);
+ this.hmac = this.ciphertextHMAC(keyBundle);
+ this.cleartext = null;
+ },
+
+ // Optional key bundle.
+ decrypt: function decrypt(keyBundle) {
+ if (!this.ciphertext) {
+ throw "No ciphertext: nothing to decrypt?";
+ }
+
+ if (!keyBundle) {
+ throw new Error("A key bundle must be supplied to decrypt.");
+ }
+
+ // Authenticate the encrypted blob with the expected HMAC
+ let computedHMAC = this.ciphertextHMAC(keyBundle);
+
+ if (computedHMAC != this.hmac) {
+ Utils.throwHMACMismatch(this.hmac, computedHMAC);
+ }
+
+ // Handle invalid data here. Elsewhere we assume that cleartext is an object.
+ let cleartext = Svc.Crypto.decrypt(this.ciphertext,
+ keyBundle.encryptionKeyB64, this.IV);
+ let json_result = JSON.parse(cleartext);
+
+ if (json_result && (json_result instanceof Object)) {
+ this.cleartext = json_result;
+ this.ciphertext = null;
+ } else {
+ throw "Decryption failed: result is <" + json_result + ">, not an object.";
+ }
+
+ // Verify that the encrypted id matches the requested record's id.
+ if (this.cleartext.id != this.id)
+ throw "Record id mismatch: " + this.cleartext.id + " != " + this.id;
+
+ return this.cleartext;
+ },
+
+ toString: function toString() {
+ let payload = this.deleted ? "DELETED" : JSON.stringify(this.cleartext);
+
+ return "{ " +
+ "id: " + this.id + " " +
+ "index: " + this.sortindex + " " +
+ "modified: " + this.modified + " " +
+ "ttl: " + this.ttl + " " +
+ "payload: " + payload + " " +
+ "collection: " + (this.collection || "undefined") +
+ " }";
+ },
+
+ // The custom setter below masks the parent's getter, so explicitly call it :(
+ get id() {
+ return WBORecord.prototype.__lookupGetter__("id").call(this);
+ },
+
+ // Keep both plaintext and encrypted versions of the id to verify integrity
+ set id(val) {
+ WBORecord.prototype.__lookupSetter__("id").call(this, val);
+ return this.cleartext.id = val;
+ },
+};
+
+Utils.deferGetSet(CryptoWrapper, "payload", ["ciphertext", "IV", "hmac"]);
+Utils.deferGetSet(CryptoWrapper, "cleartext", "deleted");
+
+/**
+ * An interface and caching layer for records.
+ */
+this.RecordManager = function RecordManager(service) {
+ this.service = service;
+
+ this._log = Log.repository.getLogger(this._logName);
+ this._records = {};
+}
+RecordManager.prototype = {
+ _recordType: CryptoWrapper,
+ _logName: "Sync.RecordManager",
+
+ import: function RecordMgr_import(url) {
+ this._log.trace("Importing record: " + (url.spec ? url.spec : url));
+ try {
+ // Clear out the last response with empty object if GET fails
+ this.response = {};
+ this.response = this.service.resource(url).get();
+
+ // Don't parse and save the record on failure
+ if (!this.response.success)
+ return null;
+
+ let record = new this._recordType(url);
+ record.deserialize(this.response);
+
+ return this.set(url, record);
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.debug("Failed to import record", ex);
+ return null;
+ }
+ },
+
+ get: function RecordMgr_get(url) {
+ // Use a url string as the key to the hash
+ let spec = url.spec ? url.spec : url;
+ if (spec in this._records)
+ return this._records[spec];
+ return this.import(url);
+ },
+
+ set: function RecordMgr_set(url, record) {
+ let spec = url.spec ? url.spec : url;
+ return this._records[spec] = record;
+ },
+
+ contains: function RecordMgr_contains(url) {
+ if ((url.spec || url) in this._records)
+ return true;
+ return false;
+ },
+
+ clearCache: function recordMgr_clearCache() {
+ this._records = {};
+ },
+
+ del: function RecordMgr_del(url) {
+ delete this._records[url];
+ }
+};
+
+/**
+ * Keeps track of mappings between collection names ('tabs') and KeyBundles.
+ *
+ * You can update this thing simply by giving it /info/collections. It'll
+ * use the last modified time to bring itself up to date.
+ */
+this.CollectionKeyManager = function CollectionKeyManager(lastModified, default_, collections) {
+ this.lastModified = lastModified || 0;
+ this._default = default_ || null;
+ this._collections = collections || {};
+
+ this._log = Log.repository.getLogger("Sync.CollectionKeyManager");
+}
+
+// TODO: persist this locally as an Identity. Bug 610913.
+// Note that the last modified time needs to be preserved.
+CollectionKeyManager.prototype = {
+
+ /**
+ * Generate a new CollectionKeyManager that has the same attributes
+ * as this one.
+ */
+ clone() {
+ const newCollections = {};
+ for (let c in this._collections) {
+ newCollections[c] = this._collections[c];
+ }
+
+ return new CollectionKeyManager(this.lastModified, this._default, newCollections);
+ },
+
+ // Return information about old vs new keys:
+ // * same: true if two collections are equal
+ // * changed: an array of collection names that changed.
+ _compareKeyBundleCollections: function _compareKeyBundleCollections(m1, m2) {
+ let changed = [];
+
+ function process(m1, m2) {
+ for (let k1 in m1) {
+ let v1 = m1[k1];
+ let v2 = m2[k1];
+ if (!(v1 && v2 && v1.equals(v2)))
+ changed.push(k1);
+ }
+ }
+
+ // Diffs both ways.
+ process(m1, m2);
+ process(m2, m1);
+
+ // Return a sorted, unique array.
+ changed.sort();
+ let last;
+ changed = changed.filter(x => (x != last) && (last = x));
+ return {same: changed.length == 0,
+ changed: changed};
+ },
+
+ get isClear() {
+ return !this._default;
+ },
+
+ clear: function clear() {
+ this._log.info("Clearing collection keys...");
+ this.lastModified = 0;
+ this._collections = {};
+ this._default = null;
+ },
+
+ keyForCollection: function(collection) {
+ if (collection && this._collections[collection])
+ return this._collections[collection];
+
+ return this._default;
+ },
+
+ /**
+ * If `collections` (an array of strings) is provided, iterate
+ * over it and generate random keys for each collection.
+ * Create a WBO for the given data.
+ */
+ _makeWBO: function(collections, defaultBundle) {
+ let wbo = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
+ let c = {};
+ for (let k in collections) {
+ c[k] = collections[k].keyPairB64;
+ }
+ wbo.cleartext = {
+ "default": defaultBundle ? defaultBundle.keyPairB64 : null,
+ "collections": c,
+ "collection": CRYPTO_COLLECTION,
+ "id": KEYS_WBO
+ };
+ return wbo;
+ },
+
+ /**
+ * Create a WBO for the current keys.
+ */
+ asWBO: function(collection, id) {
+ return this._makeWBO(this._collections, this._default);
+ },
+
+ /**
+ * Compute a new default key, and new keys for any specified collections.
+ */
+ newKeys: function(collections) {
+ let newDefaultKeyBundle = this.newDefaultKeyBundle();
+
+ let newColls = {};
+ if (collections) {
+ collections.forEach(function (c) {
+ let b = new BulkKeyBundle(c);
+ b.generateRandom();
+ newColls[c] = b;
+ });
+ }
+ return [newDefaultKeyBundle, newColls];
+ },
+
+ /**
+ * Generates new keys, but does not replace our local copy. Use this to
+ * verify an upload before storing.
+ */
+ generateNewKeysWBO: function(collections) {
+ let newDefaultKey, newColls;
+ [newDefaultKey, newColls] = this.newKeys(collections);
+
+ return this._makeWBO(newColls, newDefaultKey);
+ },
+
+ /**
+ * Create a new default key.
+ *
+ * @returns {BulkKeyBundle}
+ */
+ newDefaultKeyBundle() {
+ const key = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
+ key.generateRandom();
+ return key;
+ },
+
+ /**
+ * Create a new default key and store it as this._default, since without one you cannot use setContents.
+ */
+ generateDefaultKey() {
+ this._default = this.newDefaultKeyBundle();
+ },
+
+ /**
+ * Return true if keys are already present for each of the given
+ * collections.
+ */
+ hasKeysFor(collections) {
+ // We can't use filter() here because sometimes collections is an iterator.
+ for (let collection of collections) {
+ if (!this._collections[collection]) {
+ return false;
+ }
+ }
+ return true;
+ },
+
+ /**
+ * Return a new CollectionKeyManager that has keys for each of the
+ * given collections (creating new ones for collections where we
+ * don't already have keys).
+ */
+ ensureKeysFor(collections) {
+ const newKeys = Object.assign({}, this._collections);
+ for (let c of collections) {
+ if (newKeys[c]) {
+ continue; // don't replace existing keys
+ }
+
+ const b = new BulkKeyBundle(c);
+ b.generateRandom();
+ newKeys[c] = b;
+ }
+ return new CollectionKeyManager(this.lastModified, this._default, newKeys);
+ },
+
+ // Take the fetched info/collections WBO, checking the change
+ // time of the crypto collection.
+ updateNeeded: function(info_collections) {
+
+ this._log.info("Testing for updateNeeded. Last modified: " + this.lastModified);
+
+ // No local record of modification time? Need an update.
+ if (!this.lastModified)
+ return true;
+
+ // No keys on the server? We need an update, though our
+ // update handling will be a little more drastic...
+ if (!(CRYPTO_COLLECTION in info_collections))
+ return true;
+
+ // Otherwise, we need an update if our modification time is stale.
+ return (info_collections[CRYPTO_COLLECTION] > this.lastModified);
+ },
+
+ //
+ // Set our keys and modified time to the values fetched from the server.
+ // Returns one of three values:
+ //
+ // * If the default key was modified, return true.
+ // * If the default key was not modified, but per-collection keys were,
+ // return an array of such.
+ // * Otherwise, return false -- we were up-to-date.
+ //
+ setContents: function setContents(payload, modified) {
+
+ let self = this;
+
+ this._log.info("Setting collection keys contents. Our last modified: " +
+ this.lastModified + ", input modified: " + modified + ".");
+
+ if (!payload)
+ throw "No payload in CollectionKeyManager.setContents().";
+
+ if (!payload.default) {
+ this._log.warn("No downloaded default key: this should not occur.");
+ this._log.warn("Not clearing local keys.");
+ throw "No default key in CollectionKeyManager.setContents(). Cannot proceed.";
+ }
+
+ // Process the incoming default key.
+ let b = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
+ b.keyPairB64 = payload.default;
+ let newDefault = b;
+
+ // Process the incoming collections.
+ let newCollections = {};
+ if ("collections" in payload) {
+ this._log.info("Processing downloaded per-collection keys.");
+ let colls = payload.collections;
+ for (let k in colls) {
+ let v = colls[k];
+ if (v) {
+ let keyObj = new BulkKeyBundle(k);
+ keyObj.keyPairB64 = v;
+ newCollections[k] = keyObj;
+ }
+ }
+ }
+
+ // Check to see if these are already our keys.
+ let sameDefault = (this._default && this._default.equals(newDefault));
+ let collComparison = this._compareKeyBundleCollections(newCollections, this._collections);
+ let sameColls = collComparison.same;
+
+ if (sameDefault && sameColls) {
+ self._log.info("New keys are the same as our old keys!");
+ if (modified) {
+ self._log.info("Bumped local modified time.");
+ self.lastModified = modified;
+ }
+ return false;
+ }
+
+ // Make sure things are nice and tidy before we set.
+ this.clear();
+
+ this._log.info("Saving downloaded keys.");
+ this._default = newDefault;
+ this._collections = newCollections;
+
+ // Always trust the server.
+ if (modified) {
+ self._log.info("Bumping last modified to " + modified);
+ self.lastModified = modified;
+ }
+
+ return sameDefault ? collComparison.changed : true;
+ },
+
+ updateContents: function updateContents(syncKeyBundle, storage_keys) {
+ let log = this._log;
+ log.info("Updating collection keys...");
+
+ // storage_keys is a WBO, fetched from storage/crypto/keys.
+ // Its payload is the default key, and a map of collections to keys.
+ // We lazily compute the key objects from the strings we're given.
+
+ let payload;
+ try {
+ payload = storage_keys.decrypt(syncKeyBundle);
+ } catch (ex) {
+ log.warn("Got exception \"" + ex + "\" decrypting storage keys with sync key.");
+ log.info("Aborting updateContents. Rethrowing.");
+ throw ex;
+ }
+
+ let r = this.setContents(payload, storage_keys.modified);
+ log.info("Collection keys updated.");
+ return r;
+ }
+}
+
+this.Collection = function Collection(uri, recordObj, service) {
+ if (!service) {
+ throw new Error("Collection constructor requires a service.");
+ }
+
+ Resource.call(this, uri);
+
+ // This is a bit hacky, but gets the job done.
+ let res = service.resource(uri);
+ this.authenticator = res.authenticator;
+
+ this._recordObj = recordObj;
+ this._service = service;
+
+ this._full = false;
+ this._ids = null;
+ this._limit = 0;
+ this._older = 0;
+ this._newer = 0;
+ this._data = [];
+ // optional members used by batch upload operations.
+ this._batch = null;
+ this._commit = false;
+ // Used for batch download operations -- note that this is explicitly an
+ // opaque value and not (necessarily) a number.
+ this._offset = null;
+}
+Collection.prototype = {
+ __proto__: Resource.prototype,
+ _logName: "Sync.Collection",
+
+ _rebuildURL: function Coll__rebuildURL() {
+ // XXX should consider what happens if it's not a URL...
+ this.uri.QueryInterface(Ci.nsIURL);
+
+ let args = [];
+ if (this.older)
+ args.push('older=' + this.older);
+ else if (this.newer) {
+ args.push('newer=' + this.newer);
+ }
+ if (this.full)
+ args.push('full=1');
+ if (this.sort)
+ args.push('sort=' + this.sort);
+ if (this.ids != null)
+ args.push("ids=" + this.ids);
+ if (this.limit > 0 && this.limit != Infinity)
+ args.push("limit=" + this.limit);
+ if (this._batch)
+ args.push("batch=" + encodeURIComponent(this._batch));
+ if (this._commit)
+ args.push("commit=true");
+ if (this._offset)
+ args.push("offset=" + encodeURIComponent(this._offset));
+
+ this.uri.query = (args.length > 0)? '?' + args.join('&') : '';
+ },
+
+ // get full items
+ get full() { return this._full; },
+ set full(value) {
+ this._full = value;
+ this._rebuildURL();
+ },
+
+ // Apply the action to a certain set of ids
+ get ids() { return this._ids; },
+ set ids(value) {
+ this._ids = value;
+ this._rebuildURL();
+ },
+
+ // Limit how many records to get
+ get limit() { return this._limit; },
+ set limit(value) {
+ this._limit = value;
+ this._rebuildURL();
+ },
+
+ // get only items modified before some date
+ get older() { return this._older; },
+ set older(value) {
+ this._older = value;
+ this._rebuildURL();
+ },
+
+ // get only items modified since some date
+ get newer() { return this._newer; },
+ set newer(value) {
+ this._newer = value;
+ this._rebuildURL();
+ },
+
+ // get items sorted by some criteria. valid values:
+ // oldest (oldest first)
+ // newest (newest first)
+ // index
+ get sort() { return this._sort; },
+ set sort(value) {
+ this._sort = value;
+ this._rebuildURL();
+ },
+
+ get offset() { return this._offset; },
+ set offset(value) {
+ this._offset = value;
+ this._rebuildURL();
+ },
+
+ // Set information about the batch for this request.
+ get batch() { return this._batch; },
+ set batch(value) {
+ this._batch = value;
+ this._rebuildURL();
+ },
+
+ get commit() { return this._commit; },
+ set commit(value) {
+ this._commit = value && true;
+ this._rebuildURL();
+ },
+
+ // Similar to get(), but will page through the items `batchSize` at a time,
+ // deferring calling the record handler until we've gotten them all.
+ //
+ // Returns the last response processed, and doesn't run the record handler
+ // on any items if a non-success status is received while downloading the
+ // records (or if a network error occurs).
+ getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) {
+ let totalLimit = Number(this.limit) || Infinity;
+ if (batchSize <= 0 || batchSize >= totalLimit) {
+ // Invalid batch sizes should arguably be an error, but they're easy to handle
+ return this.get();
+ }
+
+ if (!this.full) {
+ throw new Error("getBatched is unimplemented for guid-only GETs");
+ }
+
+ // _onComplete and _onProgress are reset after each `get` by AsyncResource.
+ // We overwrite _onRecord to something that stores the data in an array
+ // until the end.
+ let { _onComplete, _onProgress, _onRecord } = this;
+ let recordBuffer = [];
+ let resp;
+ try {
+ this._onRecord = r => recordBuffer.push(r);
+ let lastModifiedTime;
+ this.limit = batchSize;
+
+ do {
+ this._onProgress = _onProgress;
+ this._onComplete = _onComplete;
+ if (batchSize + recordBuffer.length > totalLimit) {
+ this.limit = totalLimit - recordBuffer.length;
+ }
+ this._log.trace("Performing batched GET", { limit: this.limit, offset: this.offset });
+ // Actually perform the request
+ resp = this.get();
+ if (!resp.success) {
+ break;
+ }
+
+ // Initialize last modified, or check that something broken isn't happening.
+ let lastModified = resp.headers["x-last-modified"];
+ if (!lastModifiedTime) {
+ lastModifiedTime = lastModified;
+ this.setHeader("X-If-Unmodified-Since", lastModified);
+ } else if (lastModified != lastModifiedTime) {
+ // Should be impossible -- We'd get a 412 in this case.
+ throw new Error("X-Last-Modified changed in the middle of a download batch! " +
+ `${lastModified} => ${lastModifiedTime}`)
+ }
+
+ // If this is missing, we're finished.
+ this.offset = resp.headers["x-weave-next-offset"];
+ } while (this.offset && totalLimit > recordBuffer.length);
+ } finally {
+ // Ensure we undo any temporary state so that subsequent calls to get()
+ // or getBatched() work properly. We do this before calling the record
+ // handler so that we can more convincingly pretend to be a normal get()
+ // call. Note: we're resetting these to the values they had before this
+ // function was called.
+ this._onRecord = _onRecord;
+ this._limit = totalLimit;
+ this._offset = null;
+ delete this._headers["x-if-unmodified-since"];
+ this._rebuildURL();
+ }
+ if (resp.success && Async.checkAppReady()) {
+ // call the original _onRecord (e.g. the user supplied record handler)
+ // for each record we've stored
+ for (let record of recordBuffer) {
+ this._onRecord(record);
+ }
+ }
+ return resp;
+ },
+
+ set recordHandler(onRecord) {
+ // Save this because onProgress is called with this as the ChannelListener
+ let coll = this;
+
+ // Switch to newline separated records for incremental parsing
+ coll.setHeader("Accept", "application/newlines");
+
+ this._onRecord = onRecord;
+
+ this._onProgress = function() {
+ let newline;
+ while ((newline = this._data.indexOf("\n")) > 0) {
+ // Split the json record from the rest of the data
+ let json = this._data.slice(0, newline);
+ this._data = this._data.slice(newline + 1);
+
+ // Deserialize a record from json and give it to the callback
+ let record = new coll._recordObj();
+ record.deserialize(json);
+ coll._onRecord(record);
+ }
+ };
+ },
+
+ // This object only supports posting via the postQueue object.
+ post() {
+ throw new Error("Don't directly post to a collection - use newPostQueue instead");
+ },
+
+ newPostQueue(log, timestamp, postCallback) {
+ let poster = (data, headers, batch, commit) => {
+ this.batch = batch;
+ this.commit = commit;
+ for (let [header, value] of headers) {
+ this.setHeader(header, value);
+ }
+ return Resource.prototype.post.call(this, data);
+ }
+ let getConfig = (name, defaultVal) => {
+ if (this._service.serverConfiguration && this._service.serverConfiguration.hasOwnProperty(name)) {
+ return this._service.serverConfiguration[name];
+ }
+ return defaultVal;
+ }
+
+ let config = {
+ max_post_bytes: getConfig("max_post_bytes", MAX_UPLOAD_BYTES),
+ max_post_records: getConfig("max_post_records", MAX_UPLOAD_RECORDS),
+
+ max_batch_bytes: getConfig("max_total_bytes", Infinity),
+ max_batch_records: getConfig("max_total_records", Infinity),
+ }
+
+ // Handle config edge cases
+ if (config.max_post_records <= 0) { config.max_post_records = MAX_UPLOAD_RECORDS; }
+ if (config.max_batch_records <= 0) { config.max_batch_records = Infinity; }
+ if (config.max_post_bytes <= 0) { config.max_post_bytes = MAX_UPLOAD_BYTES; }
+ if (config.max_batch_bytes <= 0) { config.max_batch_bytes = Infinity; }
+
+ // Max size of BSO payload is 256k. This assumes at most 4k of overhead,
+ // which sounds like plenty. If the server says it can't handle this, we
+ // might have valid records we can't sync, so we give up on syncing.
+ let requiredMax = 260 * 1024;
+ if (config.max_post_bytes < requiredMax) {
+ this._log.error("Server configuration max_post_bytes is too low", config);
+ throw new Error("Server configuration max_post_bytes is too low");
+ }
+
+ return new PostQueue(poster, timestamp, config, log, postCallback);
+ },
+};
+
+/* A helper to manage the posting of records while respecting the various
+ size limits.
+
+ This supports the concept of a server-side "batch". The general idea is:
+ * We queue as many records as allowed in memory, then make a single POST.
+ * This first POST (optionally) gives us a batch ID, which we use for
+ all subsequent posts, until...
+ * At some point we hit a batch-maximum, and jump through a few hoops to
+ commit the current batch (ie, all previous POSTs) and start a new one.
+ * Eventually commit the final batch.
+
+ In most cases we expect there to be exactly 1 batch consisting of possibly
+ multiple POSTs.
+*/
+function PostQueue(poster, timestamp, config, log, postCallback) {
+ // The "post" function we should use when it comes time to do the post.
+ this.poster = poster;
+ this.log = log;
+
+ // The config we use. We expect it to have fields "max_post_records",
+ // "max_batch_records", "max_post_bytes", and "max_batch_bytes"
+ this.config = config;
+
+ // The callback we make with the response when we do get around to making the
+ // post (which could be during any of the enqueue() calls or the final flush())
+ // This callback may be called multiple times and must not add new items to
+ // the queue.
+ // The second argument passed to this callback is a boolean value that is true
+ // if we're in the middle of a batch, and false if either the batch is
+ // complete, or it's a post to a server that does not understand batching.
+ this.postCallback = postCallback;
+
+ // The string where we are capturing the stringified version of the records
+ // queued so far. It will always be invalid JSON as it is always missing the
+ // closing bracket.
+ this.queued = "";
+
+ // The number of records we've queued so far but are yet to POST.
+ this.numQueued = 0;
+
+ // The number of records/bytes we've processed in previous POSTs for our
+ // current batch. Does *not* include records currently queued for the next POST.
+ this.numAlreadyBatched = 0;
+ this.bytesAlreadyBatched = 0;
+
+ // The ID of our current batch. Can be undefined (meaning we are yet to make
+ // the first post of a patch, so don't know if we have a batch), null (meaning
+ // we've made the first post but the server response indicated no batching
+ // semantics), otherwise we have made the first post and it holds the batch ID
+ // returned from the server.
+ this.batchID = undefined;
+
+ // Time used for X-If-Unmodified-Since -- should be the timestamp from the last GET.
+ this.lastModified = timestamp;
+}
+
+PostQueue.prototype = {
+ enqueue(record) {
+ // We want to ensure the record has a .toJSON() method defined - even
+ // though JSON.stringify() would implicitly call it, the stringify might
+ // still work even if it isn't defined, which isn't what we want.
+ let jsonRepr = record.toJSON();
+ if (!jsonRepr) {
+ throw new Error("You must only call this with objects that explicitly support JSON");
+ }
+ let bytes = JSON.stringify(jsonRepr);
+
+ // Do a flush if we can't add this record without exceeding our single-request
+ // limits, or without exceeding the total limit for a single batch.
+ let newLength = this.queued.length + bytes.length + 2; // extras for leading "[" / "," and trailing "]"
+
+ let maxAllowedBytes = Math.min(256 * 1024, this.config.max_post_bytes);
+
+ let postSizeExceeded = this.numQueued >= this.config.max_post_records ||
+ newLength >= maxAllowedBytes;
+
+ let batchSizeExceeded = (this.numQueued + this.numAlreadyBatched) >= this.config.max_batch_records ||
+ (newLength + this.bytesAlreadyBatched) >= this.config.max_batch_bytes;
+
+ let singleRecordTooBig = bytes.length + 2 > maxAllowedBytes;
+
+ if (postSizeExceeded || batchSizeExceeded) {
+ this.log.trace(`PostQueue flushing due to postSizeExceeded=${postSizeExceeded}, batchSizeExceeded=${batchSizeExceeded}` +
+ `, max_batch_bytes: ${this.config.max_batch_bytes}, max_post_bytes: ${this.config.max_post_bytes}`);
+
+ if (singleRecordTooBig) {
+ return { enqueued: false, error: new Error("Single record too large to submit to server") };
+ }
+
+ // We need to write the queue out before handling this one, but we only
+ // commit the batch (and thus start a new one) if the batch is full.
+ // Note that if a single record is too big for the batch or post, then
+ // the batch may be empty, and so we don't flush in that case.
+ if (this.numQueued) {
+ this.flush(batchSizeExceeded || singleRecordTooBig);
+ }
+ }
+ // Either a ',' or a '[' depending on whether this is the first record.
+ this.queued += this.numQueued ? "," : "[";
+ this.queued += bytes;
+ this.numQueued++;
+ return { enqueued: true };
+ },
+
+ flush(finalBatchPost) {
+ if (!this.queued) {
+ // nothing queued - we can't be in a batch, and something has gone very
+ // bad if we think we are.
+ if (this.batchID) {
+ throw new Error(`Flush called when no queued records but we are in a batch ${this.batchID}`);
+ }
+ return;
+ }
+ // the batch query-param and headers we'll send.
+ let batch;
+ let headers = [];
+ if (this.batchID === undefined) {
+ // First commit in a (possible) batch.
+ batch = "true";
+ } else if (this.batchID) {
+ // We have an existing batch.
+ batch = this.batchID;
+ } else {
+ // Not the first post and we know we have no batch semantics.
+ batch = null;
+ }
+
+ headers.push(["x-if-unmodified-since", this.lastModified]);
+
+ this.log.info(`Posting ${this.numQueued} records of ${this.queued.length+1} bytes with batch=${batch}`);
+ let queued = this.queued + "]";
+ if (finalBatchPost) {
+ this.bytesAlreadyBatched = 0;
+ this.numAlreadyBatched = 0;
+ } else {
+ this.bytesAlreadyBatched += queued.length;
+ this.numAlreadyBatched += this.numQueued;
+ }
+ this.queued = "";
+ this.numQueued = 0;
+ let response = this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null));
+
+ if (!response.success) {
+ this.log.trace("Server error response during a batch", response);
+ // not clear what we should do here - we expect the consumer of this to
+ // abort by throwing in the postCallback below.
+ return this.postCallback(response, !finalBatchPost);
+ }
+
+ if (finalBatchPost) {
+ this.log.trace("Committed batch", this.batchID);
+ this.batchID = undefined; // we are now in "first post for the batch" state.
+ this.lastModified = response.headers["x-last-modified"];
+ return this.postCallback(response, false);
+ }
+
+ if (response.status != 202) {
+ if (this.batchID) {
+ throw new Error("Server responded non-202 success code while a batch was in progress");
+ }
+ this.batchID = null; // no batch semantics are in place.
+ this.lastModified = response.headers["x-last-modified"];
+ return this.postCallback(response, false);
+ }
+
+ // this response is saying the server has batch semantics - we should
+ // always have a batch ID in the response.
+ let responseBatchID = response.obj.batch;
+ this.log.trace("Server responsed 202 with batch", responseBatchID);
+ if (!responseBatchID) {
+ this.log.error("Invalid server response: 202 without a batch ID", response);
+ throw new Error("Invalid server response: 202 without a batch ID");
+ }
+
+ if (this.batchID === undefined) {
+ this.batchID = responseBatchID;
+ if (!this.lastModified) {
+ this.lastModified = response.headers["x-last-modified"];
+ if (!this.lastModified) {
+ throw new Error("Batch response without x-last-modified");
+ }
+ }
+ }
+
+ if (this.batchID != responseBatchID) {
+ throw new Error(`Invalid client/server batch state - client has ${this.batchID}, server has ${responseBatchID}`);
+ }
+
+ this.postCallback(response, true);
+ },
+}
diff --git a/services/sync/modules/resource.js b/services/sync/modules/resource.js
new file mode 100644
index 000000000..bf7066b9f
--- /dev/null
+++ b/services/sync/modules/resource.js
@@ -0,0 +1,669 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = [
+ "AsyncResource",
+ "Resource"
+];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cr = Components.results;
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/NetUtil.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/util.js");
+
+const DEFAULT_LOAD_FLAGS =
+ // Always validate the cache:
+ Ci.nsIRequest.LOAD_BYPASS_CACHE |
+ Ci.nsIRequest.INHIBIT_CACHING |
+ // Don't send user cookies over the wire (Bug 644734).
+ Ci.nsIRequest.LOAD_ANONYMOUS;
+
+/*
+ * AsyncResource represents a remote network resource, identified by a URI.
+ * Create an instance like so:
+ *
+ * let resource = new AsyncResource("http://foobar.com/path/to/resource");
+ *
+ * The 'resource' object has the following methods to issue HTTP requests
+ * of the corresponding HTTP methods:
+ *
+ * get(callback)
+ * put(data, callback)
+ * post(data, callback)
+ * delete(callback)
+ *
+ * 'callback' is a function with the following signature:
+ *
+ * function callback(error, result) {...}
+ *
+ * 'error' will be null on successful requests. Likewise, result will not be
+ * passed (=undefined) when an error occurs. Note that this is independent of
+ * the status of the HTTP response.
+ */
+this.AsyncResource = function AsyncResource(uri) {
+ this._log = Log.repository.getLogger(this._logName);
+ this._log.level =
+ Log.Level[Svc.Prefs.get("log.logger.network.resources")];
+ this.uri = uri;
+ this._headers = {};
+ this._onComplete = Utils.bind2(this, this._onComplete);
+}
+AsyncResource.prototype = {
+ _logName: "Sync.AsyncResource",
+
+ // ** {{{ AsyncResource.serverTime }}} **
+ //
+ // Caches the latest server timestamp (X-Weave-Timestamp header).
+ serverTime: null,
+
+ /**
+ * Callback to be invoked at request time to add authentication details.
+ *
+ * By default, a global authenticator is provided. If this is set, it will
+ * be used instead of the global one.
+ */
+ authenticator: null,
+
+ // Wait 5 minutes before killing a request.
+ ABORT_TIMEOUT: 300000,
+
+ // ** {{{ AsyncResource.headers }}} **
+ //
+ // Headers to be included when making a request for the resource.
+ // Note: Header names should be all lower case, there's no explicit
+ // check for duplicates due to case!
+ get headers() {
+ return this._headers;
+ },
+ set headers(value) {
+ this._headers = value;
+ },
+ setHeader: function Res_setHeader(header, value) {
+ this._headers[header.toLowerCase()] = value;
+ },
+ get headerNames() {
+ return Object.keys(this.headers);
+ },
+
+ // ** {{{ AsyncResource.uri }}} **
+ //
+ // URI representing this resource.
+ get uri() {
+ return this._uri;
+ },
+ set uri(value) {
+ if (typeof value == 'string')
+ this._uri = CommonUtils.makeURI(value);
+ else
+ this._uri = value;
+ },
+
+ // ** {{{ AsyncResource.spec }}} **
+ //
+ // Get the string representation of the URI.
+ get spec() {
+ if (this._uri)
+ return this._uri.spec;
+ return null;
+ },
+
+ // ** {{{ AsyncResource.data }}} **
+ //
+ // Get and set the data encapulated in the resource.
+ _data: null,
+ get data() {
+ return this._data;
+ },
+ set data(value) {
+ this._data = value;
+ },
+
+ // ** {{{ AsyncResource._createRequest }}} **
+ //
+ // This method returns a new IO Channel for requests to be made
+ // through. It is never called directly, only {{{_doRequest}}} uses it
+ // to obtain a request channel.
+ //
+ _createRequest: function Res__createRequest(method) {
+ let channel = NetUtil.newChannel({uri: this.spec, loadUsingSystemPrincipal: true})
+ .QueryInterface(Ci.nsIRequest)
+ .QueryInterface(Ci.nsIHttpChannel);
+
+ channel.loadFlags |= DEFAULT_LOAD_FLAGS;
+
+ // Setup a callback to handle channel notifications.
+ let listener = new ChannelNotificationListener(this.headerNames);
+ channel.notificationCallbacks = listener;
+
+ // Compose a UA string fragment from the various available identifiers.
+ if (Svc.Prefs.get("sendVersionInfo", true)) {
+ channel.setRequestHeader("user-agent", Utils.userAgent, false);
+ }
+
+ let headers = this.headers;
+
+ if (this.authenticator) {
+ let result = this.authenticator(this, method);
+ if (result && result.headers) {
+ for (let [k, v] of Object.entries(result.headers)) {
+ headers[k.toLowerCase()] = v;
+ }
+ }
+ } else {
+ this._log.debug("No authenticator found.");
+ }
+
+ for (let [key, value] of Object.entries(headers)) {
+ if (key == 'authorization')
+ this._log.trace("HTTP Header " + key + ": ***** (suppressed)");
+ else
+ this._log.trace("HTTP Header " + key + ": " + headers[key]);
+ channel.setRequestHeader(key, headers[key], false);
+ }
+ return channel;
+ },
+
+ _onProgress: function Res__onProgress(channel) {},
+
+ _doRequest: function _doRequest(action, data, callback) {
+ this._log.trace("In _doRequest.");
+ this._callback = callback;
+ let channel = this._createRequest(action);
+
+ if ("undefined" != typeof(data))
+ this._data = data;
+
+ // PUT and POST are treated differently because they have payload data.
+ if ("PUT" == action || "POST" == action) {
+ // Convert non-string bodies into JSON
+ if (this._data.constructor.toString() != String)
+ this._data = JSON.stringify(this._data);
+
+ this._log.debug(action + " Length: " + this._data.length);
+ this._log.trace(action + " Body: " + this._data);
+
+ let type = ('content-type' in this._headers) ?
+ this._headers['content-type'] : 'text/plain';
+
+ let stream = Cc["@mozilla.org/io/string-input-stream;1"].
+ createInstance(Ci.nsIStringInputStream);
+ stream.setData(this._data, this._data.length);
+
+ channel.QueryInterface(Ci.nsIUploadChannel);
+ channel.setUploadStream(stream, type, this._data.length);
+ }
+
+ // Setup a channel listener so that the actual network operation
+ // is performed asynchronously.
+ let listener = new ChannelListener(this._onComplete, this._onProgress,
+ this._log, this.ABORT_TIMEOUT);
+ channel.requestMethod = action;
+ try {
+ channel.asyncOpen2(listener);
+ } catch (ex) {
+ // asyncOpen2 can throw in a bunch of cases -- e.g., a forbidden port.
+ this._log.warn("Caught an error in asyncOpen2", ex);
+ CommonUtils.nextTick(callback.bind(this, ex));
+ }
+ },
+
+ _onComplete: function _onComplete(error, data, channel) {
+ this._log.trace("In _onComplete. Error is " + error + ".");
+
+ if (error) {
+ this._callback(error);
+ return;
+ }
+
+ this._data = data;
+ let action = channel.requestMethod;
+
+ this._log.trace("Channel: " + channel);
+ this._log.trace("Action: " + action);
+
+ // Process status and success first. This way a problem with headers
+ // doesn't fail to include accurate status information.
+ let status = 0;
+ let success = false;
+
+ try {
+ status = channel.responseStatus;
+ success = channel.requestSucceeded; // HTTP status.
+
+ this._log.trace("Status: " + status);
+ this._log.trace("Success: " + success);
+
+ // Log the status of the request.
+ let mesg = [action, success ? "success" : "fail", status,
+ channel.URI.spec].join(" ");
+ this._log.debug("mesg: " + mesg);
+
+ if (mesg.length > 200)
+ mesg = mesg.substr(0, 200) + "…";
+ this._log.debug(mesg);
+
+ // Additionally give the full response body when Trace logging.
+ if (this._log.level <= Log.Level.Trace)
+ this._log.trace(action + " body: " + data);
+
+ } catch(ex) {
+ // Got a response, but an exception occurred during processing.
+ // This shouldn't occur.
+ this._log.warn("Caught unexpected exception in _oncomplete", ex);
+ }
+
+ // Process headers. They can be empty, or the call can otherwise fail, so
+ // put this in its own try block.
+ let headers = {};
+ try {
+ this._log.trace("Processing response headers.");
+
+ // Read out the response headers if available.
+ channel.visitResponseHeaders({
+ visitHeader: function visitHeader(header, value) {
+ headers[header.toLowerCase()] = value;
+ }
+ });
+
+ // This is a server-side safety valve to allow slowing down
+ // clients without hurting performance.
+ if (headers["x-weave-backoff"]) {
+ let backoff = headers["x-weave-backoff"];
+ this._log.debug("Got X-Weave-Backoff: " + backoff);
+ Observers.notify("weave:service:backoff:interval",
+ parseInt(backoff, 10));
+ }
+
+ if (success && headers["x-weave-quota-remaining"]) {
+ Observers.notify("weave:service:quota:remaining",
+ parseInt(headers["x-weave-quota-remaining"], 10));
+ }
+
+ let contentLength = headers["content-length"];
+ if (success && contentLength && data &&
+ contentLength != data.length) {
+ this._log.warn("The response body's length of: " + data.length +
+ " doesn't match the header's content-length of: " +
+ contentLength + ".");
+ }
+ } catch (ex) {
+ this._log.debug("Caught exception visiting headers in _onComplete", ex);
+ }
+
+ let ret = new String(data);
+ ret.url = channel.URI.spec;
+ ret.status = status;
+ ret.success = success;
+ ret.headers = headers;
+
+ if (!success) {
+ this._log.warn(`${action} request to ${ret.url} failed with status ${status}`);
+ }
+ // Make a lazy getter to convert the json response into an object.
+ // Note that this can cause a parse error to be thrown far away from the
+ // actual fetch, so be warned!
+ XPCOMUtils.defineLazyGetter(ret, "obj", function() {
+ try {
+ return JSON.parse(ret);
+ } catch (ex) {
+ this._log.warn("Got exception parsing response body", ex);
+ // Stringify to avoid possibly printing non-printable characters.
+ this._log.debug("Parse fail: Response body starts: \"" +
+ JSON.stringify((ret + "").slice(0, 100)) +
+ "\".");
+ throw ex;
+ }
+ }.bind(this));
+
+ this._callback(null, ret);
+ },
+
+ get: function get(callback) {
+ this._doRequest("GET", undefined, callback);
+ },
+
+ put: function put(data, callback) {
+ if (typeof data == "function")
+ [data, callback] = [undefined, data];
+ this._doRequest("PUT", data, callback);
+ },
+
+ post: function post(data, callback) {
+ if (typeof data == "function")
+ [data, callback] = [undefined, data];
+ this._doRequest("POST", data, callback);
+ },
+
+ delete: function delete_(callback) {
+ this._doRequest("DELETE", undefined, callback);
+ }
+};
+
+
+/*
+ * Represent a remote network resource, identified by a URI, with a
+ * synchronous API.
+ *
+ * 'Resource' is not recommended for new code. Use the asynchronous API of
+ * 'AsyncResource' instead.
+ */
+this.Resource = function Resource(uri) {
+ AsyncResource.call(this, uri);
+}
+Resource.prototype = {
+
+ __proto__: AsyncResource.prototype,
+
+ _logName: "Sync.Resource",
+
+ // ** {{{ Resource._request }}} **
+ //
+ // Perform a particular HTTP request on the resource. This method
+ // is never called directly, but is used by the high-level
+ // {{{get}}}, {{{put}}}, {{{post}}} and {{delete}} methods.
+ _request: function Res__request(action, data) {
+ let cb = Async.makeSyncCallback();
+ function callback(error, ret) {
+ if (error)
+ cb.throw(error);
+ else
+ cb(ret);
+ }
+
+ // The channel listener might get a failure code
+ try {
+ this._doRequest(action, data, callback);
+ return Async.waitForSyncCallback(cb);
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.warn("${action} request to ${url} failed: ${ex}",
+ { action, url: this.uri.spec, ex });
+ // Combine the channel stack with this request stack. Need to create
+ // a new error object for that.
+ let error = Error(ex.message);
+ error.result = ex.result;
+ let chanStack = [];
+ if (ex.stack)
+ chanStack = ex.stack.trim().split(/\n/).slice(1);
+ let requestStack = error.stack.split(/\n/).slice(1);
+
+ // Strip out the args for the last 2 frames because they're usually HUGE!
+ for (let i = 0; i <= 1; i++)
+ requestStack[i] = requestStack[i].replace(/\(".*"\)@/, "(...)@");
+
+ error.stack = chanStack.concat(requestStack).join("\n");
+ throw error;
+ }
+ },
+
+ // ** {{{ Resource.get }}} **
+ //
+ // Perform an asynchronous HTTP GET for this resource.
+ get: function Res_get() {
+ return this._request("GET");
+ },
+
+ // ** {{{ Resource.put }}} **
+ //
+ // Perform a HTTP PUT for this resource.
+ put: function Res_put(data) {
+ return this._request("PUT", data);
+ },
+
+ // ** {{{ Resource.post }}} **
+ //
+ // Perform a HTTP POST for this resource.
+ post: function Res_post(data) {
+ return this._request("POST", data);
+ },
+
+ // ** {{{ Resource.delete }}} **
+ //
+ // Perform a HTTP DELETE for this resource.
+ delete: function Res_delete() {
+ return this._request("DELETE");
+ }
+};
+
+// = ChannelListener =
+//
+// This object implements the {{{nsIStreamListener}}} interface
+// and is called as the network operation proceeds.
+function ChannelListener(onComplete, onProgress, logger, timeout) {
+ this._onComplete = onComplete;
+ this._onProgress = onProgress;
+ this._log = logger;
+ this._timeout = timeout;
+ this.delayAbort();
+}
+ChannelListener.prototype = {
+
+ onStartRequest: function Channel_onStartRequest(channel) {
+ this._log.trace("onStartRequest called for channel " + channel + ".");
+
+ try {
+ channel.QueryInterface(Ci.nsIHttpChannel);
+ } catch (ex) {
+ this._log.error("Unexpected error: channel is not a nsIHttpChannel!");
+ channel.cancel(Cr.NS_BINDING_ABORTED);
+ return;
+ }
+
+ // Save the latest server timestamp when possible.
+ try {
+ AsyncResource.serverTime = channel.getResponseHeader("X-Weave-Timestamp") - 0;
+ }
+ catch(ex) {}
+
+ this._log.trace("onStartRequest: " + channel.requestMethod + " " +
+ channel.URI.spec);
+ this._data = '';
+ this.delayAbort();
+ },
+
+ onStopRequest: function Channel_onStopRequest(channel, context, status) {
+ // Clear the abort timer now that the channel is done.
+ this.abortTimer.clear();
+
+ if (!this._onComplete) {
+ this._log.error("Unexpected error: _onComplete not defined in onStopRequest.");
+ this._onProgress = null;
+ return;
+ }
+
+ try {
+ channel.QueryInterface(Ci.nsIHttpChannel);
+ } catch (ex) {
+ this._log.error("Unexpected error: channel is not a nsIHttpChannel!");
+
+ this._onComplete(ex, this._data, channel);
+ this._onComplete = this._onProgress = null;
+ return;
+ }
+
+ let statusSuccess = Components.isSuccessCode(status);
+ let uri = channel && channel.URI && channel.URI.spec || "<unknown>";
+ this._log.trace("Channel for " + channel.requestMethod + " " + uri + ": " +
+ "isSuccessCode(" + status + ")? " + statusSuccess);
+
+ if (this._data == '') {
+ this._data = null;
+ }
+
+ // Pass back the failure code and stop execution. Use Components.Exception()
+ // instead of Error() so the exception is QI-able and can be passed across
+ // XPCOM borders while preserving the status code.
+ if (!statusSuccess) {
+ let message = Components.Exception("", status).name;
+ let error = Components.Exception(message, status);
+
+ this._onComplete(error, undefined, channel);
+ this._onComplete = this._onProgress = null;
+ return;
+ }
+
+ this._log.trace("Channel: flags = " + channel.loadFlags +
+ ", URI = " + uri +
+ ", HTTP success? " + channel.requestSucceeded);
+ this._onComplete(null, this._data, channel);
+ this._onComplete = this._onProgress = null;
+ },
+
+ onDataAvailable: function Channel_onDataAvail(req, cb, stream, off, count) {
+ let siStream;
+ try {
+ siStream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(Ci.nsIScriptableInputStream);
+ siStream.init(stream);
+ } catch (ex) {
+ this._log.warn("Exception creating nsIScriptableInputStream", ex);
+ this._log.debug("Parameters: " + req.URI.spec + ", " + stream + ", " + off + ", " + count);
+ // Cannot proceed, so rethrow and allow the channel to cancel itself.
+ throw ex;
+ }
+
+ try {
+ this._data += siStream.read(count);
+ } catch (ex) {
+ this._log.warn("Exception thrown reading " + count + " bytes from " + siStream + ".");
+ throw ex;
+ }
+
+ try {
+ this._onProgress();
+ } catch (ex) {
+ if (Async.isShutdownException(ex)) {
+ throw ex;
+ }
+ this._log.warn("Got exception calling onProgress handler during fetch of "
+ + req.URI.spec, ex);
+ this._log.trace("Rethrowing; expect a failure code from the HTTP channel.");
+ throw ex;
+ }
+
+ this.delayAbort();
+ },
+
+ /**
+ * Create or push back the abort timer that kills this request.
+ */
+ delayAbort: function delayAbort() {
+ try {
+ CommonUtils.namedTimer(this.abortRequest, this._timeout, this, "abortTimer");
+ } catch (ex) {
+ this._log.warn("Got exception extending abort timer", ex);
+ }
+ },
+
+ abortRequest: function abortRequest() {
+ // Ignore any callbacks if we happen to get any now
+ this.onStopRequest = function() {};
+ let error = Components.Exception("Aborting due to channel inactivity.",
+ Cr.NS_ERROR_NET_TIMEOUT);
+ if (!this._onComplete) {
+ this._log.error("Unexpected error: _onComplete not defined in " +
+ "abortRequest.");
+ return;
+ }
+ this._onComplete(error);
+ }
+};
+
+/**
+ * This class handles channel notification events.
+ *
+ * An instance of this class is bound to each created channel.
+ *
+ * Optionally pass an array of header names. Each header named
+ * in this array will be copied between the channels in the
+ * event of a redirect.
+ */
+function ChannelNotificationListener(headersToCopy) {
+ this._headersToCopy = headersToCopy;
+
+ this._log = Log.repository.getLogger(this._logName);
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.network.resources")];
+}
+ChannelNotificationListener.prototype = {
+ _logName: "Sync.Resource",
+
+ getInterface: function(aIID) {
+ return this.QueryInterface(aIID);
+ },
+
+ QueryInterface: function(aIID) {
+ if (aIID.equals(Ci.nsIBadCertListener2) ||
+ aIID.equals(Ci.nsIInterfaceRequestor) ||
+ aIID.equals(Ci.nsISupports) ||
+ aIID.equals(Ci.nsIChannelEventSink))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+ notifyCertProblem: function certProblem(socketInfo, sslStatus, targetHost) {
+ let log = Log.repository.getLogger("Sync.CertListener");
+ log.warn("Invalid HTTPS certificate encountered!");
+
+ // This suppresses the UI warning only. The request is still cancelled.
+ return true;
+ },
+
+ asyncOnChannelRedirect:
+ function asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) {
+
+ let oldSpec = (oldChannel && oldChannel.URI) ? oldChannel.URI.spec : "<undefined>";
+ let newSpec = (newChannel && newChannel.URI) ? newChannel.URI.spec : "<undefined>";
+ this._log.debug("Channel redirect: " + oldSpec + ", " + newSpec + ", " + flags);
+
+ this._log.debug("Ensuring load flags are set.");
+ newChannel.loadFlags |= DEFAULT_LOAD_FLAGS;
+
+ // For internal redirects, copy the headers that our caller set.
+ try {
+ if ((flags & Ci.nsIChannelEventSink.REDIRECT_INTERNAL) &&
+ newChannel.URI.equals(oldChannel.URI)) {
+ this._log.debug("Copying headers for safe internal redirect.");
+
+ // QI the channel so we can set headers on it.
+ try {
+ newChannel.QueryInterface(Ci.nsIHttpChannel);
+ } catch (ex) {
+ this._log.error("Unexpected error: channel is not a nsIHttpChannel!");
+ throw ex;
+ }
+
+ for (let header of this._headersToCopy) {
+ let value = oldChannel.getRequestHeader(header);
+ if (value) {
+ let printed = (header == "authorization") ? "****" : value;
+ this._log.debug("Header: " + header + " = " + printed);
+ newChannel.setRequestHeader(header, value, false);
+ } else {
+ this._log.warn("No value for header " + header);
+ }
+ }
+ }
+ } catch (ex) {
+ this._log.error("Error copying headers", ex);
+ }
+
+ // We let all redirects proceed.
+ try {
+ callback.onRedirectVerifyCallback(Cr.NS_OK);
+ } catch (ex) {
+ this._log.error("onRedirectVerifyCallback threw!", ex);
+ }
+ }
+};
diff --git a/services/sync/modules/rest.js b/services/sync/modules/rest.js
new file mode 100644
index 000000000..94c096dba
--- /dev/null
+++ b/services/sync/modules/rest.js
@@ -0,0 +1,90 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/constants.js");
+
+this.EXPORTED_SYMBOLS = ["SyncStorageRequest"];
+
+const STORAGE_REQUEST_TIMEOUT = 5 * 60; // 5 minutes
+
+/**
+ * RESTRequest variant for use against a Sync storage server.
+ */
+this.SyncStorageRequest = function SyncStorageRequest(uri) {
+ RESTRequest.call(this, uri);
+
+ this.authenticator = null;
+}
+SyncStorageRequest.prototype = {
+
+ __proto__: RESTRequest.prototype,
+
+ _logName: "Sync.StorageRequest",
+
+ /**
+ * Wait 5 minutes before killing a request.
+ */
+ timeout: STORAGE_REQUEST_TIMEOUT,
+
+ dispatch: function dispatch(method, data, onComplete, onProgress) {
+ // Compose a UA string fragment from the various available identifiers.
+ if (Svc.Prefs.get("sendVersionInfo", true)) {
+ this.setHeader("user-agent", Utils.userAgent);
+ }
+
+ if (this.authenticator) {
+ this.authenticator(this);
+ } else {
+ this._log.debug("No authenticator found.");
+ }
+
+ return RESTRequest.prototype.dispatch.apply(this, arguments);
+ },
+
+ onStartRequest: function onStartRequest(channel) {
+ RESTRequest.prototype.onStartRequest.call(this, channel);
+ if (this.status == this.ABORTED) {
+ return;
+ }
+
+ let headers = this.response.headers;
+ // Save the latest server timestamp when possible.
+ if (headers["x-weave-timestamp"]) {
+ SyncStorageRequest.serverTime = parseFloat(headers["x-weave-timestamp"]);
+ }
+
+ // This is a server-side safety valve to allow slowing down
+ // clients without hurting performance.
+ if (headers["x-weave-backoff"]) {
+ Svc.Obs.notify("weave:service:backoff:interval",
+ parseInt(headers["x-weave-backoff"], 10));
+ }
+
+ if (this.response.success && headers["x-weave-quota-remaining"]) {
+ Svc.Obs.notify("weave:service:quota:remaining",
+ parseInt(headers["x-weave-quota-remaining"], 10));
+ }
+ },
+
+ onStopRequest: function onStopRequest(channel, context, statusCode) {
+ if (this.status != this.ABORTED) {
+ let resp = this.response;
+ let contentLength = resp.headers ? resp.headers["content-length"] : "";
+
+ if (resp.success && contentLength &&
+ contentLength != resp.body.length) {
+ this._log.warn("The response body's length of: " + resp.body.length +
+ " doesn't match the header's content-length of: " +
+ contentLength + ".");
+ }
+ }
+
+ RESTRequest.prototype.onStopRequest.apply(this, arguments);
+ }
+};
diff --git a/services/sync/modules/service.js b/services/sync/modules/service.js
new file mode 100644
index 000000000..5fc0fa7a7
--- /dev/null
+++ b/services/sync/modules/service.js
@@ -0,0 +1,1756 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["Service"];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cr = Components.results;
+var Cu = Components.utils;
+
+// How long before refreshing the cluster
+const CLUSTER_BACKOFF = 5 * 60 * 1000; // 5 minutes
+
+// How long a key to generate from an old passphrase.
+const PBKDF2_KEY_BYTES = 16;
+
+const CRYPTO_COLLECTION = "crypto";
+const KEYS_WBO = "keys";
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/rest.js");
+Cu.import("resource://services-sync/stages/enginesync.js");
+Cu.import("resource://services-sync/stages/declined.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/telemetry.js");
+Cu.import("resource://services-sync/userapi.js");
+Cu.import("resource://services-sync/util.js");
+
+const ENGINE_MODULES = {
+ Addons: "addons.js",
+ Bookmarks: "bookmarks.js",
+ Form: "forms.js",
+ History: "history.js",
+ Password: "passwords.js",
+ Prefs: "prefs.js",
+ Tab: "tabs.js",
+ ExtensionStorage: "extension-storage.js",
+};
+
+const STORAGE_INFO_TYPES = [INFO_COLLECTIONS,
+ INFO_COLLECTION_USAGE,
+ INFO_COLLECTION_COUNTS,
+ INFO_QUOTA];
+
+function Sync11Service() {
+ this._notify = Utils.notify("weave:service:");
+}
+Sync11Service.prototype = {
+
+ _lock: Utils.lock,
+ _locked: false,
+ _loggedIn: false,
+
+ infoURL: null,
+ storageURL: null,
+ metaURL: null,
+ cryptoKeyURL: null,
+ // The cluster URL comes via the ClusterManager object, which in the FxA
+ // world is ebbedded in the token returned from the token server.
+ _clusterURL: null,
+
+ get serverURL() {
+ return Svc.Prefs.get("serverURL");
+ },
+ set serverURL(value) {
+ if (!value.endsWith("/")) {
+ value += "/";
+ }
+
+ // Only do work if it's actually changing
+ if (value == this.serverURL)
+ return;
+
+ Svc.Prefs.set("serverURL", value);
+
+ // A new server most likely uses a different cluster, so clear that.
+ this._clusterURL = null;
+ },
+
+ get clusterURL() {
+ return this._clusterURL || "";
+ },
+ set clusterURL(value) {
+ if (value != null && typeof value != "string") {
+ throw new Error("cluster must be a string, got " + (typeof value));
+ }
+ this._clusterURL = value;
+ this._updateCachedURLs();
+ },
+
+ get miscAPI() {
+ // Append to the serverURL if it's a relative fragment
+ let misc = Svc.Prefs.get("miscURL");
+ if (misc.indexOf(":") == -1)
+ misc = this.serverURL + misc;
+ return misc + MISC_API_VERSION + "/";
+ },
+
+ /**
+ * The URI of the User API service.
+ *
+ * This is the base URI of the service as applicable to all users up to
+ * and including the server version path component, complete with trailing
+ * forward slash.
+ */
+ get userAPIURI() {
+ // Append to the serverURL if it's a relative fragment.
+ let url = Svc.Prefs.get("userURL");
+ if (!url.includes(":")) {
+ url = this.serverURL + url;
+ }
+
+ return url + USER_API_VERSION + "/";
+ },
+
+ get pwResetURL() {
+ return this.serverURL + "weave-password-reset";
+ },
+
+ get syncID() {
+ // Generate a random syncID id we don't have one
+ let syncID = Svc.Prefs.get("client.syncID", "");
+ return syncID == "" ? this.syncID = Utils.makeGUID() : syncID;
+ },
+ set syncID(value) {
+ Svc.Prefs.set("client.syncID", value);
+ },
+
+ get isLoggedIn() { return this._loggedIn; },
+
+ get locked() { return this._locked; },
+ lock: function lock() {
+ if (this._locked)
+ return false;
+ this._locked = true;
+ return true;
+ },
+ unlock: function unlock() {
+ this._locked = false;
+ },
+
+ // A specialized variant of Utils.catch.
+ // This provides a more informative error message when we're already syncing:
+ // see Bug 616568.
+ _catch: function _catch(func) {
+ function lockExceptions(ex) {
+ if (Utils.isLockException(ex)) {
+ // This only happens if we're syncing already.
+ this._log.info("Cannot start sync: already syncing?");
+ }
+ }
+
+ return Utils.catch.call(this, func, lockExceptions);
+ },
+
+ get userBaseURL() {
+ if (!this._clusterManager) {
+ return null;
+ }
+ return this._clusterManager.getUserBaseURL();
+ },
+
+ _updateCachedURLs: function _updateCachedURLs() {
+ // Nothing to cache yet if we don't have the building blocks
+ if (!this.clusterURL || !this.identity.username) {
+ // Also reset all other URLs used by Sync to ensure we aren't accidentally
+ // using one cached earlier - if there's no cluster URL any cached ones
+ // are invalid.
+ this.infoURL = undefined;
+ this.storageURL = undefined;
+ this.metaURL = undefined;
+ this.cryptoKeysURL = undefined;
+ return;
+ }
+
+ this._log.debug("Caching URLs under storage user base: " + this.userBaseURL);
+
+ // Generate and cache various URLs under the storage API for this user
+ this.infoURL = this.userBaseURL + "info/collections";
+ this.storageURL = this.userBaseURL + "storage/";
+ this.metaURL = this.storageURL + "meta/global";
+ this.cryptoKeysURL = this.storageURL + CRYPTO_COLLECTION + "/" + KEYS_WBO;
+ },
+
+ _checkCrypto: function _checkCrypto() {
+ let ok = false;
+
+ try {
+ let iv = Svc.Crypto.generateRandomIV();
+ if (iv.length == 24)
+ ok = true;
+
+ } catch (e) {
+ this._log.debug("Crypto check failed: " + e);
+ }
+
+ return ok;
+ },
+
+ /**
+ * Here is a disgusting yet reasonable way of handling HMAC errors deep in
+ * the guts of Sync. The astute reader will note that this is a hacky way of
+ * implementing something like continuable conditions.
+ *
+ * A handler function is glued to each engine. If the engine discovers an
+ * HMAC failure, we fetch keys from the server and update our keys, just as
+ * we would on startup.
+ *
+ * If our key collection changed, we signal to the engine (via our return
+ * value) that it should retry decryption.
+ *
+ * If our key collection did not change, it means that we already had the
+ * correct keys... and thus a different client has the wrong ones. Reupload
+ * the bundle that we fetched, which will bump the modified time on the
+ * server and (we hope) prompt a broken client to fix itself.
+ *
+ * We keep track of the time at which we last applied this reasoning, because
+ * thrashing doesn't solve anything. We keep a reasonable interval between
+ * these remedial actions.
+ */
+ lastHMACEvent: 0,
+
+ /*
+ * Returns whether to try again.
+ */
+ handleHMACEvent: function handleHMACEvent() {
+ let now = Date.now();
+
+ // Leave a sizable delay between HMAC recovery attempts. This gives us
+ // time for another client to fix themselves if we touch the record.
+ if ((now - this.lastHMACEvent) < HMAC_EVENT_INTERVAL)
+ return false;
+
+ this._log.info("Bad HMAC event detected. Attempting recovery " +
+ "or signaling to other clients.");
+
+ // Set the last handled time so that we don't act again.
+ this.lastHMACEvent = now;
+
+ // Fetch keys.
+ let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
+ try {
+ let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+
+ // Save out the ciphertext for when we reupload. If there's a bug in
+ // CollectionKeyManager, this will prevent us from uploading junk.
+ let cipherText = cryptoKeys.ciphertext;
+
+ if (!cryptoResp.success) {
+ this._log.warn("Failed to download keys.");
+ return false;
+ }
+
+ let keysChanged = this.handleFetchedKeys(this.identity.syncKeyBundle,
+ cryptoKeys, true);
+ if (keysChanged) {
+ // Did they change? If so, carry on.
+ this._log.info("Suggesting retry.");
+ return true; // Try again.
+ }
+
+ // If not, reupload them and continue the current sync.
+ cryptoKeys.ciphertext = cipherText;
+ cryptoKeys.cleartext = null;
+
+ let uploadResp = cryptoKeys.upload(this.resource(this.cryptoKeysURL));
+ if (uploadResp.success)
+ this._log.info("Successfully re-uploaded keys. Continuing sync.");
+ else
+ this._log.warn("Got error response re-uploading keys. " +
+ "Continuing sync; let's try again later.");
+
+ return false; // Don't try again: same keys.
+
+ } catch (ex) {
+ this._log.warn("Got exception \"" + ex + "\" fetching and handling " +
+ "crypto keys. Will try again later.");
+ return false;
+ }
+ },
+
+ handleFetchedKeys: function handleFetchedKeys(syncKey, cryptoKeys, skipReset) {
+ // Don't want to wipe if we're just starting up!
+ let wasBlank = this.collectionKeys.isClear;
+ let keysChanged = this.collectionKeys.updateContents(syncKey, cryptoKeys);
+
+ if (keysChanged && !wasBlank) {
+ this._log.debug("Keys changed: " + JSON.stringify(keysChanged));
+
+ if (!skipReset) {
+ this._log.info("Resetting client to reflect key change.");
+
+ if (keysChanged.length) {
+ // Collection keys only. Reset individual engines.
+ this.resetClient(keysChanged);
+ }
+ else {
+ // Default key changed: wipe it all.
+ this.resetClient();
+ }
+
+ this._log.info("Downloaded new keys, client reset. Proceeding.");
+ }
+ return true;
+ }
+ return false;
+ },
+
+ /**
+ * Prepare to initialize the rest of Weave after waiting a little bit
+ */
+ onStartup: function onStartup() {
+ this._migratePrefs();
+
+ // Status is instantiated before us and is the first to grab an instance of
+ // the IdentityManager. We use that instance because IdentityManager really
+ // needs to be a singleton. Ideally, the longer-lived object would spawn
+ // this service instance.
+ if (!Status || !Status._authManager) {
+ throw new Error("Status or Status._authManager not initialized.");
+ }
+
+ this.status = Status;
+ this.identity = Status._authManager;
+ this.collectionKeys = new CollectionKeyManager();
+
+ this.errorHandler = new ErrorHandler(this);
+
+ this._log = Log.repository.getLogger("Sync.Service");
+ this._log.level =
+ Log.Level[Svc.Prefs.get("log.logger.service.main")];
+
+ this._log.info("Loading Weave " + WEAVE_VERSION);
+
+ this._clusterManager = this.identity.createClusterManager(this);
+ this.recordManager = new RecordManager(this);
+
+ this.enabled = true;
+
+ this._registerEngines();
+
+ let ua = Cc["@mozilla.org/network/protocol;1?name=http"].
+ getService(Ci.nsIHttpProtocolHandler).userAgent;
+ this._log.info(ua);
+
+ if (!this._checkCrypto()) {
+ this.enabled = false;
+ this._log.info("Could not load the Weave crypto component. Disabling " +
+ "Weave, since it will not work correctly.");
+ }
+
+ Svc.Obs.add("weave:service:setup-complete", this);
+ Svc.Obs.add("sync:collection_changed", this); // Pulled from FxAccountsCommon
+ Svc.Prefs.observe("engine.", this);
+
+ this.scheduler = new SyncScheduler(this);
+
+ if (!this.enabled) {
+ this._log.info("Firefox Sync disabled.");
+ }
+
+ this._updateCachedURLs();
+
+ let status = this._checkSetup();
+ if (status != STATUS_DISABLED && status != CLIENT_NOT_CONFIGURED) {
+ Svc.Obs.notify("weave:engine:start-tracking");
+ }
+
+ // Send an event now that Weave service is ready. We don't do this
+ // synchronously so that observers can import this module before
+ // registering an observer.
+ Utils.nextTick(function onNextTick() {
+ this.status.ready = true;
+
+ // UI code uses the flag on the XPCOM service so it doesn't have
+ // to load a bunch of modules.
+ let xps = Cc["@mozilla.org/weave/service;1"]
+ .getService(Ci.nsISupports)
+ .wrappedJSObject;
+ xps.ready = true;
+
+ Svc.Obs.notify("weave:service:ready");
+ }.bind(this));
+ },
+
+ _checkSetup: function _checkSetup() {
+ if (!this.enabled) {
+ return this.status.service = STATUS_DISABLED;
+ }
+ return this.status.checkSetup();
+ },
+
+ _migratePrefs: function _migratePrefs() {
+ // Migrate old debugLog prefs.
+ let logLevel = Svc.Prefs.get("log.appender.debugLog");
+ if (logLevel) {
+ Svc.Prefs.set("log.appender.file.level", logLevel);
+ Svc.Prefs.reset("log.appender.debugLog");
+ }
+ if (Svc.Prefs.get("log.appender.debugLog.enabled")) {
+ Svc.Prefs.set("log.appender.file.logOnSuccess", true);
+ Svc.Prefs.reset("log.appender.debugLog.enabled");
+ }
+
+ // Migrate old extensions.weave.* prefs if we haven't already tried.
+ if (Svc.Prefs.get("migrated", false))
+ return;
+
+ // Grab the list of old pref names
+ let oldPrefBranch = "extensions.weave.";
+ let oldPrefNames = Cc["@mozilla.org/preferences-service;1"].
+ getService(Ci.nsIPrefService).
+ getBranch(oldPrefBranch).
+ getChildList("", {});
+
+ // Map each old pref to the current pref branch
+ let oldPref = new Preferences(oldPrefBranch);
+ for (let pref of oldPrefNames)
+ Svc.Prefs.set(pref, oldPref.get(pref));
+
+ // Remove all the old prefs and remember that we've migrated
+ oldPref.resetBranch("");
+ Svc.Prefs.set("migrated", true);
+ },
+
+ /**
+ * Register the built-in engines for certain applications
+ */
+ _registerEngines: function _registerEngines() {
+ this.engineManager = new EngineManager(this);
+
+ let engines = [];
+ // Applications can provide this preference (comma-separated list)
+ // to specify which engines should be registered on startup.
+ let pref = Svc.Prefs.get("registerEngines");
+ if (pref) {
+ engines = pref.split(",");
+ }
+
+ let declined = [];
+ pref = Svc.Prefs.get("declinedEngines");
+ if (pref) {
+ declined = pref.split(",");
+ }
+
+ this.clientsEngine = new ClientEngine(this);
+
+ for (let name of engines) {
+ if (!name in ENGINE_MODULES) {
+ this._log.info("Do not know about engine: " + name);
+ continue;
+ }
+
+ let ns = {};
+ try {
+ Cu.import("resource://services-sync/engines/" + ENGINE_MODULES[name], ns);
+
+ let engineName = name + "Engine";
+ if (!(engineName in ns)) {
+ this._log.warn("Could not find exported engine instance: " + engineName);
+ continue;
+ }
+
+ this.engineManager.register(ns[engineName]);
+ } catch (ex) {
+ this._log.warn("Could not register engine " + name, ex);
+ }
+ }
+
+ this.engineManager.setDeclined(declined);
+ },
+
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsIObserver,
+ Ci.nsISupportsWeakReference]),
+
+ // nsIObserver
+
+ observe: function observe(subject, topic, data) {
+ switch (topic) {
+ // Ideally this observer should be in the SyncScheduler, but it would require
+ // some work to know about the sync specific engines. We should move this there once it does.
+ case "sync:collection_changed":
+ if (data.includes("clients")) {
+ this.sync([]); // [] = clients collection only
+ }
+ break;
+ case "weave:service:setup-complete":
+ let status = this._checkSetup();
+ if (status != STATUS_DISABLED && status != CLIENT_NOT_CONFIGURED)
+ Svc.Obs.notify("weave:engine:start-tracking");
+ break;
+ case "nsPref:changed":
+ if (this._ignorePrefObserver)
+ return;
+ let engine = data.slice((PREFS_BRANCH + "engine.").length);
+ this._handleEngineStatusChanged(engine);
+ break;
+ }
+ },
+
+ _handleEngineStatusChanged: function handleEngineDisabled(engine) {
+ this._log.trace("Status for " + engine + " engine changed.");
+ if (Svc.Prefs.get("engineStatusChanged." + engine, false)) {
+ // The enabled status being changed back to what it was before.
+ Svc.Prefs.reset("engineStatusChanged." + engine);
+ } else {
+ // Remember that the engine status changed locally until the next sync.
+ Svc.Prefs.set("engineStatusChanged." + engine, true);
+ }
+ },
+
+ /**
+ * Obtain a Resource instance with authentication credentials.
+ */
+ resource: function resource(url) {
+ let res = new Resource(url);
+ res.authenticator = this.identity.getResourceAuthenticator();
+
+ return res;
+ },
+
+ /**
+ * Obtain a SyncStorageRequest instance with authentication credentials.
+ */
+ getStorageRequest: function getStorageRequest(url) {
+ let request = new SyncStorageRequest(url);
+ request.authenticator = this.identity.getRESTRequestAuthenticator();
+
+ return request;
+ },
+
+ /**
+ * Perform the info fetch as part of a login or key fetch, or
+ * inside engine sync.
+ */
+ _fetchInfo: function (url) {
+ let infoURL = url || this.infoURL;
+
+ this._log.trace("In _fetchInfo: " + infoURL);
+ let info;
+ try {
+ info = this.resource(infoURL).get();
+ } catch (ex) {
+ this.errorHandler.checkServerError(ex);
+ throw ex;
+ }
+
+ // Always check for errors; this is also where we look for X-Weave-Alert.
+ this.errorHandler.checkServerError(info);
+ if (!info.success) {
+ this._log.error("Aborting sync: failed to get collections.")
+ throw info;
+ }
+ return info;
+ },
+
+ verifyAndFetchSymmetricKeys: function verifyAndFetchSymmetricKeys(infoResponse) {
+
+ this._log.debug("Fetching and verifying -- or generating -- symmetric keys.");
+
+ // Don't allow empty/missing passphrase.
+ // Furthermore, we assume that our sync key is already upgraded,
+ // and fail if that assumption is invalidated.
+
+ if (!this.identity.syncKey) {
+ this.status.login = LOGIN_FAILED_NO_PASSPHRASE;
+ this.status.sync = CREDENTIALS_CHANGED;
+ return false;
+ }
+
+ let syncKeyBundle = this.identity.syncKeyBundle;
+ if (!syncKeyBundle) {
+ this._log.error("Sync Key Bundle not set. Invalid Sync Key?");
+
+ this.status.login = LOGIN_FAILED_INVALID_PASSPHRASE;
+ this.status.sync = CREDENTIALS_CHANGED;
+ return false;
+ }
+
+ try {
+ if (!infoResponse)
+ infoResponse = this._fetchInfo(); // Will throw an exception on failure.
+
+ // This only applies when the server is already at version 4.
+ if (infoResponse.status != 200) {
+ this._log.warn("info/collections returned non-200 response. Failing key fetch.");
+ this.status.login = LOGIN_FAILED_SERVER_ERROR;
+ this.errorHandler.checkServerError(infoResponse);
+ return false;
+ }
+
+ let infoCollections = infoResponse.obj;
+
+ this._log.info("Testing info/collections: " + JSON.stringify(infoCollections));
+
+ if (this.collectionKeys.updateNeeded(infoCollections)) {
+ this._log.info("collection keys reports that a key update is needed.");
+
+ // Don't always set to CREDENTIALS_CHANGED -- we will probably take care of this.
+
+ // Fetch storage/crypto/keys.
+ let cryptoKeys;
+
+ if (infoCollections && (CRYPTO_COLLECTION in infoCollections)) {
+ try {
+ cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
+ let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+
+ if (cryptoResp.success) {
+ let keysChanged = this.handleFetchedKeys(syncKeyBundle, cryptoKeys);
+ return true;
+ }
+ else if (cryptoResp.status == 404) {
+ // On failure, ask to generate new keys and upload them.
+ // Fall through to the behavior below.
+ this._log.warn("Got 404 for crypto/keys, but 'crypto' in info/collections. Regenerating.");
+ cryptoKeys = null;
+ }
+ else {
+ // Some other problem.
+ this.status.login = LOGIN_FAILED_SERVER_ERROR;
+ this.errorHandler.checkServerError(cryptoResp);
+ this._log.warn("Got status " + cryptoResp.status + " fetching crypto keys.");
+ return false;
+ }
+ }
+ catch (ex) {
+ this._log.warn("Got exception \"" + ex + "\" fetching cryptoKeys.");
+ // TODO: Um, what exceptions might we get here? Should we re-throw any?
+
+ // One kind of exception: HMAC failure.
+ if (Utils.isHMACMismatch(ex)) {
+ this.status.login = LOGIN_FAILED_INVALID_PASSPHRASE;
+ this.status.sync = CREDENTIALS_CHANGED;
+ }
+ else {
+ // In the absence of further disambiguation or more precise
+ // failure constants, just report failure.
+ this.status.login = LOGIN_FAILED;
+ }
+ return false;
+ }
+ }
+ else {
+ this._log.info("... 'crypto' is not a reported collection. Generating new keys.");
+ }
+
+ if (!cryptoKeys) {
+ this._log.info("No keys! Generating new ones.");
+
+ // Better make some and upload them, and wipe the server to ensure
+ // consistency. This is all achieved via _freshStart.
+ // If _freshStart fails to clear the server or upload keys, it will
+ // throw.
+ this._freshStart();
+ return true;
+ }
+
+ // Last-ditch case.
+ return false;
+ }
+ else {
+ // No update needed: we're good!
+ return true;
+ }
+
+ } catch (ex) {
+ // This means no keys are present, or there's a network error.
+ this._log.debug("Failed to fetch and verify keys", ex);
+ this.errorHandler.checkServerError(ex);
+ return false;
+ }
+ },
+
+ verifyLogin: function verifyLogin(allow40XRecovery = true) {
+ if (!this.identity.username) {
+ this._log.warn("No username in verifyLogin.");
+ this.status.login = LOGIN_FAILED_NO_USERNAME;
+ return false;
+ }
+
+ // Attaching auth credentials to a request requires access to
+ // passwords, which means that Resource.get can throw MP-related
+ // exceptions!
+ // So we ask the identity to verify the login state after unlocking the
+ // master password (ie, this call is expected to prompt for MP unlock
+ // if necessary) while we still have control.
+ let cb = Async.makeSpinningCallback();
+ this.identity.unlockAndVerifyAuthState().then(
+ result => cb(null, result),
+ cb
+ );
+ let unlockedState = cb.wait();
+ this._log.debug("Fetching unlocked auth state returned " + unlockedState);
+ if (unlockedState != STATUS_OK) {
+ this.status.login = unlockedState;
+ return false;
+ }
+
+ try {
+ // Make sure we have a cluster to verify against.
+ // This is a little weird, if we don't get a node we pretend
+ // to succeed, since that probably means we just don't have storage.
+ if (this.clusterURL == "" && !this._clusterManager.setCluster()) {
+ this.status.sync = NO_SYNC_NODE_FOUND;
+ return true;
+ }
+
+ // Fetch collection info on every startup.
+ let test = this.resource(this.infoURL).get();
+
+ switch (test.status) {
+ case 200:
+ // The user is authenticated.
+
+ // We have no way of verifying the passphrase right now,
+ // so wait until remoteSetup to do so.
+ // Just make the most trivial checks.
+ if (!this.identity.syncKey) {
+ this._log.warn("No passphrase in verifyLogin.");
+ this.status.login = LOGIN_FAILED_NO_PASSPHRASE;
+ return false;
+ }
+
+ // Go ahead and do remote setup, so that we can determine
+ // conclusively that our passphrase is correct.
+ if (this._remoteSetup(test)) {
+ // Username/password verified.
+ this.status.login = LOGIN_SUCCEEDED;
+ return true;
+ }
+
+ this._log.warn("Remote setup failed.");
+ // Remote setup must have failed.
+ return false;
+
+ case 401:
+ this._log.warn("401: login failed.");
+ // Fall through to the 404 case.
+
+ case 404:
+ // Check that we're verifying with the correct cluster
+ if (allow40XRecovery && this._clusterManager.setCluster()) {
+ return this.verifyLogin(false);
+ }
+
+ // We must have the right cluster, but the server doesn't expect us.
+ // The implications of this depend on the identity being used - for
+ // the legacy identity, it's an authoritatively "incorrect password",
+ // (ie, LOGIN_FAILED_LOGIN_REJECTED) but for FxA it probably means
+ // "transient error fetching auth token".
+ this.status.login = this.identity.loginStatusFromVerification404();
+ return false;
+
+ default:
+ // Server didn't respond with something that we expected
+ this.status.login = LOGIN_FAILED_SERVER_ERROR;
+ this.errorHandler.checkServerError(test);
+ return false;
+ }
+ } catch (ex) {
+ // Must have failed on some network issue
+ this._log.debug("verifyLogin failed", ex);
+ this.status.login = LOGIN_FAILED_NETWORK_ERROR;
+ this.errorHandler.checkServerError(ex);
+ return false;
+ }
+ },
+
+ generateNewSymmetricKeys: function generateNewSymmetricKeys() {
+ this._log.info("Generating new keys WBO...");
+ let wbo = this.collectionKeys.generateNewKeysWBO();
+ this._log.info("Encrypting new key bundle.");
+ wbo.encrypt(this.identity.syncKeyBundle);
+
+ this._log.info("Uploading...");
+ let uploadRes = wbo.upload(this.resource(this.cryptoKeysURL));
+ if (uploadRes.status != 200) {
+ this._log.warn("Got status " + uploadRes.status + " uploading new keys. What to do? Throw!");
+ this.errorHandler.checkServerError(uploadRes);
+ throw new Error("Unable to upload symmetric keys.");
+ }
+ this._log.info("Got status " + uploadRes.status + " uploading keys.");
+ let serverModified = uploadRes.obj; // Modified timestamp according to server.
+ this._log.debug("Server reports crypto modified: " + serverModified);
+
+ // Now verify that info/collections shows them!
+ this._log.debug("Verifying server collection records.");
+ let info = this._fetchInfo();
+ this._log.debug("info/collections is: " + info);
+
+ if (info.status != 200) {
+ this._log.warn("Non-200 info/collections response. Aborting.");
+ throw new Error("Unable to upload symmetric keys.");
+ }
+
+ info = info.obj;
+ if (!(CRYPTO_COLLECTION in info)) {
+ this._log.error("Consistency failure: info/collections excludes " +
+ "crypto after successful upload.");
+ throw new Error("Symmetric key upload failed.");
+ }
+
+ // Can't check against local modified: clock drift.
+ if (info[CRYPTO_COLLECTION] < serverModified) {
+ this._log.error("Consistency failure: info/collections crypto entry " +
+ "is stale after successful upload.");
+ throw new Error("Symmetric key upload failed.");
+ }
+
+ // Doesn't matter if the timestamp is ahead.
+
+ // Download and install them.
+ let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
+ let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+ if (cryptoResp.status != 200) {
+ this._log.warn("Failed to download keys.");
+ throw new Error("Symmetric key download failed.");
+ }
+ let keysChanged = this.handleFetchedKeys(this.identity.syncKeyBundle,
+ cryptoKeys, true);
+ if (keysChanged) {
+ this._log.info("Downloaded keys differed, as expected.");
+ }
+ },
+
+ changePassword: function changePassword(newPassword) {
+ let client = new UserAPI10Client(this.userAPIURI);
+ let cb = Async.makeSpinningCallback();
+ client.changePassword(this.identity.username,
+ this.identity.basicPassword, newPassword, cb);
+
+ try {
+ cb.wait();
+ } catch (ex) {
+ this._log.debug("Password change failed", ex);
+ return false;
+ }
+
+ // Save the new password for requests and login manager.
+ this.identity.basicPassword = newPassword;
+ this.persistLogin();
+ return true;
+ },
+
+ changePassphrase: function changePassphrase(newphrase) {
+ return this._catch(function doChangePasphrase() {
+ /* Wipe. */
+ this.wipeServer();
+
+ this.logout();
+
+ /* Set this so UI is updated on next run. */
+ this.identity.syncKey = newphrase;
+ this.persistLogin();
+
+ /* We need to re-encrypt everything, so reset. */
+ this.resetClient();
+ this.collectionKeys.clear();
+
+ /* Login and sync. This also generates new keys. */
+ this.sync();
+
+ Svc.Obs.notify("weave:service:change-passphrase", true);
+
+ return true;
+ })();
+ },
+
+ startOver: function startOver() {
+ this._log.trace("Invoking Service.startOver.");
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ this.status.resetSync();
+
+ // Deletion doesn't make sense if we aren't set up yet!
+ if (this.clusterURL != "") {
+ // Clear client-specific data from the server, including disabled engines.
+ for (let engine of [this.clientsEngine].concat(this.engineManager.getAll())) {
+ try {
+ engine.removeClientData();
+ } catch(ex) {
+ this._log.warn(`Deleting client data for ${engine.name} failed`, ex);
+ }
+ }
+ this._log.debug("Finished deleting client data.");
+ } else {
+ this._log.debug("Skipping client data removal: no cluster URL.");
+ }
+
+ // We want let UI consumers of the following notification know as soon as
+ // possible, so let's fake for the CLIENT_NOT_CONFIGURED status for now
+ // by emptying the passphrase (we still need the password).
+ this._log.info("Service.startOver dropping sync key and logging out.");
+ this.identity.resetSyncKey();
+ this.status.login = LOGIN_FAILED_NO_PASSPHRASE;
+ this.logout();
+ Svc.Obs.notify("weave:service:start-over");
+
+ // Reset all engines and clear keys.
+ this.resetClient();
+ this.collectionKeys.clear();
+ this.status.resetBackoff();
+
+ // Reset Weave prefs.
+ this._ignorePrefObserver = true;
+ Svc.Prefs.resetBranch("");
+ this._ignorePrefObserver = false;
+ this.clusterURL = null;
+
+ Svc.Prefs.set("lastversion", WEAVE_VERSION);
+
+ this.identity.deleteSyncCredentials();
+
+ // If necessary, reset the identity manager, then re-initialize it so the
+ // FxA manager is used. This is configurable via a pref - mainly for tests.
+ let keepIdentity = false;
+ try {
+ keepIdentity = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity");
+ } catch (_) { /* no such pref */ }
+ if (keepIdentity) {
+ Svc.Obs.notify("weave:service:start-over:finish");
+ return;
+ }
+
+ try {
+ this.identity.finalize();
+ // an observer so the FxA migration code can take some action before
+ // the new identity is created.
+ Svc.Obs.notify("weave:service:start-over:init-identity");
+ this.identity.username = "";
+ this.status.__authManager = null;
+ this.identity = Status._authManager;
+ this._clusterManager = this.identity.createClusterManager(this);
+ Svc.Obs.notify("weave:service:start-over:finish");
+ } catch (err) {
+ this._log.error("startOver failed to re-initialize the identity manager: " + err);
+ // Still send the observer notification so the current state is
+ // reflected in the UI.
+ Svc.Obs.notify("weave:service:start-over:finish");
+ }
+ },
+
+ persistLogin: function persistLogin() {
+ try {
+ this.identity.persistCredentials(true);
+ } catch (ex) {
+ this._log.info("Unable to persist credentials: " + ex);
+ }
+ },
+
+ login: function login(username, password, passphrase) {
+ function onNotify() {
+ this._loggedIn = false;
+ if (Services.io.offline) {
+ this.status.login = LOGIN_FAILED_NETWORK_ERROR;
+ throw "Application is offline, login should not be called";
+ }
+
+ let initialStatus = this._checkSetup();
+ if (username) {
+ this.identity.username = username;
+ }
+ if (password) {
+ this.identity.basicPassword = password;
+ }
+ if (passphrase) {
+ this.identity.syncKey = passphrase;
+ }
+
+ if (this._checkSetup() == CLIENT_NOT_CONFIGURED) {
+ throw "Aborting login, client not configured.";
+ }
+
+ // Ask the identity manager to explicitly login now.
+ this._log.info("Logging in the user.");
+ let cb = Async.makeSpinningCallback();
+ this.identity.ensureLoggedIn().then(
+ () => cb(null),
+ err => cb(err || "ensureLoggedIn failed")
+ );
+
+ // Just let any errors bubble up - they've more context than we do!
+ cb.wait();
+
+ // Calling login() with parameters when the client was
+ // previously not configured means setup was completed.
+ if (initialStatus == CLIENT_NOT_CONFIGURED
+ && (username || password || passphrase)) {
+ Svc.Obs.notify("weave:service:setup-complete");
+ }
+ this._updateCachedURLs();
+
+ this._log.info("User logged in successfully - verifying login.");
+ if (!this.verifyLogin()) {
+ // verifyLogin sets the failure states here.
+ throw "Login failed: " + this.status.login;
+ }
+
+ this._loggedIn = true;
+
+ return true;
+ }
+
+ let notifier = this._notify("login", "", onNotify.bind(this));
+ return this._catch(this._lock("service.js: login", notifier))();
+ },
+
+ logout: function logout() {
+ // If we failed during login, we aren't going to have this._loggedIn set,
+ // but we still want to ask the identity to logout, so it doesn't try and
+ // reuse any old credentials next time we sync.
+ this._log.info("Logging out");
+ this.identity.logout();
+ this._loggedIn = false;
+
+ Svc.Obs.notify("weave:service:logout:finish");
+ },
+
+ checkAccount: function checkAccount(account) {
+ let client = new UserAPI10Client(this.userAPIURI);
+ let cb = Async.makeSpinningCallback();
+
+ let username = this.identity.usernameFromAccount(account);
+ client.usernameExists(username, cb);
+
+ try {
+ let exists = cb.wait();
+ return exists ? "notAvailable" : "available";
+ } catch (ex) {
+ // TODO fix API convention.
+ return this.errorHandler.errorStr(ex);
+ }
+ },
+
+ createAccount: function createAccount(email, password,
+ captchaChallenge, captchaResponse) {
+ let client = new UserAPI10Client(this.userAPIURI);
+
+ // Hint to server to allow scripted user creation or otherwise
+ // ignore captcha.
+ if (Svc.Prefs.isSet("admin-secret")) {
+ client.adminSecret = Svc.Prefs.get("admin-secret", "");
+ }
+
+ let cb = Async.makeSpinningCallback();
+
+ client.createAccount(email, password, captchaChallenge, captchaResponse,
+ cb);
+
+ try {
+ cb.wait();
+ return null;
+ } catch (ex) {
+ return this.errorHandler.errorStr(ex.body);
+ }
+ },
+
+ // Note: returns false if we failed for a reason other than the server not yet
+ // supporting the api.
+ _fetchServerConfiguration() {
+ // This is similar to _fetchInfo, but with different error handling.
+
+ let infoURL = this.userBaseURL + "info/configuration";
+ this._log.debug("Fetching server configuration", infoURL);
+ let configResponse;
+ try {
+ configResponse = this.resource(infoURL).get();
+ } catch (ex) {
+ // This is probably a network or similar error.
+ this._log.warn("Failed to fetch info/configuration", ex);
+ this.errorHandler.checkServerError(ex);
+ return false;
+ }
+
+ if (configResponse.status == 404) {
+ // This server doesn't support the URL yet - that's OK.
+ this._log.debug("info/configuration returned 404 - using default upload semantics");
+ } else if (configResponse.status != 200) {
+ this._log.warn(`info/configuration returned ${configResponse.status} - using default configuration`);
+ this.errorHandler.checkServerError(configResponse);
+ return false;
+ } else {
+ this.serverConfiguration = configResponse.obj;
+ }
+ this._log.trace("info/configuration for this server", this.serverConfiguration);
+ return true;
+ },
+
+ // Stuff we need to do after login, before we can really do
+ // anything (e.g. key setup).
+ _remoteSetup: function _remoteSetup(infoResponse) {
+ let reset = false;
+
+ if (!this._fetchServerConfiguration()) {
+ return false;
+ }
+
+ this._log.debug("Fetching global metadata record");
+ let meta = this.recordManager.get(this.metaURL);
+
+ // Checking modified time of the meta record.
+ if (infoResponse &&
+ (infoResponse.obj.meta != this.metaModified) &&
+ (!meta || !meta.isNew)) {
+
+ // Delete the cached meta record...
+ this._log.debug("Clearing cached meta record. metaModified is " +
+ JSON.stringify(this.metaModified) + ", setting to " +
+ JSON.stringify(infoResponse.obj.meta));
+
+ this.recordManager.del(this.metaURL);
+
+ // ... fetch the current record from the server, and COPY THE FLAGS.
+ let newMeta = this.recordManager.get(this.metaURL);
+
+ // If we got a 401, we do not want to create a new meta/global - we
+ // should be able to get the existing meta after we get a new node.
+ if (this.recordManager.response.status == 401) {
+ this._log.debug("Fetching meta/global record on the server returned 401.");
+ this.errorHandler.checkServerError(this.recordManager.response);
+ return false;
+ }
+
+ if (this.recordManager.response.status == 404) {
+ this._log.debug("No meta/global record on the server. Creating one.");
+ newMeta = new WBORecord("meta", "global");
+ newMeta.payload.syncID = this.syncID;
+ newMeta.payload.storageVersion = STORAGE_VERSION;
+ newMeta.payload.declined = this.engineManager.getDeclined();
+
+ newMeta.isNew = true;
+
+ this.recordManager.set(this.metaURL, newMeta);
+ let uploadRes = newMeta.upload(this.resource(this.metaURL));
+ if (!uploadRes.success) {
+ this._log.warn("Unable to upload new meta/global. Failing remote setup.");
+ this.errorHandler.checkServerError(uploadRes);
+ return false;
+ }
+ } else if (!newMeta) {
+ this._log.warn("Unable to get meta/global. Failing remote setup.");
+ this.errorHandler.checkServerError(this.recordManager.response);
+ return false;
+ } else {
+ // If newMeta, then it stands to reason that meta != null.
+ newMeta.isNew = meta.isNew;
+ newMeta.changed = meta.changed;
+ }
+
+ // Switch in the new meta object and record the new time.
+ meta = newMeta;
+ this.metaModified = infoResponse.obj.meta;
+ }
+
+ let remoteVersion = (meta && meta.payload.storageVersion)?
+ meta.payload.storageVersion : "";
+
+ this._log.debug(["Weave Version:", WEAVE_VERSION, "Local Storage:",
+ STORAGE_VERSION, "Remote Storage:", remoteVersion].join(" "));
+
+ // Check for cases that require a fresh start. When comparing remoteVersion,
+ // we need to convert it to a number as older clients used it as a string.
+ if (!meta || !meta.payload.storageVersion || !meta.payload.syncID ||
+ STORAGE_VERSION > parseFloat(remoteVersion)) {
+
+ this._log.info("One of: no meta, no meta storageVersion, or no meta syncID. Fresh start needed.");
+
+ // abort the server wipe if the GET status was anything other than 404 or 200
+ let status = this.recordManager.response.status;
+ if (status != 200 && status != 404) {
+ this.status.sync = METARECORD_DOWNLOAD_FAIL;
+ this.errorHandler.checkServerError(this.recordManager.response);
+ this._log.warn("Unknown error while downloading metadata record. " +
+ "Aborting sync.");
+ return false;
+ }
+
+ if (!meta)
+ this._log.info("No metadata record, server wipe needed");
+ if (meta && !meta.payload.syncID)
+ this._log.warn("No sync id, server wipe needed");
+
+ reset = true;
+
+ this._log.info("Wiping server data");
+ this._freshStart();
+
+ if (status == 404)
+ this._log.info("Metadata record not found, server was wiped to ensure " +
+ "consistency.");
+ else // 200
+ this._log.info("Wiped server; incompatible metadata: " + remoteVersion);
+
+ return true;
+ }
+ else if (remoteVersion > STORAGE_VERSION) {
+ this.status.sync = VERSION_OUT_OF_DATE;
+ this._log.warn("Upgrade required to access newer storage version.");
+ return false;
+ }
+ else if (meta.payload.syncID != this.syncID) {
+
+ this._log.info("Sync IDs differ. Local is " + this.syncID + ", remote is " + meta.payload.syncID);
+ this.resetClient();
+ this.collectionKeys.clear();
+ this.syncID = meta.payload.syncID;
+ this._log.debug("Clear cached values and take syncId: " + this.syncID);
+
+ if (!this.upgradeSyncKey(meta.payload.syncID)) {
+ this._log.warn("Failed to upgrade sync key. Failing remote setup.");
+ return false;
+ }
+
+ if (!this.verifyAndFetchSymmetricKeys(infoResponse)) {
+ this._log.warn("Failed to fetch symmetric keys. Failing remote setup.");
+ return false;
+ }
+
+ // bug 545725 - re-verify creds and fail sanely
+ if (!this.verifyLogin()) {
+ this.status.sync = CREDENTIALS_CHANGED;
+ this._log.info("Credentials have changed, aborting sync and forcing re-login.");
+ return false;
+ }
+
+ return true;
+ }
+ else {
+ if (!this.upgradeSyncKey(meta.payload.syncID)) {
+ this._log.warn("Failed to upgrade sync key. Failing remote setup.");
+ return false;
+ }
+
+ if (!this.verifyAndFetchSymmetricKeys(infoResponse)) {
+ this._log.warn("Failed to fetch symmetric keys. Failing remote setup.");
+ return false;
+ }
+
+ return true;
+ }
+ },
+
+ /**
+ * Return whether we should attempt login at the start of a sync.
+ *
+ * Note that this function has strong ties to _checkSync: callers
+ * of this function should typically use _checkSync to verify that
+ * any necessary login took place.
+ */
+ _shouldLogin: function _shouldLogin() {
+ return this.enabled &&
+ !Services.io.offline &&
+ !this.isLoggedIn;
+ },
+
+ /**
+ * Determine if a sync should run.
+ *
+ * @param ignore [optional]
+ * array of reasons to ignore when checking
+ *
+ * @return Reason for not syncing; not-truthy if sync should run
+ */
+ _checkSync: function _checkSync(ignore) {
+ let reason = "";
+ if (!this.enabled)
+ reason = kSyncWeaveDisabled;
+ else if (Services.io.offline)
+ reason = kSyncNetworkOffline;
+ else if (this.status.minimumNextSync > Date.now())
+ reason = kSyncBackoffNotMet;
+ else if ((this.status.login == MASTER_PASSWORD_LOCKED) &&
+ Utils.mpLocked())
+ reason = kSyncMasterPasswordLocked;
+ else if (Svc.Prefs.get("firstSync") == "notReady")
+ reason = kFirstSyncChoiceNotMade;
+
+ if (ignore && ignore.indexOf(reason) != -1)
+ return "";
+
+ return reason;
+ },
+
+ sync: function sync(engineNamesToSync) {
+ let dateStr = Utils.formatTimestamp(new Date());
+ this._log.debug("User-Agent: " + Utils.userAgent);
+ this._log.info("Starting sync at " + dateStr);
+ this._catch(function () {
+ // Make sure we're logged in.
+ if (this._shouldLogin()) {
+ this._log.debug("In sync: should login.");
+ if (!this.login()) {
+ this._log.debug("Not syncing: login returned false.");
+ return;
+ }
+ }
+ else {
+ this._log.trace("In sync: no need to login.");
+ }
+ return this._lockedSync(engineNamesToSync);
+ })();
+ },
+
+ /**
+ * Sync up engines with the server.
+ */
+ _lockedSync: function _lockedSync(engineNamesToSync) {
+ return this._lock("service.js: sync",
+ this._notify("sync", "", function onNotify() {
+
+ let histogram = Services.telemetry.getHistogramById("WEAVE_START_COUNT");
+ histogram.add(1);
+
+ let synchronizer = new EngineSynchronizer(this);
+ let cb = Async.makeSpinningCallback();
+ synchronizer.onComplete = cb;
+
+ synchronizer.sync(engineNamesToSync);
+ // wait() throws if the first argument is truthy, which is exactly what
+ // we want.
+ let result = cb.wait();
+
+ histogram = Services.telemetry.getHistogramById("WEAVE_COMPLETE_SUCCESS_COUNT");
+ histogram.add(1);
+
+ // We successfully synchronized.
+ // Check if the identity wants to pre-fetch a migration sentinel from
+ // the server.
+ // If we have no clusterURL, we are probably doing a node reassignment
+ // so don't attempt to get it in that case.
+ if (this.clusterURL) {
+ this.identity.prefetchMigrationSentinel(this);
+ }
+
+ // Now let's update our declined engines (but only if we have a metaURL;
+ // if Sync failed due to no node we will not have one)
+ if (this.metaURL) {
+ let meta = this.recordManager.get(this.metaURL);
+ if (!meta) {
+ this._log.warn("No meta/global; can't update declined state.");
+ return;
+ }
+
+ let declinedEngines = new DeclinedEngines(this);
+ let didChange = declinedEngines.updateDeclined(meta, this.engineManager);
+ if (!didChange) {
+ this._log.info("No change to declined engines. Not reuploading meta/global.");
+ return;
+ }
+
+ this.uploadMetaGlobal(meta);
+ }
+ }))();
+ },
+
+ /**
+ * Upload meta/global, throwing the response on failure.
+ */
+ uploadMetaGlobal: function (meta) {
+ this._log.debug("Uploading meta/global: " + JSON.stringify(meta));
+
+ // It would be good to set the X-If-Unmodified-Since header to `timestamp`
+ // for this PUT to ensure at least some level of transactionality.
+ // Unfortunately, the servers don't support it after a wipe right now
+ // (bug 693893), so we're going to defer this until bug 692700.
+ let res = this.resource(this.metaURL);
+ let response = res.put(meta);
+ if (!response.success) {
+ throw response;
+ }
+ this.recordManager.set(this.metaURL, meta);
+ },
+
+ /**
+ * Get a migration sentinel for the Firefox Accounts migration.
+ * Returns a JSON blob - it is up to callers of this to make sense of the
+ * data.
+ *
+ * Returns a promise that resolves with the sentinel, or null.
+ */
+ getFxAMigrationSentinel: function() {
+ if (this._shouldLogin()) {
+ this._log.debug("In getFxAMigrationSentinel: should login.");
+ if (!this.login()) {
+ this._log.debug("Can't get migration sentinel: login returned false.");
+ return Promise.resolve(null);
+ }
+ }
+ if (!this.identity.syncKeyBundle) {
+ this._log.error("Can't get migration sentinel: no syncKeyBundle.");
+ return Promise.resolve(null);
+ }
+ try {
+ let collectionURL = this.storageURL + "meta/fxa_credentials";
+ let cryptoWrapper = this.recordManager.get(collectionURL);
+ if (!cryptoWrapper || !cryptoWrapper.payload) {
+ // nothing to decrypt - .decrypt is noisy in that case, so just bail
+ // now.
+ return Promise.resolve(null);
+ }
+ // If the payload has a sentinel it means we must have put back the
+ // decrypted version last time we were called.
+ if (cryptoWrapper.payload.sentinel) {
+ return Promise.resolve(cryptoWrapper.payload.sentinel);
+ }
+ // If decryption fails it almost certainly means the key is wrong - but
+ // it's not clear if we need to take special action for that case?
+ let payload = cryptoWrapper.decrypt(this.identity.syncKeyBundle);
+ // After decrypting the ciphertext is lost, so we just stash the
+ // decrypted payload back into the wrapper.
+ cryptoWrapper.payload = payload;
+ return Promise.resolve(payload.sentinel);
+ } catch (ex) {
+ this._log.error("Failed to fetch the migration sentinel: ${}", ex);
+ return Promise.resolve(null);
+ }
+ },
+
+ /**
+ * Set a migration sentinel for the Firefox Accounts migration.
+ * Accepts a JSON blob - it is up to callers of this to make sense of the
+ * data.
+ *
+ * Returns a promise that resolves with a boolean which indicates if the
+ * sentinel was successfully written.
+ */
+ setFxAMigrationSentinel: function(sentinel) {
+ if (this._shouldLogin()) {
+ this._log.debug("In setFxAMigrationSentinel: should login.");
+ if (!this.login()) {
+ this._log.debug("Can't set migration sentinel: login returned false.");
+ return Promise.resolve(false);
+ }
+ }
+ if (!this.identity.syncKeyBundle) {
+ this._log.error("Can't set migration sentinel: no syncKeyBundle.");
+ return Promise.resolve(false);
+ }
+ try {
+ let collectionURL = this.storageURL + "meta/fxa_credentials";
+ let cryptoWrapper = new CryptoWrapper("meta", "fxa_credentials");
+ cryptoWrapper.cleartext.sentinel = sentinel;
+
+ cryptoWrapper.encrypt(this.identity.syncKeyBundle);
+
+ let res = this.resource(collectionURL);
+ let response = res.put(cryptoWrapper.toJSON());
+
+ if (!response.success) {
+ throw response;
+ }
+ this.recordManager.set(collectionURL, cryptoWrapper);
+ } catch (ex) {
+ this._log.error("Failed to set the migration sentinel: ${}", ex);
+ return Promise.resolve(false);
+ }
+ return Promise.resolve(true);
+ },
+
+ /**
+ * If we have a passphrase, rather than a 25-alphadigit sync key,
+ * use the provided sync ID to bootstrap it using PBKDF2.
+ *
+ * Store the new 'passphrase' back into the identity manager.
+ *
+ * We can check this as often as we want, because once it's done the
+ * check will no longer succeed. It only matters that it happens after
+ * we decide to bump the server storage version.
+ */
+ upgradeSyncKey: function upgradeSyncKey(syncID) {
+ let p = this.identity.syncKey;
+
+ if (!p) {
+ return false;
+ }
+
+ // Check whether it's already a key that we generated.
+ if (Utils.isPassphrase(p)) {
+ this._log.info("Sync key is up-to-date: no need to upgrade.");
+ return true;
+ }
+
+ // Otherwise, let's upgrade it.
+ // N.B., we persist the sync key without testing it first...
+
+ let s = btoa(syncID); // It's what WeaveCrypto expects. *sigh*
+ let k = Utils.derivePresentableKeyFromPassphrase(p, s, PBKDF2_KEY_BYTES); // Base 32.
+
+ if (!k) {
+ this._log.error("No key resulted from derivePresentableKeyFromPassphrase. Failing upgrade.");
+ return false;
+ }
+
+ this._log.info("Upgrading sync key...");
+ this.identity.syncKey = k;
+ this._log.info("Saving upgraded sync key...");
+ this.persistLogin();
+ this._log.info("Done saving.");
+ return true;
+ },
+
+ _freshStart: function _freshStart() {
+ this._log.info("Fresh start. Resetting client and considering key upgrade.");
+ this.resetClient();
+ this.collectionKeys.clear();
+ this.upgradeSyncKey(this.syncID);
+
+ // Wipe the server.
+ let wipeTimestamp = this.wipeServer();
+
+ // Upload a new meta/global record.
+ let meta = new WBORecord("meta", "global");
+ meta.payload.syncID = this.syncID;
+ meta.payload.storageVersion = STORAGE_VERSION;
+ meta.payload.declined = this.engineManager.getDeclined();
+ meta.isNew = true;
+
+ // uploadMetaGlobal throws on failure -- including race conditions.
+ // If we got into a race condition, we'll abort the sync this way, too.
+ // That's fine. We'll just wait till the next sync. The client that we're
+ // racing is probably busy uploading stuff right now anyway.
+ this.uploadMetaGlobal(meta);
+
+ // Wipe everything we know about except meta because we just uploaded it
+ let engines = [this.clientsEngine].concat(this.engineManager.getAll());
+ let collections = engines.map(engine => engine.name);
+ // TODO: there's a bug here. We should be calling resetClient, no?
+
+ // Generate, upload, and download new keys. Do this last so we don't wipe
+ // them...
+ this.generateNewSymmetricKeys();
+ },
+
+ /**
+ * Wipe user data from the server.
+ *
+ * @param collections [optional]
+ * Array of collections to wipe. If not given, all collections are
+ * wiped by issuing a DELETE request for `storageURL`.
+ *
+ * @return the server's timestamp of the (last) DELETE.
+ */
+ wipeServer: function wipeServer(collections) {
+ let response;
+ let histogram = Services.telemetry.getHistogramById("WEAVE_WIPE_SERVER_SUCCEEDED");
+ if (!collections) {
+ // Strip the trailing slash.
+ let res = this.resource(this.storageURL.slice(0, -1));
+ res.setHeader("X-Confirm-Delete", "1");
+ try {
+ response = res.delete();
+ } catch (ex) {
+ this._log.debug("Failed to wipe server", ex);
+ histogram.add(false);
+ throw ex;
+ }
+ if (response.status != 200 && response.status != 404) {
+ this._log.debug("Aborting wipeServer. Server responded with " +
+ response.status + " response for " + this.storageURL);
+ histogram.add(false);
+ throw response;
+ }
+ histogram.add(true);
+ return response.headers["x-weave-timestamp"];
+ }
+
+ let timestamp;
+ for (let name of collections) {
+ let url = this.storageURL + name;
+ try {
+ response = this.resource(url).delete();
+ } catch (ex) {
+ this._log.debug("Failed to wipe '" + name + "' collection", ex);
+ histogram.add(false);
+ throw ex;
+ }
+
+ if (response.status != 200 && response.status != 404) {
+ this._log.debug("Aborting wipeServer. Server responded with " +
+ response.status + " response for " + url);
+ histogram.add(false);
+ throw response;
+ }
+
+ if ("x-weave-timestamp" in response.headers) {
+ timestamp = response.headers["x-weave-timestamp"];
+ }
+ }
+ histogram.add(true);
+ return timestamp;
+ },
+
+ /**
+ * Wipe all local user data.
+ *
+ * @param engines [optional]
+ * Array of engine names to wipe. If not given, all engines are used.
+ */
+ wipeClient: function wipeClient(engines) {
+ // If we don't have any engines, reset the service and wipe all engines
+ if (!engines) {
+ // Clear out any service data
+ this.resetService();
+
+ engines = [this.clientsEngine].concat(this.engineManager.getAll());
+ }
+ // Convert the array of names into engines
+ else {
+ engines = this.engineManager.get(engines);
+ }
+
+ // Fully wipe each engine if it's able to decrypt data
+ for (let engine of engines) {
+ if (engine.canDecrypt()) {
+ engine.wipeClient();
+ }
+ }
+
+ // Save the password/passphrase just in-case they aren't restored by sync
+ this.persistLogin();
+ },
+
+ /**
+ * Wipe all remote user data by wiping the server then telling each remote
+ * client to wipe itself.
+ *
+ * @param engines [optional]
+ * Array of engine names to wipe. If not given, all engines are used.
+ */
+ wipeRemote: function wipeRemote(engines) {
+ try {
+ // Make sure stuff gets uploaded.
+ this.resetClient(engines);
+
+ // Clear out any server data.
+ this.wipeServer(engines);
+
+ // Only wipe the engines provided.
+ if (engines) {
+ engines.forEach(function(e) {
+ this.clientsEngine.sendCommand("wipeEngine", [e]);
+ }, this);
+ }
+ // Tell the remote machines to wipe themselves.
+ else {
+ this.clientsEngine.sendCommand("wipeAll", []);
+ }
+
+ // Make sure the changed clients get updated.
+ this.clientsEngine.sync();
+ } catch (ex) {
+ this.errorHandler.checkServerError(ex);
+ throw ex;
+ }
+ },
+
+ /**
+ * Reset local service information like logs, sync times, caches.
+ */
+ resetService: function resetService() {
+ this._catch(function reset() {
+ this._log.info("Service reset.");
+
+ // Pretend we've never synced to the server and drop cached data
+ this.syncID = "";
+ this.recordManager.clearCache();
+ })();
+ },
+
+ /**
+ * Reset the client by getting rid of any local server data and client data.
+ *
+ * @param engines [optional]
+ * Array of engine names to reset. If not given, all engines are used.
+ */
+ resetClient: function resetClient(engines) {
+ this._catch(function doResetClient() {
+ // If we don't have any engines, reset everything including the service
+ if (!engines) {
+ // Clear out any service data
+ this.resetService();
+
+ engines = [this.clientsEngine].concat(this.engineManager.getAll());
+ }
+ // Convert the array of names into engines
+ else {
+ engines = this.engineManager.get(engines);
+ }
+
+ // Have each engine drop any temporary meta data
+ for (let engine of engines) {
+ engine.resetClient();
+ }
+ })();
+ },
+
+ /**
+ * Fetch storage info from the server.
+ *
+ * @param type
+ * String specifying what info to fetch from the server. Must be one
+ * of the INFO_* values. See Sync Storage Server API spec for details.
+ * @param callback
+ * Callback function with signature (error, data) where `data' is
+ * the return value from the server already parsed as JSON.
+ *
+ * @return RESTRequest instance representing the request, allowing callers
+ * to cancel the request.
+ */
+ getStorageInfo: function getStorageInfo(type, callback) {
+ if (STORAGE_INFO_TYPES.indexOf(type) == -1) {
+ throw "Invalid value for 'type': " + type;
+ }
+
+ let info_type = "info/" + type;
+ this._log.trace("Retrieving '" + info_type + "'...");
+ let url = this.userBaseURL + info_type;
+ return this.getStorageRequest(url).get(function onComplete(error) {
+ // Note: 'this' is the request.
+ if (error) {
+ this._log.debug("Failed to retrieve '" + info_type + "'", error);
+ return callback(error);
+ }
+ if (this.response.status != 200) {
+ this._log.debug("Failed to retrieve '" + info_type +
+ "': server responded with HTTP" +
+ this.response.status);
+ return callback(this.response);
+ }
+
+ let result;
+ try {
+ result = JSON.parse(this.response.body);
+ } catch (ex) {
+ this._log.debug("Server returned invalid JSON for '" + info_type +
+ "': " + this.response.body);
+ return callback(ex);
+ }
+ this._log.trace("Successfully retrieved '" + info_type + "'.");
+ return callback(null, result);
+ });
+ },
+};
+
+this.Service = new Sync11Service();
+Service.onStartup();
diff --git a/services/sync/modules/stages/cluster.js b/services/sync/modules/stages/cluster.js
new file mode 100644
index 000000000..7665ce825
--- /dev/null
+++ b/services/sync/modules/stages/cluster.js
@@ -0,0 +1,113 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["ClusterManager"];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/util.js");
+
+/**
+ * Contains code for managing the Sync cluster we are in.
+ */
+this.ClusterManager = function ClusterManager(service) {
+ this._log = Log.repository.getLogger("Sync.Service");
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.service.main")];
+
+ this.service = service;
+}
+ClusterManager.prototype = {
+ get identity() {
+ return this.service.identity;
+ },
+
+ /**
+ * Obtain the cluster for the current user.
+ *
+ * Returns the string URL of the cluster or null on error.
+ */
+ _findCluster: function _findCluster() {
+ this._log.debug("Finding cluster for user " + this.identity.username);
+
+ // This should ideally use UserAPI10Client but the legacy hackiness is
+ // strong with this code.
+ let fail;
+ let url = this.service.userAPIURI + this.identity.username + "/node/weave";
+ let res = this.service.resource(url);
+ try {
+ let node = res.get();
+ switch (node.status) {
+ case 400:
+ this.service.status.login = LOGIN_FAILED_LOGIN_REJECTED;
+ fail = "Find cluster denied: " + this.service.errorHandler.errorStr(node);
+ break;
+ case 404:
+ this._log.debug("Using serverURL as data cluster (multi-cluster support disabled)");
+ return this.service.serverURL;
+ case 0:
+ case 200:
+ if (node == "null") {
+ node = null;
+ }
+ this._log.trace("_findCluster successfully returning " + node);
+ return node;
+ default:
+ this.service.errorHandler.checkServerError(node);
+ fail = "Unexpected response code: " + node.status;
+ break;
+ }
+ } catch (e) {
+ this._log.debug("Network error on findCluster");
+ this.service.status.login = LOGIN_FAILED_NETWORK_ERROR;
+ this.service.errorHandler.checkServerError(e);
+ fail = e;
+ }
+ throw fail;
+ },
+
+ /**
+ * Determine the cluster for the current user and update state.
+ */
+ setCluster: function setCluster() {
+ // Make sure we didn't get some unexpected response for the cluster.
+ let cluster = this._findCluster();
+ this._log.debug("Cluster value = " + cluster);
+ if (cluster == null) {
+ return false;
+ }
+
+ // Convert from the funky "String object with additional properties" that
+ // resource.js returns to a plain-old string.
+ cluster = cluster.toString();
+ // Don't update stuff if we already have the right cluster
+ if (cluster == this.service.clusterURL) {
+ return false;
+ }
+
+ this._log.debug("Setting cluster to " + cluster);
+ this.service.clusterURL = cluster;
+
+ return true;
+ },
+
+ getUserBaseURL: function getUserBaseURL() {
+ // Legacy Sync and FxA Sync construct the userBaseURL differently. Legacy
+ // Sync appends path components onto an empty path, and in FxA Sync, the
+ // token server constructs this for us in an opaque manner. Since the
+ // cluster manager already sets the clusterURL on Service and also has
+ // access to the current identity, we added this functionality here.
+
+ // If the clusterURL hasn't been set, the userBaseURL shouldn't be set
+ // either. Some tests expect "undefined" to be returned here.
+ if (!this.service.clusterURL) {
+ return undefined;
+ }
+ let storageAPI = this.service.clusterURL + SYNC_API_VERSION + "/";
+ return storageAPI + this.identity.username + "/";
+ }
+};
+Object.freeze(ClusterManager.prototype);
diff --git a/services/sync/modules/stages/declined.js b/services/sync/modules/stages/declined.js
new file mode 100644
index 000000000..ff8a14181
--- /dev/null
+++ b/services/sync/modules/stages/declined.js
@@ -0,0 +1,76 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * This file contains code for maintaining the set of declined engines,
+ * in conjunction with EngineManager.
+ */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = ["DeclinedEngines"];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://gre/modules/Preferences.jsm");
+
+
+
+this.DeclinedEngines = function (service) {
+ this._log = Log.repository.getLogger("Sync.Declined");
+ this._log.level = Log.Level[new Preferences(PREFS_BRANCH).get("log.logger.declined")];
+
+ this.service = service;
+}
+this.DeclinedEngines.prototype = {
+ updateDeclined: function (meta, engineManager=this.service.engineManager) {
+ let enabled = new Set(engineManager.getEnabled().map(e => e.name));
+ let known = new Set(engineManager.getAll().map(e => e.name));
+ let remoteDeclined = new Set(meta.payload.declined || []);
+ let localDeclined = new Set(engineManager.getDeclined());
+
+ this._log.debug("Handling remote declined: " + JSON.stringify([...remoteDeclined]));
+ this._log.debug("Handling local declined: " + JSON.stringify([...localDeclined]));
+
+ // Any engines that are locally enabled should be removed from the remote
+ // declined list.
+ //
+ // Any engines that are locally declined should be added to the remote
+ // declined list.
+ let newDeclined = CommonUtils.union(localDeclined, CommonUtils.difference(remoteDeclined, enabled));
+
+ // If our declined set has changed, put it into the meta object and mark
+ // it as changed.
+ let declinedChanged = !CommonUtils.setEqual(newDeclined, remoteDeclined);
+ this._log.debug("Declined changed? " + declinedChanged);
+ if (declinedChanged) {
+ meta.changed = true;
+ meta.payload.declined = [...newDeclined];
+ }
+
+ // Update the engine manager regardless.
+ engineManager.setDeclined(newDeclined);
+
+ // Any engines that are locally known, locally disabled, and not remotely
+ // or locally declined, are candidates for enablement.
+ let undecided = CommonUtils.difference(CommonUtils.difference(known, enabled), newDeclined);
+ if (undecided.size) {
+ let subject = {
+ declined: newDeclined,
+ enabled: enabled,
+ known: known,
+ undecided: undecided,
+ };
+ CommonUtils.nextTick(() => {
+ Observers.notify("weave:engines:notdeclined", subject);
+ });
+ }
+
+ return declinedChanged;
+ },
+};
diff --git a/services/sync/modules/stages/enginesync.js b/services/sync/modules/stages/enginesync.js
new file mode 100644
index 000000000..a00a2f48b
--- /dev/null
+++ b/services/sync/modules/stages/enginesync.js
@@ -0,0 +1,449 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * This file contains code for synchronizing engines.
+ */
+
+this.EXPORTED_SYMBOLS = ["EngineSynchronizer"];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://gre/modules/Task.jsm");
+
+/**
+ * Perform synchronization of engines.
+ *
+ * This was originally split out of service.js. The API needs lots of love.
+ */
+this.EngineSynchronizer = function EngineSynchronizer(service) {
+ this._log = Log.repository.getLogger("Sync.Synchronizer");
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.synchronizer")];
+
+ this.service = service;
+
+ this.onComplete = null;
+}
+
+EngineSynchronizer.prototype = {
+ sync: function sync(engineNamesToSync) {
+ if (!this.onComplete) {
+ throw new Error("onComplete handler not installed.");
+ }
+
+ let startTime = Date.now();
+
+ this.service.status.resetSync();
+
+ // Make sure we should sync or record why we shouldn't.
+ let reason = this.service._checkSync();
+ if (reason) {
+ if (reason == kSyncNetworkOffline) {
+ this.service.status.sync = LOGIN_FAILED_NETWORK_ERROR;
+ }
+
+ // this is a purposeful abort rather than a failure, so don't set
+ // any status bits
+ reason = "Can't sync: " + reason;
+ this.onComplete(new Error("Can't sync: " + reason));
+ return;
+ }
+
+ // If we don't have a node, get one. If that fails, retry in 10 minutes.
+ if (!this.service.clusterURL && !this.service._clusterManager.setCluster()) {
+ this.service.status.sync = NO_SYNC_NODE_FOUND;
+ this._log.info("No cluster URL found. Cannot sync.");
+ this.onComplete(null);
+ return;
+ }
+
+ // Ping the server with a special info request once a day.
+ let infoURL = this.service.infoURL;
+ let now = Math.floor(Date.now() / 1000);
+ let lastPing = Svc.Prefs.get("lastPing", 0);
+ if (now - lastPing > 86400) { // 60 * 60 * 24
+ infoURL += "?v=" + WEAVE_VERSION;
+ Svc.Prefs.set("lastPing", now);
+ }
+
+ let engineManager = this.service.engineManager;
+
+ // Figure out what the last modified time is for each collection
+ let info = this.service._fetchInfo(infoURL);
+
+ // Convert the response to an object and read out the modified times
+ for (let engine of [this.service.clientsEngine].concat(engineManager.getAll())) {
+ engine.lastModified = info.obj[engine.name] || 0;
+ }
+
+ if (!(this.service._remoteSetup(info))) {
+ this.onComplete(new Error("Aborting sync, remote setup failed"));
+ return;
+ }
+
+ // Make sure we have an up-to-date list of clients before sending commands
+ this._log.debug("Refreshing client list.");
+ if (!this._syncEngine(this.service.clientsEngine)) {
+ // Clients is an engine like any other; it can fail with a 401,
+ // and we can elect to abort the sync.
+ this._log.warn("Client engine sync failed. Aborting.");
+ this.onComplete(null);
+ return;
+ }
+
+ // We only honor the "hint" of what engines to Sync if this isn't
+ // a first sync.
+ let allowEnginesHint = false;
+ // Wipe data in the desired direction if necessary
+ switch (Svc.Prefs.get("firstSync")) {
+ case "resetClient":
+ this.service.resetClient(engineManager.enabledEngineNames);
+ break;
+ case "wipeClient":
+ this.service.wipeClient(engineManager.enabledEngineNames);
+ break;
+ case "wipeRemote":
+ this.service.wipeRemote(engineManager.enabledEngineNames);
+ break;
+ default:
+ allowEnginesHint = true;
+ break;
+ }
+
+ if (this.service.clientsEngine.localCommands) {
+ try {
+ if (!(this.service.clientsEngine.processIncomingCommands())) {
+ this.service.status.sync = ABORT_SYNC_COMMAND;
+ this.onComplete(new Error("Processed command aborted sync."));
+ return;
+ }
+
+ // Repeat remoteSetup in-case the commands forced us to reset
+ if (!(this.service._remoteSetup(info))) {
+ this.onComplete(new Error("Remote setup failed after processing commands."));
+ return;
+ }
+ }
+ finally {
+ // Always immediately attempt to push back the local client (now
+ // without commands).
+ // Note that we don't abort here; if there's a 401 because we've
+ // been reassigned, we'll handle it around another engine.
+ this._syncEngine(this.service.clientsEngine);
+ }
+ }
+
+ // Update engines because it might change what we sync.
+ try {
+ this._updateEnabledEngines();
+ } catch (ex) {
+ this._log.debug("Updating enabled engines failed", ex);
+ this.service.errorHandler.checkServerError(ex);
+ this.onComplete(ex);
+ return;
+ }
+
+ // If the engines to sync has been specified, we sync in the order specified.
+ let enginesToSync;
+ if (allowEnginesHint && engineNamesToSync) {
+ this._log.info("Syncing specified engines", engineNamesToSync);
+ enginesToSync = engineManager.get(engineNamesToSync).filter(e => e.enabled);
+ } else {
+ this._log.info("Syncing all enabled engines.");
+ enginesToSync = engineManager.getEnabled();
+ }
+ try {
+ // We don't bother validating engines that failed to sync.
+ let enginesToValidate = [];
+ for (let engine of enginesToSync) {
+ // If there's any problems with syncing the engine, report the failure
+ if (!(this._syncEngine(engine)) || this.service.status.enforceBackoff) {
+ this._log.info("Aborting sync for failure in " + engine.name);
+ break;
+ }
+ enginesToValidate.push(engine);
+ }
+
+ // If _syncEngine fails for a 401, we might not have a cluster URL here.
+ // If that's the case, break out of this immediately, rather than
+ // throwing an exception when trying to fetch metaURL.
+ if (!this.service.clusterURL) {
+ this._log.debug("Aborting sync, no cluster URL: " +
+ "not uploading new meta/global.");
+ this.onComplete(null);
+ return;
+ }
+
+ // Upload meta/global if any engines changed anything.
+ let meta = this.service.recordManager.get(this.service.metaURL);
+ if (meta.isNew || meta.changed) {
+ this._log.info("meta/global changed locally: reuploading.");
+ try {
+ this.service.uploadMetaGlobal(meta);
+ delete meta.isNew;
+ delete meta.changed;
+ } catch (error) {
+ this._log.error("Unable to upload meta/global. Leaving marked as new.");
+ }
+ }
+
+ Async.promiseSpinningly(this._tryValidateEngines(enginesToValidate));
+
+ // If there were no sync engine failures
+ if (this.service.status.service != SYNC_FAILED_PARTIAL) {
+ Svc.Prefs.set("lastSync", new Date().toString());
+ this.service.status.sync = SYNC_SUCCEEDED;
+ }
+ } finally {
+ Svc.Prefs.reset("firstSync");
+
+ let syncTime = ((Date.now() - startTime) / 1000).toFixed(2);
+ let dateStr = Utils.formatTimestamp(new Date());
+ this._log.info("Sync completed at " + dateStr
+ + " after " + syncTime + " secs.");
+ }
+
+ this.onComplete(null);
+ },
+
+ _tryValidateEngines: Task.async(function* (recentlySyncedEngines) {
+ if (!Services.telemetry.canRecordBase || !Svc.Prefs.get("validation.enabled", false)) {
+ this._log.info("Skipping validation: validation or telemetry reporting is disabled");
+ return;
+ }
+
+ let lastValidation = Svc.Prefs.get("validation.lastTime", 0);
+ let validationInterval = Svc.Prefs.get("validation.interval");
+ let nowSeconds = Math.floor(Date.now() / 1000);
+
+ if (nowSeconds - lastValidation < validationInterval) {
+ this._log.info("Skipping validation: too recent since last validation attempt");
+ return;
+ }
+ // Update the time now, even if we may return false still. We don't want to
+ // check the rest of these more frequently than once a day.
+ Svc.Prefs.set("validation.lastTime", nowSeconds);
+
+ // Validation only occurs a certain percentage of the time.
+ let validationProbability = Svc.Prefs.get("validation.percentageChance", 0) / 100.0;
+ if (validationProbability < Math.random()) {
+ this._log.info("Skipping validation: Probability threshold not met");
+ return;
+ }
+ let maxRecords = Svc.Prefs.get("validation.maxRecords");
+ if (!maxRecords) {
+ // Don't bother asking the server for the counts if we know validation
+ // won't happen anyway.
+ return;
+ }
+
+ // maxRecords of -1 means "any number", so we can skip asking the server.
+ // Used for tests.
+ let info;
+ if (maxRecords < 0) {
+ info = {};
+ for (let e of recentlySyncedEngines) {
+ info[e.name] = 1; // needs to be < maxRecords
+ }
+ maxRecords = 2;
+ } else {
+
+ let collectionCountsURL = this.service.userBaseURL + "info/collection_counts";
+ try {
+ let infoResp = this.service._fetchInfo(collectionCountsURL);
+ if (!infoResp.success) {
+ this._log.error("Can't run validation: request to info/collection_counts responded with "
+ + resp.status);
+ return;
+ }
+ info = infoResp.obj; // might throw because obj is a getter which parses json.
+ } catch (e) {
+ // Not running validation is totally fine, so we just write an error log and return.
+ this._log.error("Can't run validation: Caught error when fetching counts", e);
+ return;
+ }
+ }
+
+ if (!info) {
+ return;
+ }
+
+ let engineLookup = new Map(recentlySyncedEngines.map(e => [e.name, e]));
+ let toRun = [];
+ for (let [engineName, recordCount] of Object.entries(info)) {
+ let engine = engineLookup.get(engineName);
+ if (recordCount > maxRecords || !engine) {
+ this._log.debug(`Skipping validation for ${engineName} because it's not an engine or ` +
+ `the number of records (${recordCount}) is greater than the maximum allowed (${maxRecords}).`);
+ continue;
+ }
+ let validator = engine.getValidator();
+ if (!validator) {
+ continue;
+ }
+ // Put this in an array so that we know how many we're going to do, so we
+ // don't tell users we're going to run some validators when we aren't.
+ toRun.push({ engine, validator });
+ }
+
+ if (!toRun.length) {
+ return;
+ }
+ Services.console.logStringMessage(
+ "Sync is about to run a consistency check. This may be slow, and " +
+ "can be controlled using the pref \"services.sync.validation.enabled\".\n" +
+ "If you encounter any problems because of this, please file a bug.");
+ for (let { validator, engine } of toRun) {
+ try {
+ let result = yield validator.validate(engine);
+ Observers.notify("weave:engine:validate:finish", result, engine.name);
+ } catch (e) {
+ this._log.error(`Failed to run validation on ${engine.name}!`, e);
+ Observers.notify("weave:engine:validate:error", e, engine.name)
+ // Keep validating -- there's no reason to think that a failure for one
+ // validator would mean the others will fail.
+ }
+ }
+ }),
+
+ // Returns true if sync should proceed.
+ // false / no return value means sync should be aborted.
+ _syncEngine: function _syncEngine(engine) {
+ try {
+ engine.sync();
+ }
+ catch(e) {
+ if (e.status == 401) {
+ // Maybe a 401, cluster update perhaps needed?
+ // We rely on ErrorHandler observing the sync failure notification to
+ // schedule another sync and clear node assignment values.
+ // Here we simply want to muffle the exception and return an
+ // appropriate value.
+ return false;
+ }
+ }
+
+ return true;
+ },
+
+ _updateEnabledFromMeta: function (meta, numClients, engineManager=this.service.engineManager) {
+ this._log.info("Updating enabled engines: " +
+ numClients + " clients.");
+
+ if (meta.isNew || !meta.payload.engines) {
+ this._log.debug("meta/global isn't new, or is missing engines. Not updating enabled state.");
+ return;
+ }
+
+ // If we're the only client, and no engines are marked as enabled,
+ // thumb our noses at the server data: it can't be right.
+ // Belt-and-suspenders approach to Bug 615926.
+ let hasEnabledEngines = false;
+ for (let e in meta.payload.engines) {
+ if (e != "clients") {
+ hasEnabledEngines = true;
+ break;
+ }
+ }
+
+ if ((numClients <= 1) && !hasEnabledEngines) {
+ this._log.info("One client and no enabled engines: not touching local engine status.");
+ return;
+ }
+
+ this.service._ignorePrefObserver = true;
+
+ let enabled = engineManager.enabledEngineNames;
+
+ let toDecline = new Set();
+ let toUndecline = new Set();
+
+ for (let engineName in meta.payload.engines) {
+ if (engineName == "clients") {
+ // Clients is special.
+ continue;
+ }
+ let index = enabled.indexOf(engineName);
+ if (index != -1) {
+ // The engine is enabled locally. Nothing to do.
+ enabled.splice(index, 1);
+ continue;
+ }
+ let engine = engineManager.get(engineName);
+ if (!engine) {
+ // The engine doesn't exist locally. Nothing to do.
+ continue;
+ }
+
+ let attemptedEnable = false;
+ // If the engine was enabled remotely, enable it locally.
+ if (!Svc.Prefs.get("engineStatusChanged." + engine.prefName, false)) {
+ this._log.trace("Engine " + engineName + " was enabled. Marking as non-declined.");
+ toUndecline.add(engineName);
+ this._log.trace(engineName + " engine was enabled remotely.");
+ engine.enabled = true;
+ // Note that setting engine.enabled to true might not have worked for
+ // the password engine if a master-password is enabled. However, it's
+ // still OK that we added it to undeclined - the user *tried* to enable
+ // it remotely - so it still winds up as not being flagged as declined
+ // even though it's disabled remotely.
+ attemptedEnable = true;
+ }
+
+ // If either the engine was disabled locally or enabling the engine
+ // failed (see above re master-password) then wipe server data and
+ // disable it everywhere.
+ if (!engine.enabled) {
+ this._log.trace("Wiping data for " + engineName + " engine.");
+ engine.wipeServer();
+ delete meta.payload.engines[engineName];
+ meta.changed = true; // the new enabled state must propagate
+ // We also here mark the engine as declined, because the pref
+ // was explicitly changed to false - unless we tried, and failed,
+ // to enable it - in which case we leave the declined state alone.
+ if (!attemptedEnable) {
+ // This will be reflected in meta/global in the next stage.
+ this._log.trace("Engine " + engineName + " was disabled locally. Marking as declined.");
+ toDecline.add(engineName);
+ }
+ }
+ }
+
+ // Any remaining engines were either enabled locally or disabled remotely.
+ for (let engineName of enabled) {
+ let engine = engineManager.get(engineName);
+ if (Svc.Prefs.get("engineStatusChanged." + engine.prefName, false)) {
+ this._log.trace("The " + engineName + " engine was enabled locally.");
+ toUndecline.add(engineName);
+ } else {
+ this._log.trace("The " + engineName + " engine was disabled remotely.");
+
+ // Don't automatically mark it as declined!
+ engine.enabled = false;
+ }
+ }
+
+ engineManager.decline(toDecline);
+ engineManager.undecline(toUndecline);
+
+ Svc.Prefs.resetBranch("engineStatusChanged.");
+ this.service._ignorePrefObserver = false;
+ },
+
+ _updateEnabledEngines: function () {
+ let meta = this.service.recordManager.get(this.service.metaURL);
+ let numClients = this.service.scheduler.numClients;
+ let engineManager = this.service.engineManager;
+
+ this._updateEnabledFromMeta(meta, numClients, engineManager);
+ },
+};
+Object.freeze(EngineSynchronizer.prototype);
diff --git a/services/sync/modules/status.js b/services/sync/modules/status.js
new file mode 100644
index 000000000..100bc7965
--- /dev/null
+++ b/services/sync/modules/status.js
@@ -0,0 +1,145 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+* License, v. 2.0. If a copy of the MPL was not distributed with this
+* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["Status"];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cr = Components.results;
+var Cu = Components.utils;
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://services-common/async.js");
+
+this.Status = {
+ _log: Log.repository.getLogger("Sync.Status"),
+ __authManager: null,
+ ready: false,
+
+ get _authManager() {
+ if (this.__authManager) {
+ return this.__authManager;
+ }
+ let service = Components.classes["@mozilla.org/weave/service;1"]
+ .getService(Components.interfaces.nsISupports)
+ .wrappedJSObject;
+ let idClass = service.fxAccountsEnabled ? BrowserIDManager : IdentityManager;
+ this.__authManager = new idClass();
+ this.__authManager.initialize();
+ return this.__authManager;
+ },
+
+ get service() {
+ return this._service;
+ },
+
+ set service(code) {
+ this._log.debug("Status.service: " + (this._service || undefined) + " => " + code);
+ this._service = code;
+ },
+
+ get login() {
+ return this._login;
+ },
+
+ set login(code) {
+ this._log.debug("Status.login: " + this._login + " => " + code);
+ this._login = code;
+
+ if (code == LOGIN_FAILED_NO_USERNAME ||
+ code == LOGIN_FAILED_NO_PASSWORD ||
+ code == LOGIN_FAILED_NO_PASSPHRASE) {
+ this.service = CLIENT_NOT_CONFIGURED;
+ } else if (code != LOGIN_SUCCEEDED) {
+ this.service = LOGIN_FAILED;
+ } else {
+ this.service = STATUS_OK;
+ }
+ },
+
+ get sync() {
+ return this._sync;
+ },
+
+ set sync(code) {
+ this._log.debug("Status.sync: " + this._sync + " => " + code);
+ this._sync = code;
+ this.service = code == SYNC_SUCCEEDED ? STATUS_OK : SYNC_FAILED;
+ },
+
+ get eol() {
+ let modePref = PREFS_BRANCH + "errorhandler.alert.mode";
+ try {
+ return Services.prefs.getCharPref(modePref) == "hard-eol";
+ } catch (ex) {
+ return false;
+ }
+ },
+
+ get engines() {
+ return this._engines;
+ },
+
+ set engines([name, code]) {
+ this._log.debug("Status for engine " + name + ": " + code);
+ this._engines[name] = code;
+
+ if (code != ENGINE_SUCCEEDED) {
+ this.service = SYNC_FAILED_PARTIAL;
+ }
+ },
+
+ // Implement toString because adding a logger introduces a cyclic object
+ // value, so we can't trivially debug-print Status as JSON.
+ toString: function toString() {
+ return "<Status" +
+ ": login: " + Status.login +
+ ", service: " + Status.service +
+ ", sync: " + Status.sync + ">";
+ },
+
+ checkSetup: function checkSetup() {
+ let result = this._authManager.currentAuthState;
+ if (result == STATUS_OK) {
+ Status.service = result;
+ return result;
+ }
+
+ Status.login = result;
+ return Status.service;
+ },
+
+ resetBackoff: function resetBackoff() {
+ this.enforceBackoff = false;
+ this.backoffInterval = 0;
+ this.minimumNextSync = 0;
+ },
+
+ resetSync: function resetSync() {
+ // Logger setup.
+ let logPref = PREFS_BRANCH + "log.logger.status";
+ let logLevel = "Trace";
+ try {
+ logLevel = Services.prefs.getCharPref(logPref);
+ } catch (ex) {
+ // Use default.
+ }
+ this._log.level = Log.Level[logLevel];
+
+ this._log.info("Resetting Status.");
+ this.service = STATUS_OK;
+ this._login = LOGIN_SUCCEEDED;
+ this._sync = SYNC_SUCCEEDED;
+ this._engines = {};
+ this.partial = false;
+ }
+};
+
+// Initialize various status values.
+Status.resetBackoff();
+Status.resetSync();
diff --git a/services/sync/modules/telemetry.js b/services/sync/modules/telemetry.js
new file mode 100644
index 000000000..c311387f7
--- /dev/null
+++ b/services/sync/modules/telemetry.js
@@ -0,0 +1,578 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+this.EXPORTED_SYMBOLS = ["SyncTelemetry"];
+
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://services-sync/main.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/TelemetryController.jsm");
+Cu.import("resource://gre/modules/FxAccounts.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/osfile.jsm", this);
+
+let constants = {};
+Cu.import("resource://services-sync/constants.js", constants);
+
+var fxAccountsCommon = {};
+Cu.import("resource://gre/modules/FxAccountsCommon.js", fxAccountsCommon);
+
+XPCOMUtils.defineLazyServiceGetter(this, "Telemetry",
+ "@mozilla.org/base/telemetry;1",
+ "nsITelemetry");
+
+const log = Log.repository.getLogger("Sync.Telemetry");
+
+const TOPICS = [
+ "profile-before-change",
+ "weave:service:sync:start",
+ "weave:service:sync:finish",
+ "weave:service:sync:error",
+
+ "weave:engine:sync:start",
+ "weave:engine:sync:finish",
+ "weave:engine:sync:error",
+ "weave:engine:sync:applied",
+ "weave:engine:sync:uploaded",
+ "weave:engine:validate:finish",
+ "weave:engine:validate:error",
+];
+
+const PING_FORMAT_VERSION = 1;
+
+// The set of engines we record telemetry for - any other engines are ignored.
+const ENGINES = new Set(["addons", "bookmarks", "clients", "forms", "history",
+ "passwords", "prefs", "tabs", "extension-storage"]);
+
+// A regex we can use to replace the profile dir in error messages. We use a
+// regexp so we can simply replace all case-insensitive occurences.
+// This escaping function is from:
+// https://developer.mozilla.org/en/docs/Web/JavaScript/Guide/Regular_Expressions
+const reProfileDir = new RegExp(
+ OS.Constants.Path.profileDir.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"),
+ "gi");
+
+function transformError(error, engineName) {
+ if (Async.isShutdownException(error)) {
+ return { name: "shutdownerror" };
+ }
+
+ if (typeof error === "string") {
+ if (error.startsWith("error.")) {
+ // This is hacky, but I can't imagine that it's not also accurate.
+ return { name: "othererror", error };
+ }
+ // There's a chance the profiledir is in the error string which is PII we
+ // want to avoid including in the ping.
+ error = error.replace(reProfileDir, "[profileDir]");
+ return { name: "unexpectederror", error };
+ }
+
+ if (error.failureCode) {
+ return { name: "othererror", error: error.failureCode };
+ }
+
+ if (error instanceof AuthenticationError) {
+ return { name: "autherror", from: error.source };
+ }
+
+ if (error instanceof Ci.mozIStorageError) {
+ return { name: "sqlerror", code: error.result };
+ }
+
+ let httpCode = error.status ||
+ (error.response && error.response.status) ||
+ error.code;
+
+ if (httpCode) {
+ return { name: "httperror", code: httpCode };
+ }
+
+ if (error.result) {
+ return { name: "nserror", code: error.result };
+ }
+
+ return {
+ name: "unexpectederror",
+ // as above, remove the profile dir value.
+ error: String(error).replace(reProfileDir, "[profileDir]")
+ }
+}
+
+function tryGetMonotonicTimestamp() {
+ try {
+ return Telemetry.msSinceProcessStart();
+ } catch (e) {
+ log.warn("Unable to get a monotonic timestamp!");
+ return -1;
+ }
+}
+
+function timeDeltaFrom(monotonicStartTime) {
+ let now = tryGetMonotonicTimestamp();
+ if (monotonicStartTime !== -1 && now !== -1) {
+ return Math.round(now - monotonicStartTime);
+ }
+ return -1;
+}
+
+class EngineRecord {
+ constructor(name) {
+ // startTime is in ms from process start, but is monotonic (unlike Date.now())
+ // so we need to keep both it and when.
+ this.startTime = tryGetMonotonicTimestamp();
+ this.name = name;
+ }
+
+ toJSON() {
+ let result = Object.assign({}, this);
+ delete result.startTime;
+ return result;
+ }
+
+ finished(error) {
+ let took = timeDeltaFrom(this.startTime);
+ if (took > 0) {
+ this.took = took;
+ }
+ if (error) {
+ this.failureReason = transformError(error, this.name);
+ }
+ }
+
+ recordApplied(counts) {
+ if (this.incoming) {
+ log.error(`Incoming records applied multiple times for engine ${this.name}!`);
+ return;
+ }
+ if (this.name === "clients" && !counts.failed) {
+ // ignore successful application of client records
+ // since otherwise they show up every time and are meaningless.
+ return;
+ }
+
+ let incomingData = {};
+ let properties = ["applied", "failed", "newFailed", "reconciled"];
+ // Only record non-zero properties and only record incoming at all if
+ // there's at least one property we care about.
+ for (let property of properties) {
+ if (counts[property]) {
+ incomingData[property] = counts[property];
+ this.incoming = incomingData;
+ }
+ }
+ }
+
+ recordValidation(validationResult) {
+ if (this.validation) {
+ log.error(`Multiple validations occurred for engine ${this.name}!`);
+ return;
+ }
+ let { problems, version, duration, recordCount } = validationResult;
+ let validation = {
+ version: version || 0,
+ checked: recordCount || 0,
+ };
+ if (duration > 0) {
+ validation.took = Math.round(duration);
+ }
+ let summarized = problems.getSummary(true).filter(({count}) => count > 0);
+ if (summarized.length) {
+ validation.problems = summarized;
+ }
+ this.validation = validation;
+ }
+
+ recordValidationError(e) {
+ if (this.validation) {
+ log.error(`Multiple validations occurred for engine ${this.name}!`);
+ return;
+ }
+
+ this.validation = {
+ failureReason: transformError(e)
+ };
+ }
+
+ recordUploaded(counts) {
+ if (counts.sent || counts.failed) {
+ if (!this.outgoing) {
+ this.outgoing = [];
+ }
+ this.outgoing.push({
+ sent: counts.sent || undefined,
+ failed: counts.failed || undefined,
+ });
+ }
+ }
+}
+
+class TelemetryRecord {
+ constructor(allowedEngines) {
+ this.allowedEngines = allowedEngines;
+ // Our failure reason. This property only exists in the generated ping if an
+ // error actually occurred.
+ this.failureReason = undefined;
+ this.uid = "";
+ this.when = Date.now();
+ this.startTime = tryGetMonotonicTimestamp();
+ this.took = 0; // will be set later.
+
+ // All engines that have finished (ie, does not include the "current" one)
+ // We omit this from the ping if it's empty.
+ this.engines = [];
+ // The engine that has started but not yet stopped.
+ this.currentEngine = null;
+ }
+
+ toJSON() {
+ let result = {
+ when: this.when,
+ uid: this.uid,
+ took: this.took,
+ failureReason: this.failureReason,
+ status: this.status,
+ deviceID: this.deviceID,
+ devices: this.devices,
+ };
+ let engines = [];
+ for (let engine of this.engines) {
+ engines.push(engine.toJSON());
+ }
+ if (engines.length > 0) {
+ result.engines = engines;
+ }
+ return result;
+ }
+
+ finished(error) {
+ this.took = timeDeltaFrom(this.startTime);
+ if (this.currentEngine != null) {
+ log.error("Finished called for the sync before the current engine finished");
+ this.currentEngine.finished(null);
+ this.onEngineStop(this.currentEngine.name);
+ }
+ if (error) {
+ this.failureReason = transformError(error);
+ }
+
+ // We don't bother including the "devices" field if we can't come up with a
+ // UID or device ID for *this* device -- If that's the case, any data we'd
+ // put there would be likely to be full of garbage anyway.
+ let includeDeviceInfo = false;
+ try {
+ this.uid = Weave.Service.identity.hashedUID();
+ let deviceID = Weave.Service.identity.deviceID();
+ if (deviceID) {
+ // Combine the raw device id with the metrics uid to create a stable
+ // unique identifier that can't be mapped back to the user's FxA
+ // identity without knowing the metrics HMAC key.
+ this.deviceID = Utils.sha256(deviceID + this.uid);
+ includeDeviceInfo = true;
+ }
+ } catch (e) {
+ this.uid = "0".repeat(32);
+ this.deviceID = undefined;
+ }
+
+ if (includeDeviceInfo) {
+ let remoteDevices = Weave.Service.clientsEngine.remoteClients;
+ this.devices = remoteDevices.map(device => {
+ return {
+ os: device.os,
+ version: device.version,
+ id: Utils.sha256(device.id + this.uid)
+ };
+ });
+ }
+
+ // Check for engine statuses. -- We do this now, and not in engine.finished
+ // to make sure any statuses that get set "late" are recorded
+ for (let engine of this.engines) {
+ let status = Status.engines[engine.name];
+ if (status && status !== constants.ENGINE_SUCCEEDED) {
+ engine.status = status;
+ }
+ }
+
+ let statusObject = {};
+
+ let serviceStatus = Status.service;
+ if (serviceStatus && serviceStatus !== constants.STATUS_OK) {
+ statusObject.service = serviceStatus;
+ this.status = statusObject;
+ }
+ let syncStatus = Status.sync;
+ if (syncStatus && syncStatus !== constants.SYNC_SUCCEEDED) {
+ statusObject.sync = syncStatus;
+ this.status = statusObject;
+ }
+ }
+
+ onEngineStart(engineName) {
+ if (this._shouldIgnoreEngine(engineName, false)) {
+ return;
+ }
+
+ if (this.currentEngine) {
+ log.error(`Being told that engine ${engineName} has started, but current engine ${
+ this.currentEngine.name} hasn't stopped`);
+ // Just discard the current engine rather than making up data for it.
+ }
+ this.currentEngine = new EngineRecord(engineName);
+ }
+
+ onEngineStop(engineName, error) {
+ // We only care if it's the current engine if we have a current engine.
+ if (this._shouldIgnoreEngine(engineName, !!this.currentEngine)) {
+ return;
+ }
+ if (!this.currentEngine) {
+ // It's possible for us to get an error before the start message of an engine
+ // (somehow), in which case we still want to record that error.
+ if (!error) {
+ return;
+ }
+ log.error(`Error triggered on ${engineName} when no current engine exists: ${error}`);
+ this.currentEngine = new EngineRecord(engineName);
+ }
+ this.currentEngine.finished(error);
+ this.engines.push(this.currentEngine);
+ this.currentEngine = null;
+ }
+
+ onEngineApplied(engineName, counts) {
+ if (this._shouldIgnoreEngine(engineName)) {
+ return;
+ }
+ this.currentEngine.recordApplied(counts);
+ }
+
+ onEngineValidated(engineName, validationData) {
+ if (this._shouldIgnoreEngine(engineName, false)) {
+ return;
+ }
+ let engine = this.engines.find(e => e.name === engineName);
+ if (!engine && this.currentEngine && engineName === this.currentEngine.name) {
+ engine = this.currentEngine;
+ }
+ if (engine) {
+ engine.recordValidation(validationData);
+ } else {
+ log.warn(`Validation event triggered for engine ${engineName}, which hasn't been synced!`);
+ }
+ }
+
+ onEngineValidateError(engineName, error) {
+ if (this._shouldIgnoreEngine(engineName, false)) {
+ return;
+ }
+ let engine = this.engines.find(e => e.name === engineName);
+ if (!engine && this.currentEngine && engineName === this.currentEngine.name) {
+ engine = this.currentEngine;
+ }
+ if (engine) {
+ engine.recordValidationError(error);
+ } else {
+ log.warn(`Validation failure event triggered for engine ${engineName}, which hasn't been synced!`);
+ }
+ }
+
+ onEngineUploaded(engineName, counts) {
+ if (this._shouldIgnoreEngine(engineName)) {
+ return;
+ }
+ this.currentEngine.recordUploaded(counts);
+ }
+
+ _shouldIgnoreEngine(engineName, shouldBeCurrent = true) {
+ if (!this.allowedEngines.has(engineName)) {
+ log.info(`Notification for engine ${engineName}, but we aren't recording telemetry for it`);
+ return true;
+ }
+ if (shouldBeCurrent) {
+ if (!this.currentEngine || engineName != this.currentEngine.name) {
+ log.error(`Notification for engine ${engineName} but it isn't current`);
+ return true;
+ }
+ }
+ return false;
+ }
+}
+
+class SyncTelemetryImpl {
+ constructor(allowedEngines) {
+ log.level = Log.Level[Svc.Prefs.get("log.logger.telemetry", "Trace")];
+ // This is accessible so we can enable custom engines during tests.
+ this.allowedEngines = allowedEngines;
+ this.current = null;
+ this.setupObservers();
+
+ this.payloads = [];
+ this.discarded = 0;
+ this.maxPayloadCount = Svc.Prefs.get("telemetry.maxPayloadCount");
+ this.submissionInterval = Svc.Prefs.get("telemetry.submissionInterval") * 1000;
+ this.lastSubmissionTime = Telemetry.msSinceProcessStart();
+ }
+
+ getPingJSON(reason) {
+ return {
+ why: reason,
+ discarded: this.discarded || undefined,
+ version: PING_FORMAT_VERSION,
+ syncs: this.payloads.slice(),
+ };
+ }
+
+ finish(reason) {
+ // Note that we might be in the middle of a sync right now, and so we don't
+ // want to touch this.current.
+ let result = this.getPingJSON(reason);
+ this.payloads = [];
+ this.discarded = 0;
+ this.submit(result);
+ }
+
+ setupObservers() {
+ for (let topic of TOPICS) {
+ Observers.add(topic, this, this);
+ }
+ }
+
+ shutdown() {
+ this.finish("shutdown");
+ for (let topic of TOPICS) {
+ Observers.remove(topic, this, this);
+ }
+ }
+
+ submit(record) {
+ // We still call submit() with possibly illegal payloads so that tests can
+ // know that the ping was built. We don't end up submitting them, however.
+ if (record.syncs.length) {
+ log.trace(`submitting ${record.syncs.length} sync record(s) to telemetry`);
+ TelemetryController.submitExternalPing("sync", record);
+ }
+ }
+
+
+ onSyncStarted() {
+ if (this.current) {
+ log.warn("Observed weave:service:sync:start, but we're already recording a sync!");
+ // Just discard the old record, consistent with our handling of engines, above.
+ this.current = null;
+ }
+ this.current = new TelemetryRecord(this.allowedEngines);
+ }
+
+ _checkCurrent(topic) {
+ if (!this.current) {
+ log.warn(`Observed notification ${topic} but no current sync is being recorded.`);
+ return false;
+ }
+ return true;
+ }
+
+ onSyncFinished(error) {
+ if (!this.current) {
+ log.warn("onSyncFinished but we aren't recording");
+ return;
+ }
+ this.current.finished(error);
+ if (this.payloads.length < this.maxPayloadCount) {
+ this.payloads.push(this.current.toJSON());
+ } else {
+ ++this.discarded;
+ }
+ this.current = null;
+ if ((Telemetry.msSinceProcessStart() - this.lastSubmissionTime) > this.submissionInterval) {
+ this.finish("schedule");
+ this.lastSubmissionTime = Telemetry.msSinceProcessStart();
+ }
+ }
+
+ observe(subject, topic, data) {
+ log.trace(`observed ${topic} ${data}`);
+
+ switch (topic) {
+ case "profile-before-change":
+ this.shutdown();
+ break;
+
+ /* sync itself state changes */
+ case "weave:service:sync:start":
+ this.onSyncStarted();
+ break;
+
+ case "weave:service:sync:finish":
+ if (this._checkCurrent(topic)) {
+ this.onSyncFinished(null);
+ }
+ break;
+
+ case "weave:service:sync:error":
+ // argument needs to be truthy (this should always be the case)
+ this.onSyncFinished(subject || "Unknown");
+ break;
+
+ /* engine sync state changes */
+ case "weave:engine:sync:start":
+ if (this._checkCurrent(topic)) {
+ this.current.onEngineStart(data);
+ }
+ break;
+ case "weave:engine:sync:finish":
+ if (this._checkCurrent(topic)) {
+ this.current.onEngineStop(data, null);
+ }
+ break;
+
+ case "weave:engine:sync:error":
+ if (this._checkCurrent(topic)) {
+ // argument needs to be truthy (this should always be the case)
+ this.current.onEngineStop(data, subject || "Unknown");
+ }
+ break;
+
+ /* engine counts */
+ case "weave:engine:sync:applied":
+ if (this._checkCurrent(topic)) {
+ this.current.onEngineApplied(data, subject);
+ }
+ break;
+
+ case "weave:engine:sync:uploaded":
+ if (this._checkCurrent(topic)) {
+ this.current.onEngineUploaded(data, subject);
+ }
+ break;
+
+ case "weave:engine:validate:finish":
+ if (this._checkCurrent(topic)) {
+ this.current.onEngineValidated(data, subject);
+ }
+ break;
+
+ case "weave:engine:validate:error":
+ if (this._checkCurrent(topic)) {
+ this.current.onEngineValidateError(data, subject || "Unknown");
+ }
+ break;
+
+ default:
+ log.warn(`unexpected observer topic ${topic}`);
+ break;
+ }
+ }
+}
+
+this.SyncTelemetry = new SyncTelemetryImpl(ENGINES);
diff --git a/services/sync/modules/userapi.js b/services/sync/modules/userapi.js
new file mode 100644
index 000000000..e906440bd
--- /dev/null
+++ b/services/sync/modules/userapi.js
@@ -0,0 +1,224 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "UserAPI10Client",
+];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/util.js");
+
+/**
+ * A generic client for the user API 1.0 service.
+ *
+ * http://docs.services.mozilla.com/reg/apis.html
+ *
+ * Instances are constructed with the base URI of the service.
+ */
+this.UserAPI10Client = function UserAPI10Client(baseURI) {
+ this._log = Log.repository.getLogger("Sync.UserAPI");
+ this._log.level = Log.Level[Svc.Prefs.get("log.logger.userapi")];
+
+ this.baseURI = baseURI;
+}
+UserAPI10Client.prototype = {
+ USER_CREATE_ERROR_CODES: {
+ 2: "Incorrect or missing captcha.",
+ 4: "User exists.",
+ 6: "JSON parse failure.",
+ 7: "Missing password field.",
+ 9: "Requested password not strong enough.",
+ 12: "No email address on file.",
+ },
+
+ /**
+ * Determine whether a specified username exists.
+ *
+ * Callback receives the following arguments:
+ *
+ * (Error) Describes error that occurred or null if request was
+ * successful.
+ * (boolean) True if user exists. False if not. null if there was an error.
+ */
+ usernameExists: function usernameExists(username, cb) {
+ if (typeof(cb) != "function") {
+ throw new Error("cb must be a function.");
+ }
+
+ let url = this.baseURI + username;
+ let request = new RESTRequest(url);
+ request.get(this._onUsername.bind(this, cb, request));
+ },
+
+ /**
+ * Obtain the Weave (Sync) node for a specified user.
+ *
+ * The callback receives the following arguments:
+ *
+ * (Error) Describes error that occurred or null if request was successful.
+ * (string) Username request is for.
+ * (string) URL of user's node. If null and there is no error, no node could
+ * be assigned at the time of the request.
+ */
+ getWeaveNode: function getWeaveNode(username, password, cb) {
+ if (typeof(cb) != "function") {
+ throw new Error("cb must be a function.");
+ }
+
+ let request = this._getRequest(username, "/node/weave", password);
+ request.get(this._onWeaveNode.bind(this, cb, request));
+ },
+
+ /**
+ * Change a password for the specified user.
+ *
+ * @param username
+ * (string) The username whose password to change.
+ * @param oldPassword
+ * (string) The old, current password.
+ * @param newPassword
+ * (string) The new password to switch to.
+ */
+ changePassword: function changePassword(username, oldPassword, newPassword, cb) {
+ let request = this._getRequest(username, "/password", oldPassword);
+ request.onComplete = this._onChangePassword.bind(this, cb, request);
+ request.post(CommonUtils.encodeUTF8(newPassword));
+ },
+
+ createAccount: function createAccount(email, password, captchaChallenge,
+ captchaResponse, cb) {
+ let username = IdentityManager.prototype.usernameFromAccount(email);
+ let body = JSON.stringify({
+ "email": email,
+ "password": Utils.encodeUTF8(password),
+ "captcha-challenge": captchaChallenge,
+ "captcha-response": captchaResponse
+ });
+
+ let url = this.baseURI + username;
+ let request = new RESTRequest(url);
+
+ if (this.adminSecret) {
+ request.setHeader("X-Weave-Secret", this.adminSecret);
+ }
+
+ request.onComplete = this._onCreateAccount.bind(this, cb, request);
+ request.put(body);
+ },
+
+ _getRequest: function _getRequest(username, path, password=null) {
+ let url = this.baseURI + username + path;
+ let request = new RESTRequest(url);
+
+ if (password) {
+ let up = username + ":" + password;
+ request.setHeader("authorization", "Basic " + btoa(up));
+ }
+
+ return request;
+ },
+
+ _onUsername: function _onUsername(cb, request, error) {
+ if (error) {
+ cb(error, null);
+ return;
+ }
+
+ let body = request.response.body;
+ if (body == "0") {
+ cb(null, false);
+ return;
+ } else if (body == "1") {
+ cb(null, true);
+ return;
+ } else {
+ cb(new Error("Unknown response from server: " + body), null);
+ return;
+ }
+ },
+
+ _onWeaveNode: function _onWeaveNode(cb, request, error) {
+ if (error) {
+ cb.network = true;
+ cb(error, null);
+ return;
+ }
+
+ let response = request.response;
+
+ if (response.status == 200) {
+ let body = response.body;
+ if (body == "null") {
+ cb(null, null);
+ return;
+ }
+
+ cb(null, body);
+ return;
+ }
+
+ error = new Error("Sync node retrieval failed.");
+ switch (response.status) {
+ case 400:
+ error.denied = true;
+ break;
+ case 404:
+ error.notFound = true;
+ break;
+ default:
+ error.message = "Unexpected response code: " + response.status;
+ }
+
+ cb(error, null);
+ return;
+ },
+
+ _onChangePassword: function _onChangePassword(cb, request, error) {
+ this._log.info("Password change response received: " +
+ request.response.status);
+ if (error) {
+ cb(error);
+ return;
+ }
+
+ let response = request.response;
+ if (response.status != 200) {
+ cb(new Error("Password changed failed: " + response.body));
+ return;
+ }
+
+ cb(null);
+ },
+
+ _onCreateAccount: function _onCreateAccount(cb, request, error) {
+ let response = request.response;
+
+ this._log.info("Create account response: " + response.status + " " +
+ response.body);
+
+ if (error) {
+ cb(new Error("HTTP transport error."), null);
+ return;
+ }
+
+ if (response.status == 200) {
+ cb(null, response.body);
+ return;
+ }
+
+ error = new Error("Could not create user.");
+ error.body = response.body;
+
+ cb(error, null);
+ return;
+ },
+};
+Object.freeze(UserAPI10Client.prototype);
diff --git a/services/sync/modules/util.js b/services/sync/modules/util.js
new file mode 100644
index 000000000..e9dbcb37d
--- /dev/null
+++ b/services/sync/modules/util.js
@@ -0,0 +1,797 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["XPCOMUtils", "Services", "Utils", "Async", "Svc", "Str"];
+
+var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-common/stringbundle.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-common/async.js", this);
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/Services.jsm", this);
+Cu.import("resource://gre/modules/XPCOMUtils.jsm", this);
+Cu.import("resource://gre/modules/osfile.jsm", this);
+Cu.import("resource://gre/modules/Task.jsm", this);
+
+// FxAccountsCommon.js doesn't use a "namespace", so create one here.
+XPCOMUtils.defineLazyGetter(this, "FxAccountsCommon", function() {
+ let FxAccountsCommon = {};
+ Cu.import("resource://gre/modules/FxAccountsCommon.js", FxAccountsCommon);
+ return FxAccountsCommon;
+});
+
+/*
+ * Utility functions
+ */
+
+this.Utils = {
+ // Alias in functions from CommonUtils. These previously were defined here.
+ // In the ideal world, references to these would be removed.
+ nextTick: CommonUtils.nextTick,
+ namedTimer: CommonUtils.namedTimer,
+ makeURI: CommonUtils.makeURI,
+ encodeUTF8: CommonUtils.encodeUTF8,
+ decodeUTF8: CommonUtils.decodeUTF8,
+ safeAtoB: CommonUtils.safeAtoB,
+ byteArrayToString: CommonUtils.byteArrayToString,
+ bytesAsHex: CommonUtils.bytesAsHex,
+ hexToBytes: CommonUtils.hexToBytes,
+ encodeBase32: CommonUtils.encodeBase32,
+ decodeBase32: CommonUtils.decodeBase32,
+
+ // Aliases from CryptoUtils.
+ generateRandomBytes: CryptoUtils.generateRandomBytes,
+ computeHTTPMACSHA1: CryptoUtils.computeHTTPMACSHA1,
+ digestUTF8: CryptoUtils.digestUTF8,
+ digestBytes: CryptoUtils.digestBytes,
+ sha1: CryptoUtils.sha1,
+ sha1Base32: CryptoUtils.sha1Base32,
+ sha256: CryptoUtils.sha256,
+ makeHMACKey: CryptoUtils.makeHMACKey,
+ makeHMACHasher: CryptoUtils.makeHMACHasher,
+ hkdfExpand: CryptoUtils.hkdfExpand,
+ pbkdf2Generate: CryptoUtils.pbkdf2Generate,
+ deriveKeyFromPassphrase: CryptoUtils.deriveKeyFromPassphrase,
+ getHTTPMACSHA1Header: CryptoUtils.getHTTPMACSHA1Header,
+
+ /**
+ * The string to use as the base User-Agent in Sync requests.
+ * This string will look something like
+ *
+ * Firefox/49.0a1 (Windows NT 6.1; WOW64; rv:46.0) FxSync/1.51.0.20160516142357.desktop
+ */
+ _userAgent: null,
+ get userAgent() {
+ if (!this._userAgent) {
+ let hph = Cc["@mozilla.org/network/protocol;1?name=http"].getService(Ci.nsIHttpProtocolHandler);
+ this._userAgent =
+ Services.appinfo.name + "/" + Services.appinfo.version + // Product.
+ " (" + hph.oscpu + ")" + // (oscpu)
+ " FxSync/" + WEAVE_VERSION + "." + // Sync.
+ Services.appinfo.appBuildID + "."; // Build.
+ }
+ return this._userAgent + Svc.Prefs.get("client.type", "desktop");
+ },
+
+ /**
+ * Wrap a function to catch all exceptions and log them
+ *
+ * @usage MyObj._catch = Utils.catch;
+ * MyObj.foo = function() { this._catch(func)(); }
+ *
+ * Optionally pass a function which will be called if an
+ * exception occurs.
+ */
+ catch: function Utils_catch(func, exceptionCallback) {
+ let thisArg = this;
+ return function WrappedCatch() {
+ try {
+ return func.call(thisArg);
+ }
+ catch(ex) {
+ thisArg._log.debug("Exception calling " + (func.name || "anonymous function"), ex);
+ if (exceptionCallback) {
+ return exceptionCallback.call(thisArg, ex);
+ }
+ return null;
+ }
+ };
+ },
+
+ /**
+ * Wrap a function to call lock before calling the function then unlock.
+ *
+ * @usage MyObj._lock = Utils.lock;
+ * MyObj.foo = function() { this._lock(func)(); }
+ */
+ lock: function lock(label, func) {
+ let thisArg = this;
+ return function WrappedLock() {
+ if (!thisArg.lock()) {
+ throw "Could not acquire lock. Label: \"" + label + "\".";
+ }
+
+ try {
+ return func.call(thisArg);
+ }
+ finally {
+ thisArg.unlock();
+ }
+ };
+ },
+
+ isLockException: function isLockException(ex) {
+ return ex && ex.indexOf && ex.indexOf("Could not acquire lock.") == 0;
+ },
+
+ /**
+ * Wrap functions to notify when it starts and finishes executing or if it
+ * threw an error.
+ *
+ * The message is a combination of a provided prefix, the local name, and
+ * the event. Possible events are: "start", "finish", "error". The subject
+ * is the function's return value on "finish" or the caught exception on
+ * "error". The data argument is the predefined data value.
+ *
+ * Example:
+ *
+ * @usage function MyObj(name) {
+ * this.name = name;
+ * this._notify = Utils.notify("obj:");
+ * }
+ * MyObj.prototype = {
+ * foo: function() this._notify("func", "data-arg", function () {
+ * //...
+ * }(),
+ * };
+ */
+ notify: function Utils_notify(prefix) {
+ return function NotifyMaker(name, data, func) {
+ let thisArg = this;
+ let notify = function(state, subject) {
+ let mesg = prefix + name + ":" + state;
+ thisArg._log.trace("Event: " + mesg);
+ Observers.notify(mesg, subject, data);
+ };
+
+ return function WrappedNotify() {
+ try {
+ notify("start", null);
+ let ret = func.call(thisArg);
+ notify("finish", ret);
+ return ret;
+ }
+ catch(ex) {
+ notify("error", ex);
+ throw ex;
+ }
+ };
+ };
+ },
+
+ /**
+ * GUIDs are 9 random bytes encoded with base64url (RFC 4648).
+ * That makes them 12 characters long with 72 bits of entropy.
+ */
+ makeGUID: function makeGUID() {
+ return CommonUtils.encodeBase64URL(Utils.generateRandomBytes(9));
+ },
+
+ _base64url_regex: /^[-abcdefghijklmnopqrstuvwxyz0123456789_]{12}$/i,
+ checkGUID: function checkGUID(guid) {
+ return !!guid && this._base64url_regex.test(guid);
+ },
+
+ /**
+ * Add a simple getter/setter to an object that defers access of a property
+ * to an inner property.
+ *
+ * @param obj
+ * Object to add properties to defer in its prototype
+ * @param defer
+ * Property of obj to defer to
+ * @param prop
+ * Property name to defer (or an array of property names)
+ */
+ deferGetSet: function Utils_deferGetSet(obj, defer, prop) {
+ if (Array.isArray(prop))
+ return prop.map(prop => Utils.deferGetSet(obj, defer, prop));
+
+ let prot = obj.prototype;
+
+ // Create a getter if it doesn't exist yet
+ if (!prot.__lookupGetter__(prop)) {
+ prot.__defineGetter__(prop, function () {
+ return this[defer][prop];
+ });
+ }
+
+ // Create a setter if it doesn't exist yet
+ if (!prot.__lookupSetter__(prop)) {
+ prot.__defineSetter__(prop, function (val) {
+ this[defer][prop] = val;
+ });
+ }
+ },
+
+ lazyStrings: function Weave_lazyStrings(name) {
+ let bundle = "chrome://weave/locale/services/" + name + ".properties";
+ return () => new StringBundle(bundle);
+ },
+
+ deepEquals: function eq(a, b) {
+ // If they're triple equals, then it must be equals!
+ if (a === b)
+ return true;
+
+ // If they weren't equal, they must be objects to be different
+ if (typeof a != "object" || typeof b != "object")
+ return false;
+
+ // But null objects won't have properties to compare
+ if (a === null || b === null)
+ return false;
+
+ // Make sure all of a's keys have a matching value in b
+ for (let k in a)
+ if (!eq(a[k], b[k]))
+ return false;
+
+ // Do the same for b's keys but skip those that we already checked
+ for (let k in b)
+ if (!(k in a) && !eq(a[k], b[k]))
+ return false;
+
+ return true;
+ },
+
+ // Generator and discriminator for HMAC exceptions.
+ // Split these out in case we want to make them richer in future, and to
+ // avoid inevitable confusion if the message changes.
+ throwHMACMismatch: function throwHMACMismatch(shouldBe, is) {
+ throw "Record SHA256 HMAC mismatch: should be " + shouldBe + ", is " + is;
+ },
+
+ isHMACMismatch: function isHMACMismatch(ex) {
+ const hmacFail = "Record SHA256 HMAC mismatch: ";
+ return ex && ex.indexOf && (ex.indexOf(hmacFail) == 0);
+ },
+
+ /**
+ * Turn RFC 4648 base32 into our own user-friendly version.
+ * ABCDEFGHIJKLMNOPQRSTUVWXYZ234567
+ * becomes
+ * abcdefghijk8mn9pqrstuvwxyz234567
+ */
+ base32ToFriendly: function base32ToFriendly(input) {
+ return input.toLowerCase()
+ .replace(/l/g, '8')
+ .replace(/o/g, '9');
+ },
+
+ base32FromFriendly: function base32FromFriendly(input) {
+ return input.toUpperCase()
+ .replace(/8/g, 'L')
+ .replace(/9/g, 'O');
+ },
+
+ /**
+ * Key manipulation.
+ */
+
+ // Return an octet string in friendly base32 *with no trailing =*.
+ encodeKeyBase32: function encodeKeyBase32(keyData) {
+ return Utils.base32ToFriendly(
+ Utils.encodeBase32(keyData))
+ .slice(0, SYNC_KEY_ENCODED_LENGTH);
+ },
+
+ decodeKeyBase32: function decodeKeyBase32(encoded) {
+ return Utils.decodeBase32(
+ Utils.base32FromFriendly(
+ Utils.normalizePassphrase(encoded)))
+ .slice(0, SYNC_KEY_DECODED_LENGTH);
+ },
+
+ base64Key: function base64Key(keyData) {
+ return btoa(keyData);
+ },
+
+ /**
+ * N.B., salt should be base64 encoded, even though we have to decode
+ * it later!
+ */
+ derivePresentableKeyFromPassphrase : function derivePresentableKeyFromPassphrase(passphrase, salt, keyLength, forceJS) {
+ let k = CryptoUtils.deriveKeyFromPassphrase(passphrase, salt, keyLength,
+ forceJS);
+ return Utils.encodeKeyBase32(k);
+ },
+
+ /**
+ * N.B., salt should be base64 encoded, even though we have to decode
+ * it later!
+ */
+ deriveEncodedKeyFromPassphrase : function deriveEncodedKeyFromPassphrase(passphrase, salt, keyLength, forceJS) {
+ let k = CryptoUtils.deriveKeyFromPassphrase(passphrase, salt, keyLength,
+ forceJS);
+ return Utils.base64Key(k);
+ },
+
+ /**
+ * Take a base64-encoded 128-bit AES key, returning it as five groups of five
+ * uppercase alphanumeric characters, separated by hyphens.
+ * A.K.A. base64-to-base32 encoding.
+ */
+ presentEncodedKeyAsSyncKey : function presentEncodedKeyAsSyncKey(encodedKey) {
+ return Utils.encodeKeyBase32(atob(encodedKey));
+ },
+
+ /**
+ * Load a JSON file from disk in the profile directory.
+ *
+ * @param filePath
+ * JSON file path load from profile. Loaded file will be
+ * <profile>/<filePath>.json. i.e. Do not specify the ".json"
+ * extension.
+ * @param that
+ * Object to use for logging and "this" for callback.
+ * @param callback
+ * Function to process json object as its first argument. If the file
+ * could not be loaded, the first argument will be undefined.
+ */
+ jsonLoad: Task.async(function*(filePath, that, callback) {
+ let path = OS.Path.join(OS.Constants.Path.profileDir, "weave", filePath + ".json");
+
+ if (that._log) {
+ that._log.trace("Loading json from disk: " + filePath);
+ }
+
+ let json;
+
+ try {
+ json = yield CommonUtils.readJSON(path);
+ } catch (e) {
+ if (e instanceof OS.File.Error && e.becauseNoSuchFile) {
+ // Ignore non-existent files, but explicitly return null.
+ json = null;
+ } else {
+ if (that._log) {
+ that._log.debug("Failed to load json", e);
+ }
+ }
+ }
+
+ callback.call(that, json);
+ }),
+
+ /**
+ * Save a json-able object to disk in the profile directory.
+ *
+ * @param filePath
+ * JSON file path save to <filePath>.json
+ * @param that
+ * Object to use for logging and "this" for callback
+ * @param obj
+ * Function to provide json-able object to save. If this isn't a
+ * function, it'll be used as the object to make a json string.
+ * @param callback
+ * Function called when the write has been performed. Optional.
+ * The first argument will be a Components.results error
+ * constant on error or null if no error was encountered (and
+ * the file saved successfully).
+ */
+ jsonSave: Task.async(function*(filePath, that, obj, callback) {
+ let path = OS.Path.join(OS.Constants.Path.profileDir, "weave",
+ ...(filePath + ".json").split("/"));
+ let dir = OS.Path.dirname(path);
+ let error = null;
+
+ try {
+ yield OS.File.makeDir(dir, { from: OS.Constants.Path.profileDir });
+
+ if (that._log) {
+ that._log.trace("Saving json to disk: " + path);
+ }
+
+ let json = typeof obj == "function" ? obj.call(that) : obj;
+
+ yield CommonUtils.writeJSON(json, path);
+ } catch (e) {
+ error = e
+ }
+
+ if (typeof callback == "function") {
+ callback.call(that, error);
+ }
+ }),
+
+ /**
+ * Move a json file in the profile directory. Will fail if a file exists at the
+ * destination.
+ *
+ * @returns a promise that resolves to undefined on success, or rejects on failure
+ *
+ * @param aFrom
+ * Current path to the JSON file saved on disk, relative to profileDir/weave
+ * .json will be appended to the file name.
+ * @param aTo
+ * New path to the JSON file saved on disk, relative to profileDir/weave
+ * .json will be appended to the file name.
+ * @param that
+ * Object to use for logging
+ */
+ jsonMove(aFrom, aTo, that) {
+ let pathFrom = OS.Path.join(OS.Constants.Path.profileDir, "weave",
+ ...(aFrom + ".json").split("/"));
+ let pathTo = OS.Path.join(OS.Constants.Path.profileDir, "weave",
+ ...(aTo + ".json").split("/"));
+ if (that._log) {
+ that._log.trace("Moving " + pathFrom + " to " + pathTo);
+ }
+ return OS.File.move(pathFrom, pathTo, { noOverwrite: true });
+ },
+
+ /**
+ * Removes a json file in the profile directory.
+ *
+ * @returns a promise that resolves to undefined on success, or rejects on failure
+ *
+ * @param filePath
+ * Current path to the JSON file saved on disk, relative to profileDir/weave
+ * .json will be appended to the file name.
+ * @param that
+ * Object to use for logging
+ */
+ jsonRemove(filePath, that) {
+ let path = OS.Path.join(OS.Constants.Path.profileDir, "weave",
+ ...(filePath + ".json").split("/"));
+ if (that._log) {
+ that._log.trace("Deleting " + path);
+ }
+ return OS.File.remove(path, { ignoreAbsent: true });
+ },
+
+ getErrorString: function Utils_getErrorString(error, args) {
+ try {
+ return Str.errors.get(error, args || null);
+ } catch (e) {}
+
+ // basically returns "Unknown Error"
+ return Str.errors.get("error.reason.unknown");
+ },
+
+ /**
+ * Generate 26 characters.
+ */
+ generatePassphrase: function generatePassphrase() {
+ // Note that this is a different base32 alphabet to the one we use for
+ // other tasks. It's lowercase, uses different letters, and needs to be
+ // decoded with decodeKeyBase32, not just decodeBase32.
+ return Utils.encodeKeyBase32(CryptoUtils.generateRandomBytes(16));
+ },
+
+ /**
+ * The following are the methods supported for UI use:
+ *
+ * * isPassphrase:
+ * determines whether a string is either a normalized or presentable
+ * passphrase.
+ * * hyphenatePassphrase:
+ * present a normalized passphrase for display. This might actually
+ * perform work beyond just hyphenation; sorry.
+ * * hyphenatePartialPassphrase:
+ * present a fragment of a normalized passphrase for display.
+ * * normalizePassphrase:
+ * take a presentable passphrase and reduce it to a normalized
+ * representation for storage. normalizePassphrase can safely be called
+ * on normalized input.
+ * * normalizeAccount:
+ * take user input for account/username, cleaning up appropriately.
+ */
+
+ isPassphrase: function(s) {
+ if (s) {
+ return /^[abcdefghijkmnpqrstuvwxyz23456789]{26}$/.test(Utils.normalizePassphrase(s));
+ }
+ return false;
+ },
+
+ /**
+ * Hyphenate a passphrase (26 characters) into groups.
+ * abbbbccccddddeeeeffffggggh
+ * =>
+ * a-bbbbc-cccdd-ddeee-effff-ggggh
+ */
+ hyphenatePassphrase: function hyphenatePassphrase(passphrase) {
+ // For now, these are the same.
+ return Utils.hyphenatePartialPassphrase(passphrase, true);
+ },
+
+ hyphenatePartialPassphrase: function hyphenatePartialPassphrase(passphrase, omitTrailingDash) {
+ if (!passphrase)
+ return null;
+
+ // Get the raw data input. Just base32.
+ let data = passphrase.toLowerCase().replace(/[^abcdefghijkmnpqrstuvwxyz23456789]/g, "");
+
+ // This is the neatest way to do this.
+ if ((data.length == 1) && !omitTrailingDash)
+ return data + "-";
+
+ // Hyphenate it.
+ let y = data.substr(0,1);
+ let z = data.substr(1).replace(/(.{1,5})/g, "-$1");
+
+ // Correct length? We're done.
+ if ((z.length == 30) || omitTrailingDash)
+ return y + z;
+
+ // Add a trailing dash if appropriate.
+ return (y + z.replace(/([^-]{5})$/, "$1-")).substr(0, SYNC_KEY_HYPHENATED_LENGTH);
+ },
+
+ normalizePassphrase: function normalizePassphrase(pp) {
+ // Short var name... have you seen the lines below?!
+ // Allow leading and trailing whitespace.
+ pp = pp.trim().toLowerCase();
+
+ // 20-char sync key.
+ if (pp.length == 23 &&
+ [5, 11, 17].every(i => pp[i] == '-')) {
+
+ return pp.slice(0, 5) + pp.slice(6, 11)
+ + pp.slice(12, 17) + pp.slice(18, 23);
+ }
+
+ // "Modern" 26-char key.
+ if (pp.length == 31 &&
+ [1, 7, 13, 19, 25].every(i => pp[i] == '-')) {
+
+ return pp.slice(0, 1) + pp.slice(2, 7)
+ + pp.slice(8, 13) + pp.slice(14, 19)
+ + pp.slice(20, 25) + pp.slice(26, 31);
+ }
+
+ // Something else -- just return.
+ return pp;
+ },
+
+ normalizeAccount: function normalizeAccount(acc) {
+ return acc.trim();
+ },
+
+ /**
+ * Create an array like the first but without elements of the second. Reuse
+ * arrays if possible.
+ */
+ arraySub: function arraySub(minuend, subtrahend) {
+ if (!minuend.length || !subtrahend.length)
+ return minuend;
+ return minuend.filter(i => subtrahend.indexOf(i) == -1);
+ },
+
+ /**
+ * Build the union of two arrays. Reuse arrays if possible.
+ */
+ arrayUnion: function arrayUnion(foo, bar) {
+ if (!foo.length)
+ return bar;
+ if (!bar.length)
+ return foo;
+ return foo.concat(Utils.arraySub(bar, foo));
+ },
+
+ bind2: function Async_bind2(object, method) {
+ return function innerBind() { return method.apply(object, arguments); };
+ },
+
+ /**
+ * Is there a master password configured, regardless of current lock state?
+ */
+ mpEnabled: function mpEnabled() {
+ let modules = Cc["@mozilla.org/security/pkcs11moduledb;1"]
+ .getService(Ci.nsIPKCS11ModuleDB);
+ let sdrSlot = modules.findSlotByName("");
+ let status = sdrSlot.status;
+ let slots = Ci.nsIPKCS11Slot;
+
+ return status != slots.SLOT_UNINITIALIZED && status != slots.SLOT_READY;
+ },
+
+ /**
+ * Is there a master password configured and currently locked?
+ */
+ mpLocked: function mpLocked() {
+ let modules = Cc["@mozilla.org/security/pkcs11moduledb;1"]
+ .getService(Ci.nsIPKCS11ModuleDB);
+ let sdrSlot = modules.findSlotByName("");
+ let status = sdrSlot.status;
+ let slots = Ci.nsIPKCS11Slot;
+
+ if (status == slots.SLOT_READY || status == slots.SLOT_LOGGED_IN
+ || status == slots.SLOT_UNINITIALIZED)
+ return false;
+
+ if (status == slots.SLOT_NOT_LOGGED_IN)
+ return true;
+
+ // something wacky happened, pretend MP is locked
+ return true;
+ },
+
+ // If Master Password is enabled and locked, present a dialog to unlock it.
+ // Return whether the system is unlocked.
+ ensureMPUnlocked: function ensureMPUnlocked() {
+ if (!Utils.mpLocked()) {
+ return true;
+ }
+ let sdr = Cc["@mozilla.org/security/sdr;1"]
+ .getService(Ci.nsISecretDecoderRing);
+ try {
+ sdr.encryptString("bacon");
+ return true;
+ } catch(e) {}
+ return false;
+ },
+
+ /**
+ * Return a value for a backoff interval. Maximum is eight hours, unless
+ * Status.backoffInterval is higher.
+ *
+ */
+ calculateBackoff: function calculateBackoff(attempts, baseInterval,
+ statusInterval) {
+ let backoffInterval = attempts *
+ (Math.floor(Math.random() * baseInterval) +
+ baseInterval);
+ return Math.max(Math.min(backoffInterval, MAXIMUM_BACKOFF_INTERVAL),
+ statusInterval);
+ },
+
+ /**
+ * Return a set of hostnames (including the protocol) which may have
+ * credentials for sync itself stored in the login manager.
+ *
+ * In general, these hosts will not have their passwords synced, will be
+ * reset when we drop sync credentials, etc.
+ */
+ getSyncCredentialsHosts: function() {
+ let result = new Set(this.getSyncCredentialsHostsLegacy());
+ for (let host of this.getSyncCredentialsHostsFxA()) {
+ result.add(host);
+ }
+ return result;
+ },
+
+ /*
+ * Get the "legacy" identity hosts.
+ */
+ getSyncCredentialsHostsLegacy: function() {
+ // the legacy sync host
+ return new Set([PWDMGR_HOST]);
+ },
+
+ /*
+ * Get the FxA identity hosts.
+ */
+ getSyncCredentialsHostsFxA: function() {
+ let result = new Set();
+ // the FxA host
+ result.add(FxAccountsCommon.FXA_PWDMGR_HOST);
+ // We used to include the FxA hosts (hence the Set() result) but we now
+ // don't give them special treatment (hence the Set() with exactly 1 item)
+ return result;
+ },
+
+ getDefaultDeviceName() {
+ // Generate a client name if we don't have a useful one yet
+ let env = Cc["@mozilla.org/process/environment;1"]
+ .getService(Ci.nsIEnvironment);
+ let user = env.get("USER") || env.get("USERNAME") ||
+ Svc.Prefs.get("account") || Svc.Prefs.get("username");
+ // A little hack for people using the the moz-build environment on Windows
+ // which sets USER to the literal "%USERNAME%" (yes, really)
+ if (user == "%USERNAME%" && env.get("USERNAME")) {
+ user = env.get("USERNAME");
+ }
+
+ let brand = new StringBundle("chrome://branding/locale/brand.properties");
+ let brandName = brand.get("brandShortName");
+
+ let appName;
+ try {
+ let syncStrings = new StringBundle("chrome://browser/locale/sync.properties");
+ appName = syncStrings.getFormattedString("sync.defaultAccountApplication", [brandName]);
+ } catch (ex) {}
+ appName = appName || brandName;
+
+ let system =
+ // 'device' is defined on unix systems
+ Cc["@mozilla.org/system-info;1"].getService(Ci.nsIPropertyBag2).get("device") ||
+ // hostname of the system, usually assigned by the user or admin
+ Cc["@mozilla.org/system-info;1"].getService(Ci.nsIPropertyBag2).get("host") ||
+ // fall back on ua info string
+ Cc["@mozilla.org/network/protocol;1?name=http"].getService(Ci.nsIHttpProtocolHandler).oscpu;
+
+ return Str.sync.get("client.name2", [user, appName, system]);
+ },
+
+ getDeviceName() {
+ const deviceName = Svc.Prefs.get("client.name", "");
+
+ if (deviceName === "") {
+ return this.getDefaultDeviceName();
+ }
+
+ return deviceName;
+ },
+
+ getDeviceType() {
+ return Svc.Prefs.get("client.type", DEVICE_TYPE_DESKTOP);
+ },
+
+ formatTimestamp(date) {
+ // Format timestamp as: "%Y-%m-%d %H:%M:%S"
+ let year = String(date.getFullYear());
+ let month = String(date.getMonth() + 1).padStart(2, "0");
+ let day = String(date.getDate()).padStart(2, "0");
+ let hours = String(date.getHours()).padStart(2, "0");
+ let minutes = String(date.getMinutes()).padStart(2, "0");
+ let seconds = String(date.getSeconds()).padStart(2, "0");
+
+ return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}`;
+ }
+};
+
+XPCOMUtils.defineLazyGetter(Utils, "_utf8Converter", function() {
+ let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
+ .createInstance(Ci.nsIScriptableUnicodeConverter);
+ converter.charset = "UTF-8";
+ return converter;
+});
+
+/*
+ * Commonly-used services
+ */
+this.Svc = {};
+Svc.Prefs = new Preferences(PREFS_BRANCH);
+Svc.DefaultPrefs = new Preferences({branch: PREFS_BRANCH, defaultBranch: true});
+Svc.Obs = Observers;
+
+var _sessionCID = Services.appinfo.ID == SEAMONKEY_ID ?
+ "@mozilla.org/suite/sessionstore;1" :
+ "@mozilla.org/browser/sessionstore;1";
+
+[
+ ["Idle", "@mozilla.org/widget/idleservice;1", "nsIIdleService"],
+ ["Session", _sessionCID, "nsISessionStore"]
+].forEach(function([name, contract, iface]) {
+ XPCOMUtils.defineLazyServiceGetter(Svc, name, contract, iface);
+});
+
+XPCOMUtils.defineLazyModuleGetter(Svc, "FormHistory", "resource://gre/modules/FormHistory.jsm");
+
+Svc.__defineGetter__("Crypto", function() {
+ let cryptoSvc;
+ let ns = {};
+ Cu.import("resource://services-crypto/WeaveCrypto.js", ns);
+ cryptoSvc = new ns.WeaveCrypto();
+ delete Svc.Crypto;
+ return Svc.Crypto = cryptoSvc;
+});
+
+this.Str = {};
+["errors", "sync"].forEach(function(lazy) {
+ XPCOMUtils.defineLazyGetter(Str, lazy, Utils.lazyStrings(lazy));
+});
+
+Svc.Obs.add("xpcom-shutdown", function () {
+ for (let name in Svc)
+ delete Svc[name];
+});
diff --git a/services/sync/moz.build b/services/sync/moz.build
new file mode 100644
index 000000000..156f43797
--- /dev/null
+++ b/services/sync/moz.build
@@ -0,0 +1,78 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files('**'):
+ BUG_COMPONENT = ('Mozilla Services', 'Firefox Sync: Backend')
+
+DIRS += ['locales']
+
+XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini']
+
+EXTRA_COMPONENTS += [
+ 'SyncComponents.manifest',
+ 'Weave.js',
+]
+
+EXTRA_JS_MODULES['services-sync'] += [
+ 'modules/addonsreconciler.js',
+ 'modules/addonutils.js',
+ 'modules/bookmark_validator.js',
+ 'modules/browserid_identity.js',
+ 'modules/collection_validator.js',
+ 'modules/engines.js',
+ 'modules/FxaMigrator.jsm',
+ 'modules/identity.js',
+ 'modules/jpakeclient.js',
+ 'modules/keys.js',
+ 'modules/main.js',
+ 'modules/policies.js',
+ 'modules/record.js',
+ 'modules/resource.js',
+ 'modules/rest.js',
+ 'modules/service.js',
+ 'modules/status.js',
+ 'modules/SyncedTabs.jsm',
+ 'modules/telemetry.js',
+ 'modules/userapi.js',
+ 'modules/util.js',
+]
+
+EXTRA_PP_JS_MODULES['services-sync'] += [
+ 'modules/constants.js',
+]
+
+# Definitions used by constants.js
+DEFINES['weave_version'] = '1.54.0'
+DEFINES['weave_id'] = '{340c2bbc-ce74-4362-90b5-7c26312808ef}'
+
+EXTRA_JS_MODULES['services-sync'].engines += [
+ 'modules/engines/addons.js',
+ 'modules/engines/bookmarks.js',
+ 'modules/engines/clients.js',
+ 'modules/engines/extension-storage.js',
+ 'modules/engines/forms.js',
+ 'modules/engines/history.js',
+ 'modules/engines/passwords.js',
+ 'modules/engines/prefs.js',
+ 'modules/engines/tabs.js',
+]
+
+EXTRA_JS_MODULES['services-sync'].stages += [
+ 'modules/stages/cluster.js',
+ 'modules/stages/declined.js',
+ 'modules/stages/enginesync.js',
+]
+
+TESTING_JS_MODULES.services.sync += [
+ 'modules-testing/fakeservices.js',
+ 'modules-testing/fxa_utils.js',
+ 'modules-testing/rotaryengine.js',
+ 'modules-testing/utils.js',
+]
+
+JS_PREFERENCE_FILES += [
+ 'services-sync.js',
+]
diff --git a/services/sync/services-sync.js b/services/sync/services-sync.js
new file mode 100644
index 000000000..f4167c1ce
--- /dev/null
+++ b/services/sync/services-sync.js
@@ -0,0 +1,95 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+pref("services.sync.serverURL", "https://auth.services.mozilla.com/");
+pref("services.sync.userURL", "user/");
+pref("services.sync.miscURL", "misc/");
+pref("services.sync.termsURL", "https://services.mozilla.com/tos/");
+pref("services.sync.privacyURL", "https://services.mozilla.com/privacy-policy/");
+pref("services.sync.statusURL", "https://services.mozilla.com/status/");
+pref("services.sync.syncKeyHelpURL", "https://services.mozilla.com/help/synckey");
+
+pref("services.sync.lastversion", "firstrun");
+pref("services.sync.sendVersionInfo", true);
+
+pref("services.sync.scheduler.eolInterval", 604800); // 1 week
+pref("services.sync.scheduler.idleInterval", 3600); // 1 hour
+pref("services.sync.scheduler.activeInterval", 600); // 10 minutes
+pref("services.sync.scheduler.immediateInterval", 90); // 1.5 minutes
+pref("services.sync.scheduler.idleTime", 300); // 5 minutes
+
+pref("services.sync.scheduler.fxa.singleDeviceInterval", 3600); // 1 hour
+pref("services.sync.scheduler.sync11.singleDeviceInterval", 86400); // 1 day
+
+pref("services.sync.errorhandler.networkFailureReportTimeout", 1209600); // 2 weeks
+
+pref("services.sync.engine.addons", true);
+pref("services.sync.engine.bookmarks", true);
+pref("services.sync.engine.history", true);
+pref("services.sync.engine.passwords", true);
+pref("services.sync.engine.prefs", true);
+pref("services.sync.engine.tabs", true);
+pref("services.sync.engine.tabs.filteredUrls", "^(about:.*|chrome://weave/.*|wyciwyg:.*|file:.*|blob:.*)$");
+
+pref("services.sync.jpake.serverURL", "https://setup.services.mozilla.com/");
+pref("services.sync.jpake.pollInterval", 1000);
+pref("services.sync.jpake.firstMsgMaxTries", 300); // 5 minutes
+pref("services.sync.jpake.lastMsgMaxTries", 300); // 5 minutes
+pref("services.sync.jpake.maxTries", 10);
+
+// If true, add-on sync ignores changes to the user-enabled flag. This
+// allows people to have the same set of add-ons installed across all
+// profiles while maintaining different enabled states.
+pref("services.sync.addons.ignoreUserEnabledChanges", false);
+
+// Comma-delimited list of hostnames to trust for add-on install.
+pref("services.sync.addons.trustedSourceHostnames", "addons.mozilla.org");
+
+pref("services.sync.log.appender.console", "Fatal");
+pref("services.sync.log.appender.dump", "Error");
+pref("services.sync.log.appender.file.level", "Trace");
+pref("services.sync.log.appender.file.logOnError", true);
+pref("services.sync.log.appender.file.logOnSuccess", false);
+pref("services.sync.log.appender.file.maxErrorAge", 864000); // 10 days
+pref("services.sync.log.rootLogger", "Debug");
+pref("services.sync.log.logger.addonutils", "Debug");
+pref("services.sync.log.logger.declined", "Debug");
+pref("services.sync.log.logger.service.main", "Debug");
+pref("services.sync.log.logger.status", "Debug");
+pref("services.sync.log.logger.authenticator", "Debug");
+pref("services.sync.log.logger.network.resources", "Debug");
+pref("services.sync.log.logger.service.jpakeclient", "Debug");
+pref("services.sync.log.logger.engine.bookmarks", "Debug");
+pref("services.sync.log.logger.engine.clients", "Debug");
+pref("services.sync.log.logger.engine.forms", "Debug");
+pref("services.sync.log.logger.engine.history", "Debug");
+pref("services.sync.log.logger.engine.passwords", "Debug");
+pref("services.sync.log.logger.engine.prefs", "Debug");
+pref("services.sync.log.logger.engine.tabs", "Debug");
+pref("services.sync.log.logger.engine.addons", "Debug");
+pref("services.sync.log.logger.engine.extension-storage", "Debug");
+pref("services.sync.log.logger.engine.apps", "Debug");
+pref("services.sync.log.logger.identity", "Debug");
+pref("services.sync.log.logger.userapi", "Debug");
+pref("services.sync.log.cryptoDebug", false);
+
+pref("services.sync.fxa.termsURL", "https://accounts.firefox.com/legal/terms");
+pref("services.sync.fxa.privacyURL", "https://accounts.firefox.com/legal/privacy");
+
+pref("services.sync.telemetry.submissionInterval", 43200); // 12 hours in seconds
+pref("services.sync.telemetry.maxPayloadCount", 500);
+
+// Note that services.sync.validation.enabled is located in browser/app/profile/firefox.js
+
+// We consider validation this frequently. After considering validation, even
+// if we don't end up validating, we won't try again unless this much time has passed.
+pref("services.sync.validation.interval", 86400); // 24 hours in seconds
+
+// We only run validation `services.sync.validation.percentageChance` percent of
+// the time, even if it's been the right amount of time since the last validation,
+// and you meet the maxRecord checks.
+pref("services.sync.validation.percentageChance", 10);
+
+// We won't validate an engine if it has more than this many records on the server.
+pref("services.sync.validation.maxRecords", 100);
diff --git a/services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml b/services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml
new file mode 100644
index 000000000..6eb153ad1
--- /dev/null
+++ b/services/sync/tests/tps/addons/api/restartless-xpi@tests.mozilla.org.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5617">
+ <name>Restartless Test XPI</name>
+ <type id="1">Extension</type>
+ <guid>restartless-xpi@tests.mozilla.org</guid>
+ <slug>restartless-xpi</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="485">http://127.0.0.1:4567/addons/restartless.xpi</install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+ </addon>
+</searchresults>
diff --git a/services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml b/services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml
new file mode 100644
index 000000000..14a056013
--- /dev/null
+++ b/services/sync/tests/tps/addons/api/unsigned-xpi@tests.mozilla.org.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5612">
+ <name>Unsigned Test XPI</name>
+ <type id="1">Extension</type>
+ <guid>unsigned-xpi@tests.mozilla.org</guid>
+ <slug>unsigned-xpi</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="452">http://127.0.0.1:4567/addons/unsigned.xpi</install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+ </addon>
+</searchresults>
diff --git a/services/sync/tests/tps/addons/restartless.xpi b/services/sync/tests/tps/addons/restartless.xpi
new file mode 100644
index 000000000..973bc00cb
--- /dev/null
+++ b/services/sync/tests/tps/addons/restartless.xpi
Binary files differ
diff --git a/services/sync/tests/tps/addons/unsigned.xpi b/services/sync/tests/tps/addons/unsigned.xpi
new file mode 100644
index 000000000..51b00475a
--- /dev/null
+++ b/services/sync/tests/tps/addons/unsigned.xpi
Binary files differ
diff --git a/services/sync/tests/tps/all_tests.json b/services/sync/tests/tps/all_tests.json
new file mode 100644
index 000000000..ca7031e59
--- /dev/null
+++ b/services/sync/tests/tps/all_tests.json
@@ -0,0 +1,33 @@
+{ "tests": [
+ "test_bookmark_conflict.js",
+ "test_sync.js",
+ "test_prefs.js",
+ "test_tabs.js",
+ "test_passwords.js",
+ "test_history.js",
+ "test_formdata.js",
+ "test_bug530717.js",
+ "test_bug531489.js",
+ "test_bug538298.js",
+ "test_bug556509.js",
+ "test_bug562515.js",
+ "test_bug563989.js",
+ "test_bug535326.js",
+ "test_bug501528.js",
+ "test_bug575423.js",
+ "test_bug546807.js",
+ "test_history_collision.js",
+ "test_privbrw_passwords.js",
+ "test_privbrw_tabs.js",
+ "test_bookmarks_in_same_named_folder.js",
+ "test_client_wipe.js",
+ "test_special_tabs.js",
+ "test_addon_sanity.js",
+ "test_addon_restartless_xpi.js",
+ "test_addon_nonrestartless_xpi.js",
+ "test_addon_reconciling.js",
+ "test_addon_wipe.js"
+ ]
+}
+
+
diff --git a/services/sync/tests/tps/mozmill_sanity.js b/services/sync/tests/tps/mozmill_sanity.js
new file mode 100644
index 000000000..fbaed8f25
--- /dev/null
+++ b/services/sync/tests/tps/mozmill_sanity.js
@@ -0,0 +1,30 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+Components.utils.import('resource://tps/tps.jsm');
+
+var setupModule = function(module) {
+ module.controller = mozmill.getBrowserController();
+ assert.ok(true, "SetupModule passes");
+}
+
+var setupTest = function(module) {
+ assert.ok(true, "SetupTest passes");
+}
+
+var testTestStep = function() {
+ assert.ok(true, "test Passes");
+ controller.open("http://www.mozilla.org");
+
+ TPS.Login();
+ TPS.Sync(ACTIONS.ACTION_SYNC_WIPE_CLIENT);
+}
+
+var teardownTest = function () {
+ assert.ok(true, "teardownTest passes");
+}
+
+var teardownModule = function() {
+ assert.ok(true, "teardownModule passes");
+}
diff --git a/services/sync/tests/tps/mozmill_sanity2.js b/services/sync/tests/tps/mozmill_sanity2.js
new file mode 100644
index 000000000..f0fd0e3d5
--- /dev/null
+++ b/services/sync/tests/tps/mozmill_sanity2.js
@@ -0,0 +1,15 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var setupModule = function(module) {
+ module.controller = mozmill.getBrowserController();
+};
+
+var testGetNode = function() {
+ controller.open("about:support");
+ controller.waitForPageLoad();
+
+ var appbox = findElement.ID(controller.tabs.activeTab, "application-box");
+ assert.waitFor(() => appbox.getNode().textContent == 'Firefox', 'correct app name');
+};
diff --git a/services/sync/tests/tps/test_addon_nonrestartless_xpi.js b/services/sync/tests/tps/test_addon_nonrestartless_xpi.js
new file mode 100644
index 000000000..b6c85b351
--- /dev/null
+++ b/services/sync/tests/tps/test_addon_nonrestartless_xpi.js
@@ -0,0 +1,105 @@
+
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// This test verifies that install of extensions that require restart
+// syncs between profiles.
+EnableEngines(["addons"]);
+
+var phases = {
+ "phase01": "profile1",
+ "phase02": "profile1",
+ "phase03": "profile2",
+ "phase04": "profile2",
+ "phase05": "profile1",
+ "phase06": "profile1",
+ "phase07": "profile2",
+ "phase08": "profile2",
+ "phase09": "profile1",
+ "phase10": "profile1",
+ "phase11": "profile2",
+ "phase12": "profile2",
+ "phase13": "profile1",
+ "phase14": "profile1",
+ "phase15": "profile2",
+ "phase16": "profile2"
+};
+
+const id = "unsigned-xpi@tests.mozilla.org";
+
+Phase("phase01", [
+ [Addons.verifyNot, [id]],
+ [Addons.install, [id]],
+ [Sync]
+]);
+Phase("phase02", [
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase03", [
+ [Addons.verifyNot, [id]],
+ [Sync]
+]);
+Phase("phase04", [
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+
+// Now we disable the add-on
+Phase("phase05", [
+ [EnsureTracking],
+ [Addons.setEnabled, [id], STATE_DISABLED],
+ [Sync]
+]);
+Phase("phase06", [
+ [Addons.verify, [id], STATE_DISABLED],
+ [Sync]
+]);
+Phase("phase07", [
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase08", [
+ [Addons.verify, [id], STATE_DISABLED],
+ [Sync]
+]);
+
+// Now we re-enable it again.
+Phase("phase09", [
+ [EnsureTracking],
+ [Addons.setEnabled, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase10", [
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase11", [
+ [Addons.verify, [id], STATE_DISABLED],
+ [Sync]
+]);
+Phase("phase12", [
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+
+// And we uninstall it
+
+Phase("phase13", [
+ [EnsureTracking],
+ [Addons.verify, [id], STATE_ENABLED],
+ [Addons.uninstall, [id]],
+ [Sync]
+]);
+Phase("phase14", [
+ [Addons.verifyNot, [id]],
+ [Sync]
+]);
+Phase("phase15", [
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase16", [
+ [Addons.verifyNot, [id]],
+ [Sync]
+]);
diff --git a/services/sync/tests/tps/test_addon_reconciling.js b/services/sync/tests/tps/test_addon_reconciling.js
new file mode 100644
index 000000000..a4244ab03
--- /dev/null
+++ b/services/sync/tests/tps/test_addon_reconciling.js
@@ -0,0 +1,54 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// This test verifies that record reconciling works as expected. It makes
+// similar changes to add-ons in separate profiles and does a sync to verify
+// the proper action is taken.
+EnableEngines(["addons"]);
+
+var phases = {
+ "phase01": "profile1",
+ "phase02": "profile2",
+ "phase03": "profile1",
+ "phase04": "profile2",
+ "phase05": "profile1",
+ "phase06": "profile2"
+};
+
+const id = "restartless-xpi@tests.mozilla.org";
+
+// Install the add-on in 2 profiles.
+Phase("phase01", [
+ [Addons.verifyNot, [id]],
+ [Addons.install, [id]],
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase02", [
+ [Addons.verifyNot, [id]],
+ [Sync],
+ [Addons.verify, [id], STATE_ENABLED]
+]);
+
+// Now we disable in one and uninstall in the other.
+Phase("phase03", [
+ [Sync], // Get GUID updates, potentially.
+ [Addons.setEnabled, [id], STATE_DISABLED],
+ // We've changed the state, but don't want this profile to sync until phase5,
+ // so if we ran a validation now we'd be expecting to find errors.
+ [Addons.skipValidation]
+]);
+Phase("phase04", [
+ [EnsureTracking],
+ [Addons.uninstall, [id]],
+ [Sync]
+]);
+
+// When we sync, the uninstall should take precedence because it was newer.
+Phase("phase05", [
+ [Sync]
+]);
+Phase("phase06", [
+ [Sync],
+ [Addons.verifyNot, [id]]
+]);
diff --git a/services/sync/tests/tps/test_addon_restartless_xpi.js b/services/sync/tests/tps/test_addon_restartless_xpi.js
new file mode 100644
index 000000000..b242c95f0
--- /dev/null
+++ b/services/sync/tests/tps/test_addon_restartless_xpi.js
@@ -0,0 +1,70 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// This test verifies that install of restartless extensions syncs to
+// other profiles.
+EnableEngines(["addons"]);
+
+var phases = {
+ "phase01": "profile1",
+ "phase02": "profile2",
+ "phase03": "profile1",
+ "phase04": "profile2",
+ "phase05": "profile1",
+ "phase06": "profile2",
+ "phase07": "profile1",
+ "phase08": "profile2"
+};
+
+const id = "restartless-xpi@tests.mozilla.org";
+
+// Verify install is synced
+Phase("phase01", [
+ [Addons.verifyNot, [id]],
+ [Addons.install, [id]],
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase02", [
+ [Addons.verifyNot, [id]],
+ [Sync],
+ [Addons.verify, [id], STATE_ENABLED]
+]);
+
+// Now disable and see that is is synced.
+Phase("phase03", [
+ [EnsureTracking],
+ [Addons.setEnabled, [id], STATE_DISABLED],
+ [Addons.verify, [id], STATE_DISABLED],
+ [Sync]
+]);
+Phase("phase04", [
+ [Sync],
+ [Addons.verify, [id], STATE_DISABLED]
+]);
+
+// Enable and see it is synced.
+Phase("phase05", [
+ [EnsureTracking],
+ [Addons.setEnabled, [id], STATE_ENABLED],
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync]
+]);
+Phase("phase06", [
+ [Sync],
+ [Addons.verify, [id], STATE_ENABLED]
+]);
+
+// Uninstall and see it is synced.
+Phase("phase07", [
+ [EnsureTracking],
+ [Addons.verify, [id], STATE_ENABLED],
+ [Addons.uninstall, [id]],
+ [Addons.verifyNot, [id]],
+ [Sync]
+]);
+Phase("phase08", [
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync],
+ [Addons.verifyNot, [id]]
+]);
diff --git a/services/sync/tests/tps/test_addon_sanity.js b/services/sync/tests/tps/test_addon_sanity.js
new file mode 100644
index 000000000..240918094
--- /dev/null
+++ b/services/sync/tests/tps/test_addon_sanity.js
@@ -0,0 +1,30 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+
+EnableEngines(["addons"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile1" };
+
+const id = "unsigned-xpi@tests.mozilla.org";
+
+Phase("phase1", [
+ [Addons.install, [id]],
+ // Non-restartless add-on shouldn't be found after install.
+ [Addons.verifyNot, [id]],
+
+ // But it should be marked for Sync.
+ [Sync]
+]);
+
+Phase("phase2", [
+ // Add-on should be present after restart
+ [Addons.verify, [id], STATE_ENABLED],
+ [Sync] // Sync to ensure everything is initialized enough for the addon validator to run
+]);
diff --git a/services/sync/tests/tps/test_addon_wipe.js b/services/sync/tests/tps/test_addon_wipe.js
new file mode 100644
index 000000000..60131abc0
--- /dev/null
+++ b/services/sync/tests/tps/test_addon_wipe.js
@@ -0,0 +1,35 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// This test ensures that a client wipe followed by an "initial" sync will
+// restore add-ons. This test should expose flaws in the reconciling logic,
+// specifically around AddonsReconciler. This test is in response to bug
+// 792990.
+
+EnableEngines(["addons"]);
+
+var phases = {
+ "phase01": "profile1",
+ "phase02": "profile1",
+ "phase03": "profile1"
+};
+
+const id1 = "restartless-xpi@tests.mozilla.org";
+const id2 = "unsigned-xpi@tests.mozilla.org";
+
+Phase("phase01", [
+ [Addons.install, [id1]],
+ [Addons.install, [id2]],
+ [Sync]
+]);
+Phase("phase02", [
+ [Addons.verify, [id1], STATE_ENABLED],
+ [Addons.verify, [id2], STATE_ENABLED],
+ [Sync, SYNC_WIPE_CLIENT],
+ [Sync]
+]);
+Phase("phase03", [
+ [Addons.verify, [id1], STATE_ENABLED],
+ [Addons.verify, [id2], STATE_ENABLED],
+ [Sync] // Sync to ensure that the addon validator can run without error
+]);
diff --git a/services/sync/tests/tps/test_bookmark_conflict.js b/services/sync/tests/tps/test_bookmark_conflict.js
new file mode 100644
index 000000000..cfe9d782e
--- /dev/null
+++ b/services/sync/tests/tps/test_bookmark_conflict.js
@@ -0,0 +1,143 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["bookmarks"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+
+// the initial list of bookmarks to add to the browser
+var bookmarksInitial = {
+ "menu": [
+ { folder: "foldera" },
+ { folder: "folderb" },
+ { folder: "folderc" },
+ { folder: "folderd" },
+ ],
+
+ "menu/foldera": [{ uri: "http://www.cnn.com", title: "CNN" }],
+ "menu/folderb": [{ uri: "http://www.apple.com", title: "Apple", tags: [] }],
+ "menu/folderc": [{ uri: "http://www.yahoo.com", title: "Yahoo" }],
+
+ "menu/folderd": []
+};
+
+// a list of bookmarks to delete during a 'delete' action on P2
+var bookmarksToDelete = {
+ "menu": [
+ { folder: "foldera" },
+ { folder: "folderb" },
+ ],
+ "menu/folderc": [{ uri: "http://www.yahoo.com", title: "Yahoo" }],
+};
+
+
+// the modifications to make on P1, after P2 has synced, but before P1 has gotten
+// P2's changes
+var bookmarkMods = {
+ "menu": [
+ { folder: "foldera" },
+ { folder: "folderb" },
+ { folder: "folderc" },
+ { folder: "folderd" },
+ ],
+
+ // we move this child out of its folder (p1), after deleting the folder (p2)
+ // and expect the child to come back to p2 after sync.
+ "menu/foldera": [{
+ uri: "http://www.cnn.com",
+ title: "CNN",
+ changes: { location: "menu/folderd" }
+ }],
+
+ // we rename this child (p1) after deleting the folder (p2), and expect the child
+ // to be moved into great grandparent (menu)
+ "menu/folderb": [{
+ uri: "http://www.apple.com",
+ title: "Apple",
+ tags: [],
+ changes: { title: "Mac" }
+ }],
+
+
+ // we move this child (p1) after deleting the child (p2) and expect it to survive
+ "menu/folderc": [{
+ uri: "http://www.yahoo.com",
+ title: "Yahoo",
+ changes: { location: "menu/folderd" }
+ }],
+
+ "menu/folderd": []
+};
+
+// a list of bookmarks to delete during a 'delete' action
+var bookmarksToDelete = {
+ "menu": [
+ { folder: "foldera" },
+ { folder: "folderb" },
+ ],
+ "menu/folderc": [
+ { uri: "http://www.yahoo.com", title: "Yahoo" },
+ ],
+};
+
+
+
+// expected bookmark state after conflict resolution
+var bookmarksExpected = {
+ "menu": [
+ { folder: "folderc" },
+ { folder: "folderd" },
+ { uri: "http://www.apple.com", title: "Mac", },
+ ],
+
+ "menu/folderc": [],
+
+ "menu/folderd": [
+ { uri: "http://www.cnn.com", title: "CNN" },
+ { uri: "http://www.yahoo.com", title: "Yahoo" }
+ ]
+};
+
+// Add bookmarks to profile1 and sync.
+Phase("phase1", [
+ [Bookmarks.add, bookmarksInitial],
+ [Bookmarks.verify, bookmarksInitial],
+ [Sync],
+ [Bookmarks.verify, bookmarksInitial],
+]);
+
+// Sync to profile2 and verify that the bookmarks are present. Delete
+// bookmarks/folders, verify that it's not present, and sync
+Phase("phase2", [
+ [Sync],
+ [Bookmarks.verify, bookmarksInitial],
+ [Bookmarks.delete, bookmarksToDelete],
+ [Bookmarks.verifyNot, bookmarksToDelete],
+ [Sync]
+]);
+
+// Using profile1, modify the bookmarks, and sync *after* the modification,
+// and then sync again to propagate the reconciliation changes.
+Phase("phase3", [
+ [Bookmarks.verify, bookmarksInitial],
+ [Bookmarks.modify, bookmarkMods],
+ [Sync],
+ [Bookmarks.verify, bookmarksExpected],
+ [Bookmarks.verifyNot, bookmarksToDelete],
+]);
+
+// Back in profile2, do a sync and verify that we're in the expected state
+Phase("phase4", [
+ [Sync],
+ [Bookmarks.verify, bookmarksExpected],
+ [Bookmarks.verifyNot, bookmarksToDelete],
+]);
diff --git a/services/sync/tests/tps/test_bookmarks_in_same_named_folder.js b/services/sync/tests/tps/test_bookmarks_in_same_named_folder.js
new file mode 100644
index 000000000..e0eb9db53
--- /dev/null
+++ b/services/sync/tests/tps/test_bookmarks_in_same_named_folder.js
@@ -0,0 +1,69 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// bug 558077
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["bookmarks"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1"};
+
+var bookmarks_initial_1 = {
+ "menu": [
+ { folder: "aaa",
+ description: "foo"
+ },
+ { uri: "http://www.mozilla.com"
+ }
+ ],
+ "menu/aaa": [
+ { uri: "http://www.yahoo.com",
+ title: "testing Yahoo"
+ },
+ { uri: "http://www.google.com",
+ title: "testing Google"
+ }
+ ]
+};
+
+var bookmarks_initial_2 = {
+ "menu": [
+ { folder: "aaa",
+ description: "bar"
+ },
+ { uri: "http://www.mozilla.com"
+ }
+ ],
+ "menu/aaa": [
+ { uri: "http://bugzilla.mozilla.org/show_bug.cgi?id=%s",
+ title: "Bugzilla"
+ },
+ { uri: "http://www.apple.com",
+ tags: [ "apple" ]
+ }
+ ]
+};
+
+Phase('phase1', [
+ [Bookmarks.add, bookmarks_initial_1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial_1],
+ [Bookmarks.add, bookmarks_initial_2],
+ [Sync]
+]);
+
+Phase('phase3', [
+ [Sync],
+ // XXX [Bookmarks.verify, bookmarks_initial_1],
+ [Bookmarks.verify, bookmarks_initial_2]
+]);
diff --git a/services/sync/tests/tps/test_bug501528.js b/services/sync/tests/tps/test_bug501528.js
new file mode 100644
index 000000000..7b1566c43
--- /dev/null
+++ b/services/sync/tests/tps/test_bug501528.js
@@ -0,0 +1,79 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["passwords"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Password lists
+ */
+
+var passwords_initial = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "secret",
+ usernameField: "uname",
+ passwordField: "pword",
+ changes: {
+ password: "SeCrEt$$$"
+ }
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "jack",
+ password: "secretlogin"
+ }
+];
+
+var passwords_after_first_update = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "SeCrEt$$$",
+ usernameField: "uname",
+ passwordField: "pword"
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "jack",
+ password: "secretlogin"
+ }
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Passwords.add, passwords_initial],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Passwords.add, passwords_initial],
+ [Sync]
+]);
+
+Phase('phase3', [
+ [Sync],
+ [Passwords.verify, passwords_initial],
+ [Passwords.modify, passwords_initial],
+ [Passwords.verify, passwords_after_first_update],
+ [Sync]
+]);
+
+Phase('phase4', [
+ [Sync],
+ [Passwords.verify, passwords_after_first_update],
+]);
+
diff --git a/services/sync/tests/tps/test_bug530717.js b/services/sync/tests/tps/test_bug530717.js
new file mode 100644
index 000000000..4a11b0a27
--- /dev/null
+++ b/services/sync/tests/tps/test_bug530717.js
@@ -0,0 +1,69 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["prefs"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1"};
+
+/*
+ * Preference lists
+ */
+
+var prefs1 = [
+ { name: "browser.startup.homepage",
+ value: "http://www.getfirefox.com"
+ },
+ { name: "browser.urlbar.maxRichResults",
+ value: 20
+ },
+ { name: "privacy.clearOnShutdown.siteSettings",
+ value: true
+ }
+];
+
+var prefs2 = [
+ { name: "browser.startup.homepage",
+ value: "http://www.mozilla.com"
+ },
+ { name: "browser.urlbar.maxRichResults",
+ value: 18
+ },
+ { name: "privacy.clearOnShutdown.siteSettings",
+ value: false
+ }
+];
+
+/*
+ * Test phases
+ */
+
+// Add prefs to profile1 and sync.
+Phase('phase1', [
+ [Prefs.modify, prefs1],
+ [Prefs.verify, prefs1],
+ [Sync]
+]);
+
+// Sync profile2 and verify same prefs are present.
+Phase('phase2', [
+ [Sync],
+ [Prefs.verify, prefs1]
+]);
+
+// Using profile1, change some prefs, then do another sync with wipe-client.
+// Verify that the cloud's prefs are restored, and the recent local changes
+// discarded.
+Phase('phase3', [
+ [Prefs.modify, prefs2],
+ [Prefs.verify, prefs2],
+ [Sync, SYNC_WIPE_CLIENT],
+ [Prefs.verify, prefs1]
+]);
+
diff --git a/services/sync/tests/tps/test_bug531489.js b/services/sync/tests/tps/test_bug531489.js
new file mode 100644
index 000000000..aa2de0b3a
--- /dev/null
+++ b/services/sync/tests/tps/test_bug531489.js
@@ -0,0 +1,62 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["bookmarks"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1"};
+
+/*
+ * Bookmark asset lists: these define bookmarks that are used during the test
+ */
+
+// the initial list of bookmarks to add to the browser
+var bookmarks_initial = {
+ "menu": [
+ { folder: "foldera" },
+ { uri: "http://www.google.com",
+ title: "Google"
+ }
+ ],
+ "menu/foldera": [
+ { uri: "http://www.google.com",
+ title: "Google"
+ }
+ ],
+ "toolbar": [
+ { uri: "http://www.google.com",
+ title: "Google"
+ }
+ ]
+};
+
+/*
+ * Test phases
+ */
+
+// Add three bookmarks with the same url to different locations and sync.
+Phase('phase1', [
+ [Bookmarks.add, bookmarks_initial],
+ [Bookmarks.verify, bookmarks_initial],
+ [Sync]
+]);
+
+// Sync to profile2 and verify that all three bookmarks are present
+Phase('phase2', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
+// Sync again to profile1 and verify that all three bookmarks are still
+// present.
+Phase('phase3', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
diff --git a/services/sync/tests/tps/test_bug535326.js b/services/sync/tests/tps/test_bug535326.js
new file mode 100644
index 000000000..7875e593b
--- /dev/null
+++ b/services/sync/tests/tps/test_bug535326.js
@@ -0,0 +1,130 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["tabs"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2"};
+
+var tabs1 = [
+ { uri: "data:text/html,<html><head><title>Howdy</title></head><body>Howdy</body></html>",
+ title: "Howdy",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>America</title></head><body>America</body></html>",
+ title: "America",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Apple</title></head><body>Apple</body></html>",
+ title: "Apple",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>This</title></head><body>This</body></html>",
+ title: "This",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Bug</title></head><body>Bug</body></html>",
+ title: "Bug",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>IRC</title></head><body>IRC</body></html>",
+ title: "IRC",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Tinderbox</title></head><body>Tinderbox</body></html>",
+ title: "Tinderbox",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Fox</title></head><body>Fox</body></html>",
+ title: "Fox",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Hello</title></head><body>Hello</body></html>",
+ title: "Hello",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Eagle</title></head><body>Eagle</body></html>",
+ title: "Eagle",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Train</title></head><body>Train</body></html>",
+ title: "Train",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Macbook</title></head><body>Macbook</body></html>",
+ title: "Macbook",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Clock</title></head><body>Clock</body></html>",
+ title: "Clock",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Google</title></head><body>Google</body></html>",
+ title: "Google",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Human</title></head><body>Human</body></html>",
+ title: "Human",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Jetpack</title></head><body>Jetpack</body></html>",
+ title: "Jetpack",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Selenium</title></head><body>Selenium</body></html>",
+ title: "Selenium",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Mozilla</title></head><body>Mozilla</body></html>",
+ title: "Mozilla",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Firefox</title></head><body>Firefox</body></html>",
+ title: "Firefox",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Weave</title></head><body>Weave</body></html>",
+ title: "Weave",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Android</title></head><body>Android</body></html>",
+ title: "Android",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Bye</title></head><body>Bye</body></html>",
+ title: "Bye",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Hi</title></head><body>Hi</body></html>",
+ title: "Hi",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Final</title></head><body>Final</body></html>",
+ title: "Final",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Fennec</title></head><body>Fennec</body></html>",
+ title: "Fennec",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Mobile</title></head><body>Mobile</body></html>",
+ title: "Mobile",
+ profile: "profile1"
+ }
+];
+
+Phase('phase1', [
+ [Tabs.add, tabs1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Tabs.verify, tabs1]
+]);
+
diff --git a/services/sync/tests/tps/test_bug538298.js b/services/sync/tests/tps/test_bug538298.js
new file mode 100644
index 000000000..2a6d0c8de
--- /dev/null
+++ b/services/sync/tests/tps/test_bug538298.js
@@ -0,0 +1,93 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["bookmarks"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Bookmark asset lists: these define bookmarks that are used during the test
+ */
+
+// the initial list of bookmarks to add to the browser
+var bookmarks_initial = {
+ "toolbar": [
+ { uri: "http://www.google.com",
+ title: "Google"
+ },
+ { uri: "http://www.cnn.com",
+ title: "CNN",
+ changes: {
+ position: "Google"
+ }
+ },
+ { uri: "http://www.mozilla.com",
+ title: "Mozilla"
+ },
+ { uri: "http://www.firefox.com",
+ title: "Firefox",
+ changes: {
+ position: "Mozilla"
+ }
+ }
+ ]
+};
+
+var bookmarks_after_move = {
+ "toolbar": [
+ { uri: "http://www.cnn.com",
+ title: "CNN"
+ },
+ { uri: "http://www.google.com",
+ title: "Google"
+ },
+ { uri: "http://www.firefox.com",
+ title: "Firefox"
+ },
+ { uri: "http://www.mozilla.com",
+ title: "Mozilla"
+ }
+ ]
+};
+
+/*
+ * Test phases
+ */
+
+// Add four bookmarks to the toolbar and sync.
+Phase('phase1', [
+ [Bookmarks.add, bookmarks_initial],
+ [Bookmarks.verify, bookmarks_initial],
+ [Sync]
+]);
+
+// Sync to profile2 and verify that all four bookmarks are present.
+Phase('phase2', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
+// Change the order of the toolbar bookmarks, and sync.
+Phase('phase3', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial],
+ [Bookmarks.modify, bookmarks_initial],
+ [Bookmarks.verify, bookmarks_after_move],
+ [Sync],
+]);
+
+// Go back to profile2, sync, and verify that the bookmarks are reordered
+// as expected.
+Phase('phase4', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_after_move]
+]);
+
diff --git a/services/sync/tests/tps/test_bug546807.js b/services/sync/tests/tps/test_bug546807.js
new file mode 100644
index 000000000..873ecf2be
--- /dev/null
+++ b/services/sync/tests/tps/test_bug546807.js
@@ -0,0 +1,54 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+
+EnableEngines(["tabs"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2"};
+
+/*
+ * Tabs data
+ */
+
+var tabs1 = [
+ { uri: "about:config",
+ profile: "profile1"
+ },
+ { uri: "about:credits",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Apple</title></head><body>Apple</body></html>",
+ title: "Apple",
+ profile: "profile1"
+ }
+];
+
+var tabs_absent = [
+ { uri: "about:config",
+ profile: "profile1"
+ },
+ { uri: "about:credits",
+ profile: "profile1"
+ },
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Tabs.add, tabs1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Tabs.verifyNot, tabs_absent]
+]);
+
diff --git a/services/sync/tests/tps/test_bug556509.js b/services/sync/tests/tps/test_bug556509.js
new file mode 100644
index 000000000..a2a5f8fc3
--- /dev/null
+++ b/services/sync/tests/tps/test_bug556509.js
@@ -0,0 +1,45 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["bookmarks"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2"};
+
+
+// the initial list of bookmarks to add to the browser
+var bookmarks_initial = {
+ "menu": [
+ { folder: "testfolder",
+ description: "it's just me, a test folder"
+ }
+ ],
+ "menu/testfolder": [
+ { uri: "http://www.mozilla.com",
+ title: "Mozilla"
+ }
+ ]
+};
+
+/*
+ * Test phases
+ */
+
+// Add a bookmark folder which has a description, and sync.
+Phase('phase1', [
+ [Bookmarks.add, bookmarks_initial],
+ [Bookmarks.verify, bookmarks_initial],
+ [Sync]
+]);
+
+// Sync to profile2 and verify that the bookmark folder is created, along
+// with its description.
+Phase('phase2', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial]
+]);
diff --git a/services/sync/tests/tps/test_bug562515.js b/services/sync/tests/tps/test_bug562515.js
new file mode 100644
index 000000000..380e8ca24
--- /dev/null
+++ b/services/sync/tests/tps/test_bug562515.js
@@ -0,0 +1,105 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["bookmarks"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Bookmark lists
+ */
+
+// the initial list of bookmarks to add to the browser
+var bookmarks_initial = {
+ "menu": [
+ { uri: "http://www.google.com",
+ loadInSidebar: true,
+ tags: [ "google", "computers", "internet", "www"]
+ },
+ { uri: "http://bugzilla.mozilla.org/show_bug.cgi?id=%s",
+ title: "Bugzilla",
+ keyword: "bz"
+ },
+ { folder: "foldera" },
+ { uri: "http://www.mozilla.com" },
+ { separator: true },
+ { folder: "folderb" }
+ ],
+ "menu/foldera": [
+ { uri: "http://www.yahoo.com",
+ title: "testing Yahoo"
+ },
+ { uri: "http://www.cnn.com",
+ description: "This is a description of the site a at www.cnn.com"
+ },
+ { livemark: "Livemark1",
+ feedUri: "http://rss.wunderground.com/blog/JeffMasters/rss.xml",
+ siteUri: "http://www.wunderground.com/blog/JeffMasters/show.html"
+ }
+ ],
+ "menu/folderb": [
+ { uri: "http://www.apple.com",
+ tags: [ "apple", "mac" ]
+ }
+ ],
+ "toolbar": [
+ { uri: "place:queryType=0&sort=8&maxResults=10&beginTimeRef=1&beginTime=0",
+ title: "Visited Today"
+ }
+ ]
+};
+
+// a list of bookmarks to delete during a 'delete' action
+var bookmarks_to_delete = {
+ "menu": [
+ { uri: "http://www.google.com",
+ loadInSidebar: true,
+ tags: [ "google", "computers", "internet", "www"]
+ }
+ ],
+ "menu/foldera": [
+ { uri: "http://www.yahoo.com",
+ title: "testing Yahoo"
+ }
+ ]
+};
+
+/*
+ * Test phases
+ */
+
+// add bookmarks to profile1 and sync
+Phase('phase1', [
+ [Bookmarks.add, bookmarks_initial],
+ [Bookmarks.verify, bookmarks_initial],
+ [Sync]
+]);
+
+// sync to profile2 and verify that the bookmarks are present
+Phase('phase2', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
+// delete some bookmarks from profile1, then sync with "wipe-client"
+// set; finally, verify that the deleted bookmarks were restored.
+Phase('phase3', [
+ [Bookmarks.delete, bookmarks_to_delete],
+ [Bookmarks.verifyNot, bookmarks_to_delete],
+ [Sync, SYNC_WIPE_CLIENT],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
+// sync profile2 again, verify no bookmarks have been deleted
+Phase('phase4', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial]
+]);
diff --git a/services/sync/tests/tps/test_bug563989.js b/services/sync/tests/tps/test_bug563989.js
new file mode 100644
index 000000000..faf63de65
--- /dev/null
+++ b/services/sync/tests/tps/test_bug563989.js
@@ -0,0 +1,107 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["bookmarks"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Bookmark asset lists: these define bookmarks that are used during the test
+ */
+
+// the initial list of bookmarks to add to the browser
+var bookmarks_initial = {
+ "menu": [
+ { uri: "http://www.google.com",
+ loadInSidebar: true,
+ tags: [ "google", "computers", "internet", "www" ]
+ },
+ { uri: "http://bugzilla.mozilla.org/show_bug.cgi?id=%s",
+ title: "Bugzilla",
+ keyword: "bz"
+ },
+ { folder: "foldera" },
+ { uri: "http://www.mozilla.com" },
+ { separator: true },
+ { folder: "folderb" }
+ ],
+ "menu/foldera": [
+ { uri: "http://www.yahoo.com",
+ title: "testing Yahoo"
+ },
+ { uri: "http://www.cnn.com",
+ description: "This is a description of the site a at www.cnn.com"
+ },
+ { livemark: "Livemark1",
+ feedUri: "http://rss.wunderground.com/blog/JeffMasters/rss.xml",
+ siteUri: "http://www.wunderground.com/blog/JeffMasters/show.html"
+ }
+ ],
+ "menu/folderb": [
+ { uri: "http://www.apple.com",
+ tags: [ "apple", "mac" ]
+ }
+ ],
+ "toolbar": [
+ { uri: "place:queryType=0&sort=8&maxResults=10&beginTimeRef=1&beginTime=0",
+ title: "Visited Today"
+ }
+ ]
+};
+
+// a list of bookmarks to delete during a 'delete' action
+var bookmarks_to_delete = {
+ "menu/folderb": [
+ { uri: "http://www.apple.com",
+ tags: [ "apple", "mac" ]
+ }
+ ],
+ "toolbar": [
+ { uri: "place:queryType=0&sort=8&maxResults=10&beginTimeRef=1&beginTime=0",
+ title: "Visited Today"
+ }
+ ]
+};
+
+/*
+ * Test phases
+ */
+
+// Add bookmarks to profile1 and sync.
+Phase('phase1', [
+ [Bookmarks.add, bookmarks_initial],
+ [Bookmarks.verify, bookmarks_initial],
+ [Sync],
+]);
+
+// Sync to profile2 and verify that the bookmarks are present. Delete
+// some bookmarks, and verify that they're not present, but don't sync again.
+Phase('phase2', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial],
+ [Bookmarks.delete, bookmarks_to_delete],
+ [Bookmarks.verifyNot, bookmarks_to_delete],
+ [Bookmarks.skipValidation]
+]);
+
+// Using profile1, sync again with wipe-server set to true. Verify our
+// initial bookmarks are still all present.
+Phase('phase3', [
+ [Sync, SYNC_WIPE_REMOTE],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
+// Back in profile2, do a sync and verify that the bookmarks we had
+// deleted earlier are now restored.
+Phase('phase4', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial]
+]);
diff --git a/services/sync/tests/tps/test_bug575423.js b/services/sync/tests/tps/test_bug575423.js
new file mode 100644
index 000000000..d187c4878
--- /dev/null
+++ b/services/sync/tests/tps/test_bug575423.js
@@ -0,0 +1,85 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+
+EnableEngines(["history"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2"};
+
+/*
+ * History data
+ */
+
+// the history data to add to the browser
+var history1 = [
+ { uri: "http://www.google.com/",
+ title: "Google",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -1
+ }
+ ]
+ },
+ { uri: "http://www.cnn.com/",
+ title: "CNN",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ }
+];
+
+// Another history data to add to the browser
+var history2 = [
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ },
+ { uri: "http://www.google.com/language_tools?hl=en",
+ title: "Language Tools",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -40
+ }
+ ]
+ }
+];
+
+/*
+ * Test phases
+ */
+Phase('phase1', [
+ [History.add, history1],
+ [Sync],
+ [History.add, history2],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [History.verify, history2]
+]);
+
diff --git a/services/sync/tests/tps/test_client_wipe.js b/services/sync/tests/tps/test_client_wipe.js
new file mode 100644
index 000000000..ba9815db5
--- /dev/null
+++ b/services/sync/tests/tps/test_client_wipe.js
@@ -0,0 +1,164 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1"};
+
+/*
+ * Bookmark lists
+ */
+
+// the initial list of bookmarks to add to the browser
+var bookmarks_initial = {
+ toolbar: [
+ { uri: "http://www.google.com",
+ title: "Google"
+ },
+ { uri: "http://www.cnn.com",
+ title: "CNN",
+ changes: {
+ position: "Google"
+ }
+ },
+ { uri: "http://www.mozilla.com",
+ title: "Mozilla"
+ },
+ { uri: "http://www.firefox.com",
+ title: "Firefox",
+ changes: {
+ position: "Mozilla"
+ }
+ }
+ ]
+};
+
+var bookmarks_after_move = {
+ toolbar: [
+ { uri: "http://www.cnn.com",
+ title: "CNN"
+ },
+ { uri: "http://www.google.com",
+ title: "Google"
+ },
+ { uri: "http://www.firefox.com",
+ title: "Firefox"
+ },
+ { uri: "http://www.mozilla.com",
+ title: "Mozilla"
+ }
+ ]
+};
+
+/*
+ * Password data
+ */
+
+// Initial password data
+var passwords_initial = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "secret",
+ usernameField: "uname",
+ passwordField: "pword",
+ changes: {
+ password: "SeCrEt$$$"
+ }
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "jack",
+ password: "secretlogin"
+ }
+];
+
+// Password after first modify action has been performed
+var passwords_after_change = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "SeCrEt$$$",
+ usernameField: "uname",
+ passwordField: "pword",
+ changes: {
+ username: "james"
+ }
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "jack",
+ password: "secretlogin"
+ }
+];
+
+/*
+ * Prefs to use in the test
+ */
+var prefs1 = [
+ { name: "browser.startup.homepage",
+ value: "http://www.getfirefox.com"
+ },
+ { name: "browser.urlbar.maxRichResults",
+ value: 20
+ },
+ { name: "privacy.clearOnShutdown.siteSettings",
+ value: true
+ }
+];
+
+var prefs2 = [
+ { name: "browser.startup.homepage",
+ value: "http://www.mozilla.com"
+ },
+ { name: "browser.urlbar.maxRichResults",
+ value: 18
+ },
+ { name: "privacy.clearOnShutdown.siteSettings",
+ value: false
+ }
+];
+
+/*
+ * Test phases
+ */
+
+// Add prefs,passwords and bookmarks to profile1 and sync.
+Phase('phase1', [
+ [Passwords.add, passwords_initial],
+ [Bookmarks.add, bookmarks_initial],
+ [Prefs.modify, prefs1],
+ [Prefs.verify, prefs1],
+ [Sync]
+]);
+
+// Sync profile2 and verify same prefs,passwords and bookmarks are present.
+Phase('phase2', [
+ [Sync],
+ [Prefs.verify, prefs1],
+ [Passwords.verify, passwords_initial],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
+// Using profile1, change some prefs,bookmarks and pwds, then do another sync with wipe-client.
+// Verify that the cloud's settings are restored, and the recent local changes
+// discarded.
+Phase('phase3', [
+ [Prefs.modify, prefs2],
+ [Passwords.modify, passwords_initial],
+ [Bookmarks.modify, bookmarks_initial],
+ [Prefs.verify, prefs2],
+ [Passwords.verify, passwords_after_change],
+ [Bookmarks.verify, bookmarks_after_move],
+ [Sync, SYNC_WIPE_CLIENT],
+ [Prefs.verify, prefs1],
+ [Passwords.verify, passwords_initial],
+ [Bookmarks.verify, bookmarks_initial]
+]);
+
diff --git a/services/sync/tests/tps/test_formdata.js b/services/sync/tests/tps/test_formdata.js
new file mode 100644
index 000000000..decb58dd8
--- /dev/null
+++ b/services/sync/tests/tps/test_formdata.js
@@ -0,0 +1,97 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["forms"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Form data asset lists: these define form values that are used in the tests.
+ */
+
+var formdata1 = [
+ { fieldname: "testing",
+ value: "success",
+ date: -1
+ },
+ { fieldname: "testing",
+ value: "failure",
+ date: -2
+ },
+ { fieldname: "username",
+ value: "joe"
+ }
+];
+
+// This is currently pointless - it *looks* like it is trying to check that
+// one of the entries in formdata1 has been removed, but (a) the delete code
+// isn't active (see comments below), and (b) the way the verification works
+// means it would never do the right thing - it only checks all the entries
+// here exist, but not that they are the only entries in the DB.
+var formdata2 = [
+ { fieldname: "testing",
+ value: "success",
+ date: -1
+ },
+ { fieldname: "username",
+ value: "joe"
+ }
+];
+
+var formdata_delete = [
+ { fieldname: "testing",
+ value: "failure"
+ }
+];
+
+var formdata_new = [
+ { fieldname: "new-field",
+ value: "new-value"
+ }
+]
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Formdata.add, formdata1],
+ [Formdata.verify, formdata1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Formdata.verify, formdata1],
+]);
+
+/*
+ * Note: Weave does not support syncing deleted form data, so those
+ * tests are disabled below. See bug 568363.
+ */
+
+Phase('phase3', [
+ [Sync],
+ [Formdata.delete, formdata_delete],
+//[Formdata.verifyNot, formdata_delete],
+ [Formdata.verify, formdata2],
+ // add new data after the first Sync, ensuring the tracker works.
+ [Formdata.add, formdata_new],
+ [Sync],
+]);
+
+Phase('phase4', [
+ [Sync],
+ [Formdata.verify, formdata2],
+ [Formdata.verify, formdata_new],
+//[Formdata.verifyNot, formdata_delete]
+]);
+
+
diff --git a/services/sync/tests/tps/test_history.js b/services/sync/tests/tps/test_history.js
new file mode 100644
index 000000000..5e8bcdb92
--- /dev/null
+++ b/services/sync/tests/tps/test_history.js
@@ -0,0 +1,167 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["history"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2" };
+
+/*
+ * History asset lists: these define history entries that are used during
+ * the test
+ */
+
+// the initial list of history items to add to the browser
+var history1 = [
+ { uri: "http://www.google.com/",
+ title: "Google",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -1
+ }
+ ]
+ },
+ { uri: "http://www.cnn.com/",
+ title: "CNN",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ },
+ { uri: "http://www.google.com/language_tools?hl=en",
+ title: "Language Tools",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -40
+ }
+ ]
+ },
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 1,
+ date: -1
+ },
+ { type: 1,
+ date: -20
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ }
+];
+
+// a list of items to delete from the history
+var history_to_delete = [
+ { uri: "http://www.cnn.com/" },
+ { begin: -24,
+ end: -1
+ },
+ { host: "www.google.com" }
+];
+
+// a list which reflects items that should be in the history after
+// the above items are deleted
+var history2 = [
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ }
+];
+
+// a list which includes history entries that should not be present
+// after deletion of the history_to_delete entries
+var history_not = [
+ { uri: "http://www.google.com/",
+ title: "Google",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -1
+ }
+ ]
+ },
+ { uri: "http://www.cnn.com/",
+ title: "CNN",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ },
+ { uri: "http://www.google.com/language_tools?hl=en",
+ title: "Language Tools",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -40
+ }
+ ]
+ },
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 1,
+ date: -20
+ }
+ ]
+ }
+];
+
+/*
+ * Test phases
+ * Note: there is no test phase in which deleted history entries are
+ * synced to other clients. This functionality is not supported by
+ * Sync, see bug 446517.
+ */
+
+Phase('phase1', [
+ [History.add, history1],
+ [Sync],
+]);
+
+Phase('phase2', [
+ [Sync],
+ [History.verify, history1],
+ [History.delete, history_to_delete],
+ [History.verify, history2],
+ [History.verifyNot, history_not],
+ [Sync]
+]);
+
diff --git a/services/sync/tests/tps/test_history_collision.js b/services/sync/tests/tps/test_history_collision.js
new file mode 100644
index 000000000..625483d5d
--- /dev/null
+++ b/services/sync/tests/tps/test_history_collision.js
@@ -0,0 +1,125 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["history"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * History lists
+ */
+
+// the initial list of history to add to the browser
+var history1 = [
+ { uri: "http://www.google.com/",
+ title: "Google",
+ visits: [
+ { type: 1,
+ date: 0
+ }
+ ]
+ },
+ { uri: "http://www.cnn.com/",
+ title: "CNN",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ },
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ }
+];
+
+// the history to delete
+var history_to_delete = [
+ { uri: "http://www.cnn.com/",
+ title: "CNN"
+ },
+ { begin: -36,
+ end: -1
+ }
+];
+
+var history_not = [
+ { uri: "http://www.cnn.com/",
+ title: "CNN",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ }
+];
+
+var history_after_delete = [
+ { uri: "http://www.google.com/",
+ title: "Google",
+ visits: [
+ { type: 1,
+ date: 0
+ }
+ ]
+ },
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: 0
+ }
+ ]
+ }
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [History.add, history1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [History.add, history1],
+ [Sync, SYNC_WIPE_REMOTE]
+]);
+
+Phase('phase3', [
+ [Sync],
+ [History.verify, history1],
+ [History.delete, history_to_delete],
+ [History.verify, history_after_delete],
+ [History.verifyNot, history_not],
+ [Sync]
+]);
+
+Phase('phase4', [
+ [Sync],
+ [History.verify, history_after_delete],
+ [History.verifyNot, history_not]
+]);
+
diff --git a/services/sync/tests/tps/test_mozmill_sanity.js b/services/sync/tests/tps/test_mozmill_sanity.js
new file mode 100644
index 000000000..57d3d7ad9
--- /dev/null
+++ b/services/sync/tests/tps/test_mozmill_sanity.js
@@ -0,0 +1,24 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2" };
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [RunMozmillTest, 'mozmill_sanity.js'],
+]);
+
+Phase('phase2', [
+ [Sync],
+ [RunMozmillTest, 'mozmill_sanity2.js'],
+]);
diff --git a/services/sync/tests/tps/test_passwords.js b/services/sync/tests/tps/test_passwords.js
new file mode 100644
index 000000000..6a3ce8e1d
--- /dev/null
+++ b/services/sync/tests/tps/test_passwords.js
@@ -0,0 +1,113 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["passwords"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Password asset lists: these define password entries that are used during
+ * the test
+ */
+
+// initial password list to be loaded into the browser
+var passwords_initial = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "SeCrEt123",
+ usernameField: "uname",
+ passwordField: "pword",
+ changes: {
+ password: "zippity-do-dah"
+ }
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+// expected state of passwords after the changes in the above list are applied
+var passwords_after_first_update = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "zippity-do-dah",
+ usernameField: "uname",
+ passwordField: "pword"
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+var passwords_to_delete = [
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+var passwords_absent = [
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+// expected state of passwords after the delete operation
+var passwords_after_second_update = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "zippity-do-dah",
+ usernameField: "uname",
+ passwordField: "pword"
+ }
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Passwords.add, passwords_initial],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Passwords.verify, passwords_initial],
+ [Passwords.modify, passwords_initial],
+ [Passwords.verify, passwords_after_first_update],
+ [Sync]
+]);
+
+Phase('phase3', [
+ [Sync],
+ [Passwords.verify, passwords_after_first_update],
+ [Passwords.delete, passwords_to_delete],
+ [Passwords.verify, passwords_after_second_update],
+ [Passwords.verifyNot, passwords_absent],
+ [Sync]
+]);
+
+Phase('phase4', [
+ [Sync],
+ [Passwords.verify, passwords_after_second_update],
+ [Passwords.verifyNot, passwords_absent]
+]);
diff --git a/services/sync/tests/tps/test_prefs.js b/services/sync/tests/tps/test_prefs.js
new file mode 100644
index 000000000..3afff130d
--- /dev/null
+++ b/services/sync/tests/tps/test_prefs.js
@@ -0,0 +1,57 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["prefs"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1"};
+
+var prefs1 = [
+ { name: "browser.startup.homepage",
+ value: "http://www.getfirefox.com"
+ },
+ { name: "browser.urlbar.maxRichResults",
+ value: 20
+ },
+ { name: "privacy.clearOnShutdown.siteSettings",
+ value: true
+ }
+];
+
+var prefs2 = [
+ { name: "browser.startup.homepage",
+ value: "http://www.mozilla.com"
+ },
+ { name: "browser.urlbar.maxRichResults",
+ value: 18
+ },
+ { name: "privacy.clearOnShutdown.siteSettings",
+ value: false
+ }
+];
+
+Phase('phase1', [
+ [Prefs.modify, prefs1],
+ [Prefs.verify, prefs1],
+ [Sync],
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Prefs.verify, prefs1],
+ [Prefs.modify, prefs2],
+ [Prefs.verify, prefs2],
+ [Sync]
+]);
+
+Phase('phase3', [
+ [Sync],
+ [Prefs.verify, prefs2]
+]);
+
diff --git a/services/sync/tests/tps/test_privbrw_passwords.js b/services/sync/tests/tps/test_privbrw_passwords.js
new file mode 100644
index 000000000..ce90cc12f
--- /dev/null
+++ b/services/sync/tests/tps/test_privbrw_passwords.js
@@ -0,0 +1,104 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["passwords"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Password data
+ */
+
+// Initial password data
+var passwords_initial = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "secret",
+ usernameField: "uname",
+ passwordField: "pword",
+ changes: {
+ password: "SeCrEt$$$"
+ }
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "jack",
+ password: "secretlogin"
+ }
+];
+
+// Password after first modify action has been performed
+var passwords_after_first_change = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "SeCrEt$$$",
+ usernameField: "uname",
+ passwordField: "pword",
+ changes: {
+ username: "james"
+ }
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "jack",
+ password: "secretlogin"
+ }
+];
+
+// Password after second modify action has been performed
+var passwords_after_second_change = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "james",
+ password: "SeCrEt$$$",
+ usernameField: "uname",
+ passwordField: "pword"
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "jack",
+ password: "secretlogin"
+ }
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Passwords.add, passwords_initial],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Passwords.verify, passwords_initial],
+ [Passwords.modify, passwords_initial],
+ [Passwords.verify, passwords_after_first_change],
+ [Sync]
+]);
+
+Phase('phase3', [
+ [Sync],
+ [Windows.add, { private: true }],
+ [Passwords.verify, passwords_after_first_change],
+ [Passwords.modify, passwords_after_first_change],
+ [Passwords.verify, passwords_after_second_change],
+ [Sync]
+]);
+
+Phase('phase4', [
+ [Sync],
+ [Passwords.verify, passwords_after_second_change]
+]);
+
diff --git a/services/sync/tests/tps/test_privbrw_tabs.js b/services/sync/tests/tps/test_privbrw_tabs.js
new file mode 100644
index 000000000..e7c94e9db
--- /dev/null
+++ b/services/sync/tests/tps/test_privbrw_tabs.js
@@ -0,0 +1,87 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["tabs"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Tabs data
+ */
+
+var tabs1 = [
+ { uri: "data:text/html,<html><head><title>Firefox</title></head><body>Firefox</body></html>",
+ title: "Firefox",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Weave</title></head><body>Weave</body></html>",
+ title: "Weave",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Apple</title></head><body>Apple</body></html>",
+ title: "Apple",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>IRC</title></head><body>IRC</body></html>",
+ title: "IRC",
+ profile: "profile1"
+ }
+];
+
+var tabs2 = [
+ { uri: "data:text/html,<html><head><title>Tinderbox</title></head><body>Tinderbox</body></html>",
+ title: "Tinderbox",
+ profile: "profile2"
+ },
+ { uri: "data:text/html,<html><head><title>Fox</title></head><body>Fox</body></html>",
+ title: "Fox",
+ profile: "profile2"
+ }
+];
+
+var tabs3 = [
+ { uri: "data:text/html,<html><head><title>Jetpack</title></head><body>Jetpack</body></html>",
+ title: "Jetpack",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Selenium</title></head><body>Selenium</body></html>",
+ title: "Selenium",
+ profile: "profile1"
+ }
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Tabs.add, tabs1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Tabs.verify, tabs1],
+ [Tabs.add, tabs2],
+ [Sync]
+]);
+
+Phase('phase3', [
+ [Sync],
+ [Windows.add, { private: true }],
+ [Tabs.add, tabs3],
+ [Sync]
+]);
+
+Phase('phase4', [
+ [Sync],
+ [Tabs.verifyNot, tabs3]
+]);
diff --git a/services/sync/tests/tps/test_special_tabs.js b/services/sync/tests/tps/test_special_tabs.js
new file mode 100644
index 000000000..6e709cc1a
--- /dev/null
+++ b/services/sync/tests/tps/test_special_tabs.js
@@ -0,0 +1,78 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// Bug 532173 - Dont sync tabs like about:* , weave firstrun etc
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["tabs"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2" };
+
+var tabs1 = [
+ { uri: "data:text/html,<html><head><title>Firefox</title></head><body>Firefox</body></html>",
+ title: "Firefox",
+ profile: "profile1"
+ },
+ { uri: "about:plugins",
+ title: "About",
+ profile: "profile1"
+ },
+ { uri: "about:credits",
+ title: "Credits",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Mozilla</title></head><body>Mozilla</body></html>",
+ title: "Mozilla",
+ profile: "profile1"
+ },
+ { uri: "http://www.mozilla.com/en-US/firefox/sync/firstrun.html",
+ title: "Firstrun",
+ profile: "profile1"
+ }
+];
+
+var tabs2 = [
+ { uri: "data:text/html,<html><head><title>Firefox</title></head><body>Firefox</body></html>",
+ title: "Firefox",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Mozilla</title></head><body>Mozilla</body></html>",
+ title: "Mozilla",
+ profile: "profile1"
+ }
+];
+
+var tabs3 = [
+ { uri: "http://www.mozilla.com/en-US/firefox/sync/firstrun.html",
+ title: "Firstrun",
+ profile: "profile1"
+ },
+ { uri: "about:plugins",
+ title: "About",
+ profile: "profile1"
+ },
+ { uri: "about:credits",
+ title: "Credits",
+ profile: "profile1"
+ }
+];
+
+/*
+ * Test phases
+ */
+Phase('phase1', [
+ [Tabs.add, tabs1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Tabs.verify, tabs2],
+ [Tabs.verifyNot, tabs3]
+]);
+
diff --git a/services/sync/tests/tps/test_sync.js b/services/sync/tests/tps/test_sync.js
new file mode 100644
index 000000000..c9dd89cb5
--- /dev/null
+++ b/services/sync/tests/tps/test_sync.js
@@ -0,0 +1,424 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1",
+ "phase4": "profile2" };
+
+/*
+ * Bookmark asset lists: these define bookmarks that are used during the test
+ */
+
+// the initial list of bookmarks to be added to the browser
+var bookmarks_initial = {
+ "menu": [
+ { uri: "http://www.google.com",
+ loadInSidebar: true,
+ tags: ["google", "computers", "internet", "www"],
+ changes: {
+ title: "Google",
+ loadInSidebar: false,
+ tags: ["google", "computers", "misc"],
+ }
+ },
+ { uri: "http://bugzilla.mozilla.org/show_bug.cgi?id=%s",
+ title: "Bugzilla",
+ keyword: "bz",
+ changes: {
+ keyword: "bugzilla"
+ }
+ },
+ { folder: "foldera" },
+ { uri: "http://www.mozilla.com" },
+ { separator: true },
+ { folder: "folderb" },
+ ],
+ "menu/foldera": [
+ { uri: "http://www.yahoo.com",
+ title: "testing Yahoo",
+ changes: {
+ location: "menu/folderb"
+ }
+ },
+ { uri: "http://www.cnn.com",
+ description: "This is a description of the site a at www.cnn.com",
+ changes: {
+ uri: "http://money.cnn.com",
+ description: "new description",
+ }
+ },
+ { livemark: "Livemark1",
+ feedUri: "http://rss.wunderground.com/blog/JeffMasters/rss.xml",
+ siteUri: "http://www.wunderground.com/blog/JeffMasters/show.html",
+ changes: {
+ livemark: "LivemarkOne"
+ }
+ },
+ ],
+ "menu/folderb": [
+ { uri: "http://www.apple.com",
+ tags: ["apple", "mac"],
+ changes: {
+ uri: "http://www.apple.com/iphone/",
+ title: "iPhone",
+ location: "menu",
+ position: "Google",
+ tags: []
+ }
+ }
+ ],
+ toolbar: [
+ { uri: "place:queryType=0&sort=8&maxResults=10&beginTimeRef=1&beginTime=0",
+ title: "Visited Today"
+ }
+ ]
+};
+
+// the state of bookmarks after the first 'modify' action has been performed
+// on them
+var bookmarks_after_first_modify = {
+ "menu": [
+ { uri: "http://www.apple.com/iphone/",
+ title: "iPhone",
+ before: "Google",
+ tags: []
+ },
+ { uri: "http://www.google.com",
+ title: "Google",
+ loadInSidebar: false,
+ tags: [ "google", "computers", "misc"]
+ },
+ { uri: "http://bugzilla.mozilla.org/show_bug.cgi?id=%s",
+ title: "Bugzilla",
+ keyword: "bugzilla"
+ },
+ { folder: "foldera" },
+ { uri: "http://www.mozilla.com" },
+ { separator: true },
+ { folder: "folderb",
+ changes: {
+ location: "menu/foldera",
+ folder: "Folder B",
+ description: "folder description"
+ }
+ }
+ ],
+ "menu/foldera": [
+ { uri: "http://money.cnn.com",
+ title: "http://www.cnn.com",
+ description: "new description"
+ },
+ { livemark: "LivemarkOne",
+ feedUri: "http://rss.wunderground.com/blog/JeffMasters/rss.xml",
+ siteUri: "http://www.wunderground.com/blog/JeffMasters/show.html"
+ }
+ ],
+ "menu/folderb": [
+ { uri: "http://www.yahoo.com",
+ title: "testing Yahoo"
+ }
+ ],
+ "toolbar": [
+ { uri: "place:queryType=0&sort=8&maxResults=10&beginTimeRef=1&beginTime=0",
+ title: "Visited Today"
+ }
+ ]
+};
+
+// a list of bookmarks to delete during a 'delete' action
+var bookmarks_to_delete = {
+ "menu": [
+ { uri: "http://www.google.com",
+ title: "Google",
+ loadInSidebar: false,
+ tags: [ "google", "computers", "misc" ]
+ }
+ ]
+};
+
+// the state of bookmarks after the second 'modify' action has been performed
+// on them
+var bookmarks_after_second_modify = {
+ "menu": [
+ { uri: "http://www.apple.com/iphone/",
+ title: "iPhone"
+ },
+ { uri: "http://bugzilla.mozilla.org/show_bug.cgi?id=%s",
+ title: "Bugzilla",
+ keyword: "bugzilla"
+ },
+ { folder: "foldera" },
+ { uri: "http://www.mozilla.com" },
+ { separator: true },
+ ],
+ "menu/foldera": [
+ { uri: "http://money.cnn.com",
+ title: "http://www.cnn.com",
+ description: "new description"
+ },
+ { livemark: "LivemarkOne",
+ feedUri: "http://rss.wunderground.com/blog/JeffMasters/rss.xml",
+ siteUri: "http://www.wunderground.com/blog/JeffMasters/show.html"
+ },
+ { folder: "Folder B",
+ description: "folder description"
+ }
+ ],
+ "menu/foldera/Folder B": [
+ { uri: "http://www.yahoo.com",
+ title: "testing Yahoo"
+ }
+ ]
+};
+
+// a list of bookmarks which should not be present after the last
+// 'delete' and 'modify' actions
+var bookmarks_absent = {
+ "menu": [
+ { uri: "http://www.google.com",
+ title: "Google"
+ },
+ { folder: "folderb" },
+ { folder: "Folder B" }
+ ]
+};
+
+/*
+ * History asset lists: these define history entries that are used during
+ * the test
+ */
+
+// the initial list of history items to add to the browser
+var history_initial = [
+ { uri: "http://www.google.com/",
+ title: "Google",
+ visits: [
+ { type: 1, date: 0 },
+ { type: 2, date: -1 }
+ ]
+ },
+ { uri: "http://www.cnn.com/",
+ title: "CNN",
+ visits: [
+ { type: 1, date: -1 },
+ { type: 2, date: -36 }
+ ]
+ },
+ { uri: "http://www.google.com/language_tools?hl=en",
+ title: "Language Tools",
+ visits: [
+ { type: 1, date: 0 },
+ { type: 2, date: -40 }
+ ]
+ },
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1, date: 0 },
+ { type: 1, date: -1 },
+ { type: 1, date: -20 },
+ { type: 2, date: -36 }
+ ]
+ }
+];
+
+// a list of history entries to delete during a 'delete' action
+var history_to_delete = [
+ { uri: "http://www.cnn.com/" },
+ { begin: -24,
+ end: -1 },
+ { host: "www.google.com" }
+];
+
+// the expected history entries after the first 'delete' action
+var history_after_delete = [
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ }
+];
+
+// history entries expected to not exist after a 'delete' action
+var history_absent = [
+ { uri: "http://www.google.com/",
+ title: "Google",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -1
+ }
+ ]
+ },
+ { uri: "http://www.cnn.com/",
+ title: "CNN",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 2,
+ date: -36
+ }
+ ]
+ },
+ { uri: "http://www.google.com/language_tools?hl=en",
+ title: "Language Tools",
+ visits: [
+ { type: 1,
+ date: 0
+ },
+ { type: 2,
+ date: -40
+ }
+ ]
+ },
+ { uri: "http://www.mozilla.com/",
+ title: "Mozilla",
+ visits: [
+ { type: 1,
+ date: -1
+ },
+ { type: 1,
+ date: -20
+ }
+ ]
+ }
+];
+
+/*
+ * Password asset lists: these define password entries that are used during
+ * the test
+ */
+
+// the initial list of passwords to add to the browser
+var passwords_initial = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "SeCrEt123",
+ usernameField: "uname",
+ passwordField: "pword",
+ changes: {
+ password: "zippity-do-dah"
+ }
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+// the expected state of passwords after the first 'modify' action
+var passwords_after_first_modify = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "zippity-do-dah",
+ usernameField: "uname",
+ passwordField: "pword"
+ },
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+// a list of passwords to delete during a 'delete' action
+var passwords_to_delete = [
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+// a list of passwords expected to be absent after 'delete' and 'modify'
+// actions
+var passwords_absent = [
+ { hostname: "http://www.example.com",
+ realm: "login",
+ username: "joe",
+ password: "secretlogin"
+ }
+];
+
+// the expected state of passwords after the seconds 'modify' action
+var passwords_after_second_modify = [
+ { hostname: "http://www.example.com",
+ submitURL: "http://login.example.com",
+ username: "joe",
+ password: "zippity-do-dah",
+ usernameField: "uname",
+ passwordField: "pword"
+ }
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Bookmarks.add, bookmarks_initial],
+ [Passwords.add, passwords_initial],
+ [History.add, history_initial],
+ [Sync],
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_initial],
+ [Passwords.verify, passwords_initial],
+ [History.verify, history_initial],
+ [Bookmarks.modify, bookmarks_initial],
+ [Passwords.modify, passwords_initial],
+ [History.delete, history_to_delete],
+ [Bookmarks.verify, bookmarks_after_first_modify],
+ [Passwords.verify, passwords_after_first_modify],
+ [History.verify, history_after_delete],
+ [History.verifyNot, history_absent],
+ [Sync],
+]);
+
+Phase('phase3', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_after_first_modify],
+ [Passwords.verify, passwords_after_first_modify],
+ [History.verify, history_after_delete],
+ [Bookmarks.modify, bookmarks_after_first_modify],
+ [Passwords.modify, passwords_after_first_modify],
+ [Bookmarks.delete, bookmarks_to_delete],
+ [Passwords.delete, passwords_to_delete],
+ [Bookmarks.verify, bookmarks_after_second_modify],
+ [Passwords.verify, passwords_after_second_modify],
+ [Bookmarks.verifyNot, bookmarks_absent],
+ [Passwords.verifyNot, passwords_absent],
+ [Sync],
+]);
+
+Phase('phase4', [
+ [Sync],
+ [Bookmarks.verify, bookmarks_after_second_modify],
+ [Passwords.verify, passwords_after_second_modify],
+ [Bookmarks.verifyNot, bookmarks_absent],
+ [Passwords.verifyNot, passwords_absent],
+ [History.verifyNot, history_absent],
+]);
+
+
diff --git a/services/sync/tests/tps/test_tabs.js b/services/sync/tests/tps/test_tabs.js
new file mode 100644
index 000000000..03f277709
--- /dev/null
+++ b/services/sync/tests/tps/test_tabs.js
@@ -0,0 +1,59 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/*
+ * The list of phases mapped to their corresponding profiles. The object
+ * here must be in strict JSON format, as it will get parsed by the Python
+ * testrunner (no single quotes, extra comma's, etc).
+ */
+EnableEngines(["tabs"]);
+
+var phases = { "phase1": "profile1",
+ "phase2": "profile2",
+ "phase3": "profile1"};
+
+/*
+ * Tab lists.
+ */
+
+var tabs1 = [
+ { uri: "http://mozqa.com/data/firefox/layout/mozilla.html",
+ title: "Mozilla",
+ profile: "profile1"
+ },
+ { uri: "data:text/html,<html><head><title>Hello</title></head><body>Hello</body></html>",
+ title: "Hello",
+ profile: "profile1"
+ }
+];
+
+var tabs2 = [
+ { uri: "http://mozqa.com/data/firefox/layout/mozilla_community.html",
+ title: "Mozilla Community",
+ profile: "profile2"
+ },
+ { uri: "data:text/html,<html><head><title>Bye</title></head><body>Bye</body></html>",
+ profile: "profile2"
+ }
+];
+
+/*
+ * Test phases
+ */
+
+Phase('phase1', [
+ [Tabs.add, tabs1],
+ [Sync]
+]);
+
+Phase('phase2', [
+ [Sync],
+ [Tabs.verify, tabs1],
+ [Tabs.add, tabs2],
+ [Sync]
+]);
+
+Phase('phase3', [
+ [Sync],
+ [Tabs.verify, tabs2]
+]);
diff --git a/services/sync/tests/unit/addon1-search.xml b/services/sync/tests/unit/addon1-search.xml
new file mode 100644
index 000000000..1211d0c97
--- /dev/null
+++ b/services/sync/tests/unit/addon1-search.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5617">
+ <name>Non-Restartless Test Extension</name>
+ <type id="1">Extension</type>
+ <guid>addon1@tests.mozilla.org</guid>
+ <slug>addon11</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>xpcshell@tests.mozilla.org</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="485">http://127.0.0.1:8888/addon1.xpi</install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+ </addon>
+</searchresults>
diff --git a/services/sync/tests/unit/bootstrap1-search.xml b/services/sync/tests/unit/bootstrap1-search.xml
new file mode 100644
index 000000000..b4538fba0
--- /dev/null
+++ b/services/sync/tests/unit/bootstrap1-search.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5617">
+ <name>Restartless Test Extension</name>
+ <type id="1">Extension</type>
+ <guid>bootstrap1@tests.mozilla.org</guid>
+ <slug>bootstrap1</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>xpcshell@tests.mozilla.org</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="485">http://127.0.0.1:8888/bootstrap1.xpi</install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+ </addon>
+</searchresults>
diff --git a/services/sync/tests/unit/fake_login_manager.js b/services/sync/tests/unit/fake_login_manager.js
new file mode 100644
index 000000000..6f3148c45
--- /dev/null
+++ b/services/sync/tests/unit/fake_login_manager.js
@@ -0,0 +1,38 @@
+Cu.import("resource://services-sync/util.js");
+
+// ----------------------------------------
+// Fake Sample Data
+// ----------------------------------------
+
+var fakeSampleLogins = [
+ // Fake nsILoginInfo object.
+ {hostname: "www.boogle.com",
+ formSubmitURL: "http://www.boogle.com/search",
+ httpRealm: "",
+ username: "",
+ password: "",
+ usernameField: "test_person",
+ passwordField: "test_password"}
+];
+
+// ----------------------------------------
+// Fake Login Manager
+// ----------------------------------------
+
+function FakeLoginManager(fakeLogins) {
+ this.fakeLogins = fakeLogins;
+
+ let self = this;
+
+ // Use a fake nsILoginManager object.
+ delete Services.logins;
+ Services.logins = {
+ removeAllLogins: function() { self.fakeLogins = []; },
+ getAllLogins: function() { return self.fakeLogins; },
+ addLogin: function(login) {
+ getTestLogger().info("nsILoginManager.addLogin() called " +
+ "with hostname '" + login.hostname + "'.");
+ self.fakeLogins.push(login);
+ }
+ };
+}
diff --git a/services/sync/tests/unit/head_appinfo.js b/services/sync/tests/unit/head_appinfo.js
new file mode 100644
index 000000000..d2a680df5
--- /dev/null
+++ b/services/sync/tests/unit/head_appinfo.js
@@ -0,0 +1,57 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+
+var gSyncProfile;
+
+gSyncProfile = do_get_profile();
+
+// Init FormHistoryStartup and pretend we opened a profile.
+var fhs = Cc["@mozilla.org/satchel/form-history-startup;1"]
+ .getService(Ci.nsIObserver);
+fhs.observe(null, "profile-after-change", null);
+
+// An app is going to have some prefs set which xpcshell tests don't.
+Services.prefs.setCharPref("identity.sync.tokenserver.uri", "http://token-server");
+
+// Set the validation prefs to attempt validation every time to avoid non-determinism.
+Services.prefs.setIntPref("services.sync.validation.interval", 0);
+Services.prefs.setIntPref("services.sync.validation.percentageChance", 100);
+Services.prefs.setIntPref("services.sync.validation.maxRecords", -1);
+Services.prefs.setBoolPref("services.sync.validation.enabled", true);
+
+// Make sure to provide the right OS so crypto loads the right binaries
+function getOS() {
+ switch (mozinfo.os) {
+ case "win":
+ return "WINNT";
+ case "mac":
+ return "Darwin";
+ default:
+ return "Linux";
+ }
+}
+
+Cu.import("resource://testing-common/AppInfo.jsm", this);
+updateAppInfo({
+ name: "XPCShell",
+ ID: "xpcshell@tests.mozilla.org",
+ version: "1",
+ platformVersion: "",
+ OS: getOS(),
+});
+
+// Register resource aliases. Normally done in SyncComponents.manifest.
+function addResourceAlias() {
+ const resProt = Services.io.getProtocolHandler("resource")
+ .QueryInterface(Ci.nsIResProtocolHandler);
+ for (let s of ["common", "sync", "crypto"]) {
+ let uri = Services.io.newURI("resource://gre/modules/services-" + s + "/", null,
+ null);
+ resProt.setSubstitution("services-" + s, uri);
+ }
+}
+addResourceAlias();
diff --git a/services/sync/tests/unit/head_errorhandler_common.js b/services/sync/tests/unit/head_errorhandler_common.js
new file mode 100644
index 000000000..f4af60d9d
--- /dev/null
+++ b/services/sync/tests/unit/head_errorhandler_common.js
@@ -0,0 +1,112 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines.js");
+
+// Common code for test_errorhandler_{1,2}.js -- pulled out to make it less
+// monolithic and take less time to execute.
+const EHTestsCommon = {
+
+ service_unavailable(request, response) {
+ let body = "Service Unavailable";
+ response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
+ response.setHeader("Retry-After", "42");
+ response.bodyOutputStream.write(body, body.length);
+ },
+
+ sync_httpd_setup() {
+ let global = new ServerWBO("global", {
+ syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {clients: {version: Service.clientsEngine.version,
+ syncID: Service.clientsEngine.syncID},
+ catapult: {version: Service.engineManager.get("catapult").version,
+ syncID: Service.engineManager.get("catapult").syncID}}
+ });
+ let clientsColl = new ServerCollection({}, true);
+
+ // Tracking info/collections.
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+
+ let handler_401 = httpd_handler(401, "Unauthorized");
+ return httpd_setup({
+ // Normal server behaviour.
+ "/1.1/johndoe/storage/meta/global": upd("meta", global.handler()),
+ "/1.1/johndoe/info/collections": collectionsHelper.handler,
+ "/1.1/johndoe/storage/crypto/keys":
+ upd("crypto", (new ServerWBO("keys")).handler()),
+ "/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler()),
+
+ // Credentials are wrong or node reallocated.
+ "/1.1/janedoe/storage/meta/global": handler_401,
+ "/1.1/janedoe/info/collections": handler_401,
+
+ // Maintenance or overloaded (503 + Retry-After) at info/collections.
+ "/maintenance/1.1/broken.info/info/collections": EHTestsCommon.service_unavailable,
+
+ // Maintenance or overloaded (503 + Retry-After) at meta/global.
+ "/maintenance/1.1/broken.meta/storage/meta/global": EHTestsCommon.service_unavailable,
+ "/maintenance/1.1/broken.meta/info/collections": collectionsHelper.handler,
+
+ // Maintenance or overloaded (503 + Retry-After) at crypto/keys.
+ "/maintenance/1.1/broken.keys/storage/meta/global": upd("meta", global.handler()),
+ "/maintenance/1.1/broken.keys/info/collections": collectionsHelper.handler,
+ "/maintenance/1.1/broken.keys/storage/crypto/keys": EHTestsCommon.service_unavailable,
+
+ // Maintenance or overloaded (503 + Retry-After) at wiping collection.
+ "/maintenance/1.1/broken.wipe/info/collections": collectionsHelper.handler,
+ "/maintenance/1.1/broken.wipe/storage/meta/global": upd("meta", global.handler()),
+ "/maintenance/1.1/broken.wipe/storage/crypto/keys":
+ upd("crypto", (new ServerWBO("keys")).handler()),
+ "/maintenance/1.1/broken.wipe/storage": EHTestsCommon.service_unavailable,
+ "/maintenance/1.1/broken.wipe/storage/clients": upd("clients", clientsColl.handler()),
+ "/maintenance/1.1/broken.wipe/storage/catapult": EHTestsCommon.service_unavailable
+ });
+ },
+
+ CatapultEngine: (function() {
+ function CatapultEngine() {
+ SyncEngine.call(this, "Catapult", Service);
+ }
+ CatapultEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ exception: null, // tests fill this in
+ _sync: function _sync() {
+ if (this.exception) {
+ throw this.exception;
+ }
+ }
+ };
+
+ return CatapultEngine;
+ }()),
+
+
+ generateCredentialsChangedFailure() {
+ // Make sync fail due to changed credentials. We simply re-encrypt
+ // the keys with a different Sync Key, without changing the local one.
+ let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562");
+ let keys = Service.collectionKeys.asWBO();
+ keys.encrypt(newSyncKeyBundle);
+ keys.upload(Service.resource(Service.cryptoKeysURL));
+ },
+
+ setUp(server) {
+ return configureIdentity({ username: "johndoe" }).then(
+ () => {
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+ }
+ ).then(
+ () => EHTestsCommon.generateAndUploadKeys()
+ );
+ },
+
+ generateAndUploadKeys() {
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+ }
+};
diff --git a/services/sync/tests/unit/head_helpers.js b/services/sync/tests/unit/head_helpers.js
new file mode 100644
index 000000000..3c59e1de5
--- /dev/null
+++ b/services/sync/tests/unit/head_helpers.js
@@ -0,0 +1,446 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://testing-common/services/common/utils.js");
+Cu.import("resource://testing-common/PlacesTestUtils.jsm");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+
+XPCOMUtils.defineLazyGetter(this, 'SyncPingSchema', function() {
+ let ns = {};
+ Cu.import("resource://gre/modules/FileUtils.jsm", ns);
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"]
+ .createInstance(Ci.nsIFileInputStream);
+ let jsonReader = Cc["@mozilla.org/dom/json;1"]
+ .createInstance(Components.interfaces.nsIJSON);
+ let schema;
+ try {
+ let schemaFile = do_get_file("sync_ping_schema.json");
+ stream.init(schemaFile, ns.FileUtils.MODE_RDONLY, ns.FileUtils.PERMS_FILE, 0);
+ schema = jsonReader.decodeFromStream(stream, stream.available());
+ } finally {
+ stream.close();
+ }
+
+ // Allow tests to make whatever engines they want, this shouldn't cause
+ // validation failure.
+ schema.definitions.engine.properties.name = { type: "string" };
+ return schema;
+});
+
+XPCOMUtils.defineLazyGetter(this, 'SyncPingValidator', function() {
+ let ns = {};
+ Cu.import("resource://testing-common/ajv-4.1.1.js", ns);
+ let ajv = new ns.Ajv({ async: "co*" });
+ return ajv.compile(SyncPingSchema);
+});
+
+var provider = {
+ getFile: function(prop, persistent) {
+ persistent.value = true;
+ switch (prop) {
+ case "ExtPrefDL":
+ return [Services.dirsvc.get("CurProcD", Ci.nsIFile)];
+ default:
+ throw Cr.NS_ERROR_FAILURE;
+ }
+ },
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsIDirectoryServiceProvider])
+};
+Services.dirsvc.QueryInterface(Ci.nsIDirectoryService).registerProvider(provider);
+
+// This is needed for loadAddonTestFunctions().
+var gGlobalScope = this;
+
+function ExtensionsTestPath(path) {
+ if (path[0] != "/") {
+ throw Error("Path must begin with '/': " + path);
+ }
+
+ return "../../../../toolkit/mozapps/extensions/test/xpcshell" + path;
+}
+
+/**
+ * Loads the AddonManager test functions by importing its test file.
+ *
+ * This should be called in the global scope of any test file needing to
+ * interface with the AddonManager. It should only be called once, or the
+ * universe will end.
+ */
+function loadAddonTestFunctions() {
+ const path = ExtensionsTestPath("/head_addons.js");
+ let file = do_get_file(path);
+ let uri = Services.io.newFileURI(file);
+ Services.scriptloader.loadSubScript(uri.spec, gGlobalScope);
+ createAppInfo("xpcshell@tests.mozilla.org", "XPCShell", "1", "1.9.2");
+}
+
+function webExtensionsTestPath(path) {
+ if (path[0] != "/") {
+ throw Error("Path must begin with '/': " + path);
+ }
+
+ return "../../../../toolkit/components/extensions/test/xpcshell" + path;
+}
+
+/**
+ * Loads the WebExtension test functions by importing its test file.
+ */
+function loadWebExtensionTestFunctions() {
+ const path = webExtensionsTestPath("/head_sync.js");
+ let file = do_get_file(path);
+ let uri = Services.io.newFileURI(file);
+ Services.scriptloader.loadSubScript(uri.spec, gGlobalScope);
+}
+
+function getAddonInstall(name) {
+ let f = do_get_file(ExtensionsTestPath("/addons/" + name + ".xpi"));
+ let cb = Async.makeSyncCallback();
+ AddonManager.getInstallForFile(f, cb);
+
+ return Async.waitForSyncCallback(cb);
+}
+
+/**
+ * Obtains an addon from the add-on manager by id.
+ *
+ * This is merely a synchronous wrapper.
+ *
+ * @param id
+ * ID of add-on to fetch
+ * @return addon object on success or undefined or null on failure
+ */
+function getAddonFromAddonManagerByID(id) {
+ let cb = Async.makeSyncCallback();
+ AddonManager.getAddonByID(id, cb);
+ return Async.waitForSyncCallback(cb);
+}
+
+/**
+ * Installs an add-on synchronously from an addonInstall
+ *
+ * @param install addonInstall instance to install
+ */
+function installAddonFromInstall(install) {
+ let cb = Async.makeSyncCallback();
+ let listener = {onInstallEnded: cb};
+ AddonManager.addInstallListener(listener);
+ install.install();
+ Async.waitForSyncCallback(cb);
+ AddonManager.removeAddonListener(listener);
+
+ do_check_neq(null, install.addon);
+ do_check_neq(null, install.addon.syncGUID);
+
+ return install.addon;
+}
+
+/**
+ * Convenience function to install an add-on from the extensions unit tests.
+ *
+ * @param name
+ * String name of add-on to install. e.g. test_install1
+ * @return addon object that was installed
+ */
+function installAddon(name) {
+ let install = getAddonInstall(name);
+ do_check_neq(null, install);
+ return installAddonFromInstall(install);
+}
+
+/**
+ * Convenience function to uninstall an add-on synchronously.
+ *
+ * @param addon
+ * Addon instance to uninstall
+ */
+function uninstallAddon(addon) {
+ let cb = Async.makeSyncCallback();
+ let listener = {onUninstalled: function(uninstalled) {
+ if (uninstalled.id == addon.id) {
+ AddonManager.removeAddonListener(listener);
+ cb(uninstalled);
+ }
+ }};
+
+ AddonManager.addAddonListener(listener);
+ addon.uninstall();
+ Async.waitForSyncCallback(cb);
+}
+
+function generateNewKeys(collectionKeys, collections=null) {
+ let wbo = collectionKeys.generateNewKeysWBO(collections);
+ let modified = new_timestamp();
+ collectionKeys.setContents(wbo.cleartext, modified);
+}
+
+// Helpers for testing open tabs.
+// These reflect part of the internal structure of TabEngine,
+// and stub part of Service.wm.
+
+function mockShouldSkipWindow (win) {
+ return win.closed ||
+ win.mockIsPrivate;
+}
+
+function mockGetTabState (tab) {
+ return tab;
+}
+
+function mockGetWindowEnumerator(url, numWindows, numTabs, indexes, moreURLs) {
+ let elements = [];
+
+ function url2entry(url) {
+ return {
+ url: ((typeof url == "function") ? url() : url),
+ title: "title"
+ };
+ }
+
+ for (let w = 0; w < numWindows; ++w) {
+ let tabs = [];
+ let win = {
+ closed: false,
+ mockIsPrivate: false,
+ gBrowser: {
+ tabs: tabs,
+ },
+ };
+ elements.push(win);
+
+ for (let t = 0; t < numTabs; ++t) {
+ tabs.push(TestingUtils.deepCopy({
+ index: indexes ? indexes() : 1,
+ entries: (moreURLs ? [url].concat(moreURLs()) : [url]).map(url2entry),
+ attributes: {
+ image: "image"
+ },
+ lastAccessed: 1499
+ }));
+ }
+ }
+
+ // Always include a closed window and a private window.
+ elements.push({
+ closed: true,
+ mockIsPrivate: false,
+ gBrowser: {
+ tabs: [],
+ },
+ });
+
+ elements.push({
+ closed: false,
+ mockIsPrivate: true,
+ gBrowser: {
+ tabs: [],
+ },
+ });
+
+ return {
+ hasMoreElements: function () {
+ return elements.length;
+ },
+ getNext: function () {
+ return elements.shift();
+ },
+ };
+}
+
+// Helper that allows checking array equality.
+function do_check_array_eq(a1, a2) {
+ do_check_eq(a1.length, a2.length);
+ for (let i = 0; i < a1.length; ++i) {
+ do_check_eq(a1[i], a2[i]);
+ }
+}
+
+// Helper function to get the sync telemetry and add the typically used test
+// engine names to its list of allowed engines.
+function get_sync_test_telemetry() {
+ let ns = {};
+ Cu.import("resource://services-sync/telemetry.js", ns);
+ let testEngines = ["rotary", "steam", "sterling", "catapult"];
+ for (let engineName of testEngines) {
+ ns.SyncTelemetry.allowedEngines.add(engineName);
+ }
+ ns.SyncTelemetry.submissionInterval = -1;
+ return ns.SyncTelemetry;
+}
+
+function assert_valid_ping(record) {
+ // This is called as the test harness tears down due to shutdown. This
+ // will typically have no recorded syncs, and the validator complains about
+ // it. So ignore such records (but only ignore when *both* shutdown and
+ // no Syncs - either of them not being true might be an actual problem)
+ if (record && (record.why != "shutdown" || record.syncs.length != 0)) {
+ if (!SyncPingValidator(record)) {
+ deepEqual([], SyncPingValidator.errors, "Sync telemetry ping validation failed");
+ }
+ equal(record.version, 1);
+ record.syncs.forEach(p => {
+ lessOrEqual(p.when, Date.now());
+ if (p.devices) {
+ ok(!p.devices.some(device => device.id == p.deviceID));
+ equal(new Set(p.devices.map(device => device.id)).size,
+ p.devices.length, "Duplicate device ids in ping devices list");
+ }
+ });
+ }
+}
+
+// Asserts that `ping` is a ping that doesn't contain any failure information
+function assert_success_ping(ping) {
+ ok(!!ping);
+ assert_valid_ping(ping);
+ ping.syncs.forEach(record => {
+ ok(!record.failureReason);
+ equal(undefined, record.status);
+ greater(record.engines.length, 0);
+ for (let e of record.engines) {
+ ok(!e.failureReason);
+ equal(undefined, e.status);
+ if (e.validation) {
+ equal(undefined, e.validation.problems);
+ equal(undefined, e.validation.failureReason);
+ }
+ if (e.outgoing) {
+ for (let o of e.outgoing) {
+ equal(undefined, o.failed);
+ notEqual(undefined, o.sent);
+ }
+ }
+ if (e.incoming) {
+ equal(undefined, e.incoming.failed);
+ equal(undefined, e.incoming.newFailed);
+ notEqual(undefined, e.incoming.applied || e.incoming.reconciled);
+ }
+ }
+ });
+}
+
+// Hooks into telemetry to validate all pings after calling.
+function validate_all_future_pings() {
+ let telem = get_sync_test_telemetry();
+ telem.submit = assert_valid_ping;
+}
+
+function wait_for_ping(callback, allowErrorPings, getFullPing = false) {
+ return new Promise(resolve => {
+ let telem = get_sync_test_telemetry();
+ let oldSubmit = telem.submit;
+ telem.submit = function(record) {
+ telem.submit = oldSubmit;
+ if (allowErrorPings) {
+ assert_valid_ping(record);
+ } else {
+ assert_success_ping(record);
+ }
+ if (getFullPing) {
+ resolve(record);
+ } else {
+ equal(record.syncs.length, 1);
+ resolve(record.syncs[0]);
+ }
+ };
+ callback();
+ });
+}
+
+// Short helper for wait_for_ping
+function sync_and_validate_telem(allowErrorPings, getFullPing = false) {
+ return wait_for_ping(() => Service.sync(), allowErrorPings, getFullPing);
+}
+
+// Used for the (many) cases where we do a 'partial' sync, where only a single
+// engine is actually synced, but we still want to ensure we're generating a
+// valid ping. Returns a promise that resolves to the ping, or rejects with the
+// thrown error after calling an optional callback.
+function sync_engine_and_validate_telem(engine, allowErrorPings, onError) {
+ return new Promise((resolve, reject) => {
+ let telem = get_sync_test_telemetry();
+ let caughtError = null;
+ // Clear out status, so failures from previous syncs won't show up in the
+ // telemetry ping.
+ let ns = {};
+ Cu.import("resource://services-sync/status.js", ns);
+ ns.Status._engines = {};
+ ns.Status.partial = false;
+ // Ideally we'd clear these out like we do with engines, (probably via
+ // Status.resetSync()), but this causes *numerous* tests to fail, so we just
+ // assume that if no failureReason or engine failures are set, and the
+ // status properties are the same as they were initially, that it's just
+ // a leftover.
+ // This is only an issue since we're triggering the sync of just one engine,
+ // without doing any other parts of the sync.
+ let initialServiceStatus = ns.Status._service;
+ let initialSyncStatus = ns.Status._sync;
+
+ let oldSubmit = telem.submit;
+ telem.submit = function(ping) {
+ telem.submit = oldSubmit;
+ ping.syncs.forEach(record => {
+ if (record && record.status) {
+ // did we see anything to lead us to believe that something bad actually happened
+ let realProblem = record.failureReason || record.engines.some(e => {
+ if (e.failureReason || e.status) {
+ return true;
+ }
+ if (e.outgoing && e.outgoing.some(o => o.failed > 0)) {
+ return true;
+ }
+ return e.incoming && e.incoming.failed;
+ });
+ if (!realProblem) {
+ // no, so if the status is the same as it was initially, just assume
+ // that its leftover and that we can ignore it.
+ if (record.status.sync && record.status.sync == initialSyncStatus) {
+ delete record.status.sync;
+ }
+ if (record.status.service && record.status.service == initialServiceStatus) {
+ delete record.status.service;
+ }
+ if (!record.status.sync && !record.status.service) {
+ delete record.status;
+ }
+ }
+ }
+ });
+ if (allowErrorPings) {
+ assert_valid_ping(ping);
+ } else {
+ assert_success_ping(ping);
+ }
+ equal(ping.syncs.length, 1);
+ if (caughtError) {
+ if (onError) {
+ onError(ping.syncs[0]);
+ }
+ reject(caughtError);
+ } else {
+ resolve(ping.syncs[0]);
+ }
+ }
+ Svc.Obs.notify("weave:service:sync:start");
+ try {
+ engine.sync();
+ } catch (e) {
+ caughtError = e;
+ }
+ if (caughtError) {
+ Svc.Obs.notify("weave:service:sync:error", caughtError);
+ } else {
+ Svc.Obs.notify("weave:service:sync:finish");
+ }
+ });
+}
+
+// Avoid an issue where `client.name2` containing unicode characters causes
+// a number of tests to fail, due to them assuming that we do not need to utf-8
+// encode or decode data sent through the mocked server (see bug 1268912).
+Utils.getDefaultDeviceName = function() {
+ return "Test device name";
+};
+
+
diff --git a/services/sync/tests/unit/head_http_server.js b/services/sync/tests/unit/head_http_server.js
new file mode 100644
index 000000000..26f62310c
--- /dev/null
+++ b/services/sync/tests/unit/head_http_server.js
@@ -0,0 +1,1044 @@
+var Cm = Components.manager;
+
+// Shared logging for all HTTP server functions.
+Cu.import("resource://gre/modules/Log.jsm");
+const SYNC_HTTP_LOGGER = "Sync.Test.Server";
+const SYNC_API_VERSION = "1.1";
+
+// Use the same method that record.js does, which mirrors the server.
+// The server returns timestamps with 1/100 sec granularity. Note that this is
+// subject to change: see Bug 650435.
+function new_timestamp() {
+ return Math.round(Date.now() / 10) / 100;
+}
+
+function return_timestamp(request, response, timestamp) {
+ if (!timestamp) {
+ timestamp = new_timestamp();
+ }
+ let body = "" + timestamp;
+ response.setHeader("X-Weave-Timestamp", body);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ return timestamp;
+}
+
+function basic_auth_header(user, password) {
+ return "Basic " + btoa(user + ":" + Utils.encodeUTF8(password));
+}
+
+function basic_auth_matches(req, user, password) {
+ if (!req.hasHeader("Authorization")) {
+ return false;
+ }
+
+ let expected = basic_auth_header(user, Utils.encodeUTF8(password));
+ return req.getHeader("Authorization") == expected;
+}
+
+function httpd_basic_auth_handler(body, metadata, response) {
+ if (basic_auth_matches(metadata, "guest", "guest")) {
+ response.setStatusLine(metadata.httpVersion, 200, "OK, authorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ } else {
+ body = "This path exists and is protected - failed";
+ response.setStatusLine(metadata.httpVersion, 401, "Unauthorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ }
+ response.bodyOutputStream.write(body, body.length);
+}
+
+/*
+ * Represent a WBO on the server
+ */
+function ServerWBO(id, initialPayload, modified) {
+ if (!id) {
+ throw "No ID for ServerWBO!";
+ }
+ this.id = id;
+ if (!initialPayload) {
+ return;
+ }
+
+ if (typeof initialPayload == "object") {
+ initialPayload = JSON.stringify(initialPayload);
+ }
+ this.payload = initialPayload;
+ this.modified = modified || new_timestamp();
+}
+ServerWBO.prototype = {
+
+ get data() {
+ return JSON.parse(this.payload);
+ },
+
+ get: function() {
+ return JSON.stringify(this, ["id", "modified", "payload"]);
+ },
+
+ put: function(input) {
+ input = JSON.parse(input);
+ this.payload = input.payload;
+ this.modified = new_timestamp();
+ },
+
+ delete: function() {
+ delete this.payload;
+ delete this.modified;
+ },
+
+ // This handler sets `newModified` on the response body if the collection
+ // timestamp has changed. This allows wrapper handlers to extract information
+ // that otherwise would exist only in the body stream.
+ handler: function() {
+ let self = this;
+
+ return function(request, response) {
+ var statusCode = 200;
+ var status = "OK";
+ var body;
+
+ switch(request.method) {
+ case "GET":
+ if (self.payload) {
+ body = self.get();
+ } else {
+ statusCode = 404;
+ status = "Not Found";
+ body = "Not Found";
+ }
+ break;
+
+ case "PUT":
+ self.put(readBytesFromInputStream(request.bodyInputStream));
+ body = JSON.stringify(self.modified);
+ response.setHeader("Content-Type", "application/json");
+ response.newModified = self.modified;
+ break;
+
+ case "DELETE":
+ self.delete();
+ let ts = new_timestamp();
+ body = JSON.stringify(ts);
+ response.setHeader("Content-Type", "application/json");
+ response.newModified = ts;
+ break;
+ }
+ response.setHeader("X-Weave-Timestamp", "" + new_timestamp(), false);
+ response.setStatusLine(request.httpVersion, statusCode, status);
+ response.bodyOutputStream.write(body, body.length);
+ };
+ }
+
+};
+
+
+/**
+ * Represent a collection on the server. The '_wbos' attribute is a
+ * mapping of id -> ServerWBO objects.
+ *
+ * Note that if you want these records to be accessible individually,
+ * you need to register their handlers with the server separately, or use a
+ * containing HTTP server that will do so on your behalf.
+ *
+ * @param wbos
+ * An object mapping WBO IDs to ServerWBOs.
+ * @param acceptNew
+ * If true, POSTs to this collection URI will result in new WBOs being
+ * created and wired in on the fly.
+ * @param timestamp
+ * An optional timestamp value to initialize the modified time of the
+ * collection. This should be in the format returned by new_timestamp().
+ *
+ * @return the new ServerCollection instance.
+ *
+ */
+function ServerCollection(wbos, acceptNew, timestamp) {
+ this._wbos = wbos || {};
+ this.acceptNew = acceptNew || false;
+
+ /*
+ * Track modified timestamp.
+ * We can't just use the timestamps of contained WBOs: an empty collection
+ * has a modified time.
+ */
+ this.timestamp = timestamp || new_timestamp();
+ this._log = Log.repository.getLogger(SYNC_HTTP_LOGGER);
+}
+ServerCollection.prototype = {
+
+ /**
+ * Convenience accessor for our WBO keys.
+ * Excludes deleted items, of course.
+ *
+ * @param filter
+ * A predicate function (applied to the ID and WBO) which dictates
+ * whether to include the WBO's ID in the output.
+ *
+ * @return an array of IDs.
+ */
+ keys: function keys(filter) {
+ let ids = [];
+ for (let [id, wbo] of Object.entries(this._wbos)) {
+ if (wbo.payload && (!filter || filter(id, wbo))) {
+ ids.push(id);
+ }
+ }
+ return ids;
+ },
+
+ /**
+ * Convenience method to get an array of WBOs.
+ * Optionally provide a filter function.
+ *
+ * @param filter
+ * A predicate function, applied to the WBO, which dictates whether to
+ * include the WBO in the output.
+ *
+ * @return an array of ServerWBOs.
+ */
+ wbos: function wbos(filter) {
+ let os = [];
+ for (let [id, wbo] of Object.entries(this._wbos)) {
+ if (wbo.payload) {
+ os.push(wbo);
+ }
+ }
+
+ if (filter) {
+ return os.filter(filter);
+ }
+ return os;
+ },
+
+ /**
+ * Convenience method to get an array of parsed ciphertexts.
+ *
+ * @return an array of the payloads of each stored WBO.
+ */
+ payloads: function () {
+ return this.wbos().map(function (wbo) {
+ return JSON.parse(JSON.parse(wbo.payload).ciphertext);
+ });
+ },
+
+ // Just for syntactic elegance.
+ wbo: function wbo(id) {
+ return this._wbos[id];
+ },
+
+ payload: function payload(id) {
+ return this.wbo(id).payload;
+ },
+
+ /**
+ * Insert the provided WBO under its ID.
+ *
+ * @return the provided WBO.
+ */
+ insertWBO: function insertWBO(wbo) {
+ return this._wbos[wbo.id] = wbo;
+ },
+
+ /**
+ * Insert the provided payload as part of a new ServerWBO with the provided
+ * ID.
+ *
+ * @param id
+ * The GUID for the WBO.
+ * @param payload
+ * The payload, as provided to the ServerWBO constructor.
+ * @param modified
+ * An optional modified time for the ServerWBO.
+ *
+ * @return the inserted WBO.
+ */
+ insert: function insert(id, payload, modified) {
+ return this.insertWBO(new ServerWBO(id, payload, modified));
+ },
+
+ /**
+ * Removes an object entirely from the collection.
+ *
+ * @param id
+ * (string) ID to remove.
+ */
+ remove: function remove(id) {
+ delete this._wbos[id];
+ },
+
+ _inResultSet: function(wbo, options) {
+ return wbo.payload
+ && (!options.ids || (options.ids.indexOf(wbo.id) != -1))
+ && (!options.newer || (wbo.modified > options.newer));
+ },
+
+ count: function(options) {
+ options = options || {};
+ let c = 0;
+ for (let [id, wbo] of Object.entries(this._wbos)) {
+ if (wbo.modified && this._inResultSet(wbo, options)) {
+ c++;
+ }
+ }
+ return c;
+ },
+
+ get: function(options) {
+ let result;
+ if (options.full) {
+ let data = [];
+ for (let [id, wbo] of Object.entries(this._wbos)) {
+ // Drop deleted.
+ if (wbo.modified && this._inResultSet(wbo, options)) {
+ data.push(wbo.get());
+ }
+ }
+ let start = options.offset || 0;
+ if (options.limit) {
+ let numItemsPastOffset = data.length - start;
+ data = data.slice(start, start + options.limit);
+ // use options as a backchannel to set x-weave-next-offset
+ if (numItemsPastOffset > options.limit) {
+ options.nextOffset = start + options.limit;
+ }
+ } else if (start) {
+ data = data.slice(start);
+ }
+ // Our implementation of application/newlines.
+ result = data.join("\n") + "\n";
+
+ // Use options as a backchannel to report count.
+ options.recordCount = data.length;
+ } else {
+ let data = [];
+ for (let [id, wbo] of Object.entries(this._wbos)) {
+ if (this._inResultSet(wbo, options)) {
+ data.push(id);
+ }
+ }
+ let start = options.offset || 0;
+ if (options.limit) {
+ data = data.slice(start, start + options.limit);
+ options.nextOffset = start + options.limit;
+ } else if (start) {
+ data = data.slice(start);
+ }
+ result = JSON.stringify(data);
+ options.recordCount = data.length;
+ }
+ return result;
+ },
+
+ post: function(input) {
+ input = JSON.parse(input);
+ let success = [];
+ let failed = {};
+
+ // This will count records where we have an existing ServerWBO
+ // registered with us as successful and all other records as failed.
+ for (let key in input) {
+ let record = input[key];
+ let wbo = this.wbo(record.id);
+ if (!wbo && this.acceptNew) {
+ this._log.debug("Creating WBO " + JSON.stringify(record.id) +
+ " on the fly.");
+ wbo = new ServerWBO(record.id);
+ this.insertWBO(wbo);
+ }
+ if (wbo) {
+ wbo.payload = record.payload;
+ wbo.modified = new_timestamp();
+ success.push(record.id);
+ } else {
+ failed[record.id] = "no wbo configured";
+ }
+ }
+ return {modified: new_timestamp(),
+ success: success,
+ failed: failed};
+ },
+
+ delete: function(options) {
+ let deleted = [];
+ for (let [id, wbo] of Object.entries(this._wbos)) {
+ if (this._inResultSet(wbo, options)) {
+ this._log.debug("Deleting " + JSON.stringify(wbo));
+ deleted.push(wbo.id);
+ wbo.delete();
+ }
+ }
+ return deleted;
+ },
+
+ // This handler sets `newModified` on the response body if the collection
+ // timestamp has changed.
+ handler: function() {
+ let self = this;
+
+ return function(request, response) {
+ var statusCode = 200;
+ var status = "OK";
+ var body;
+
+ // Parse queryString
+ let options = {};
+ for (let chunk of request.queryString.split("&")) {
+ if (!chunk) {
+ continue;
+ }
+ chunk = chunk.split("=");
+ if (chunk.length == 1) {
+ options[chunk[0]] = "";
+ } else {
+ options[chunk[0]] = chunk[1];
+ }
+ }
+ if (options.ids) {
+ options.ids = options.ids.split(",");
+ }
+ if (options.newer) {
+ options.newer = parseFloat(options.newer);
+ }
+ if (options.limit) {
+ options.limit = parseInt(options.limit, 10);
+ }
+ if (options.offset) {
+ options.offset = parseInt(options.offset, 10);
+ }
+
+ switch(request.method) {
+ case "GET":
+ body = self.get(options, request);
+ // see http://moz-services-docs.readthedocs.io/en/latest/storage/apis-1.5.html
+ // for description of these headers.
+ let { recordCount: records, nextOffset } = options;
+
+ self._log.info("Records: " + records + ", nextOffset: " + nextOffset);
+ if (records != null) {
+ response.setHeader("X-Weave-Records", "" + records);
+ }
+ if (nextOffset) {
+ response.setHeader("X-Weave-Next-Offset", "" + nextOffset);
+ }
+ response.setHeader("X-Last-Modified", "" + this.timestamp);
+ break;
+
+ case "POST":
+ let res = self.post(readBytesFromInputStream(request.bodyInputStream), request);
+ body = JSON.stringify(res);
+ response.newModified = res.modified;
+ break;
+
+ case "DELETE":
+ self._log.debug("Invoking ServerCollection.DELETE.");
+ let deleted = self.delete(options, request);
+ let ts = new_timestamp();
+ body = JSON.stringify(ts);
+ response.newModified = ts;
+ response.deleted = deleted;
+ break;
+ }
+ response.setHeader("X-Weave-Timestamp",
+ "" + new_timestamp(),
+ false);
+ response.setStatusLine(request.httpVersion, statusCode, status);
+ response.bodyOutputStream.write(body, body.length);
+
+ // Update the collection timestamp to the appropriate modified time.
+ // This is either a value set by the handler, or the current time.
+ if (request.method != "GET") {
+ this.timestamp = (response.newModified >= 0) ?
+ response.newModified :
+ new_timestamp();
+ }
+ };
+ }
+
+};
+
+/*
+ * Test setup helpers.
+ */
+function sync_httpd_setup(handlers) {
+ handlers["/1.1/foo/storage/meta/global"]
+ = (new ServerWBO("global", {})).handler();
+ return httpd_setup(handlers);
+}
+
+/*
+ * Track collection modified times. Return closures.
+ */
+function track_collections_helper() {
+
+ /*
+ * Our tracking object.
+ */
+ let collections = {};
+
+ /*
+ * Update the timestamp of a collection.
+ */
+ function update_collection(coll, ts) {
+ _("Updating collection " + coll + " to " + ts);
+ let timestamp = ts || new_timestamp();
+ collections[coll] = timestamp;
+ }
+
+ /*
+ * Invoke a handler, updating the collection's modified timestamp unless
+ * it's a GET request.
+ */
+ function with_updated_collection(coll, f) {
+ return function(request, response) {
+ f.call(this, request, response);
+
+ // Update the collection timestamp to the appropriate modified time.
+ // This is either a value set by the handler, or the current time.
+ if (request.method != "GET") {
+ update_collection(coll, response.newModified)
+ }
+ };
+ }
+
+ /*
+ * Return the info/collections object.
+ */
+ function info_collections(request, response) {
+ let body = "Error.";
+ switch(request.method) {
+ case "GET":
+ body = JSON.stringify(collections);
+ break;
+ default:
+ throw "Non-GET on info_collections.";
+ }
+
+ response.setHeader("Content-Type", "application/json");
+ response.setHeader("X-Weave-Timestamp",
+ "" + new_timestamp(),
+ false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ return {"collections": collections,
+ "handler": info_collections,
+ "with_updated_collection": with_updated_collection,
+ "update_collection": update_collection};
+}
+
+//===========================================================================//
+// httpd.js-based Sync server. //
+//===========================================================================//
+
+/**
+ * In general, the preferred way of using SyncServer is to directly introspect
+ * it. Callbacks are available for operations which are hard to verify through
+ * introspection, such as deletions.
+ *
+ * One of the goals of this server is to provide enough hooks for test code to
+ * find out what it needs without monkeypatching. Use this object as your
+ * prototype, and override as appropriate.
+ */
+var SyncServerCallback = {
+ onCollectionDeleted: function onCollectionDeleted(user, collection) {},
+ onItemDeleted: function onItemDeleted(user, collection, wboID) {},
+
+ /**
+ * Called at the top of every request.
+ *
+ * Allows the test to inspect the request. Hooks should be careful not to
+ * modify or change state of the request or they may impact future processing.
+ * The response is also passed so the callback can set headers etc - but care
+ * must be taken to not screw with the response body or headers that may
+ * conflict with normal operation of this server.
+ */
+ onRequest: function onRequest(request, response) {},
+};
+
+/**
+ * Construct a new test Sync server. Takes a callback object (e.g.,
+ * SyncServerCallback) as input.
+ */
+function SyncServer(callback) {
+ this.callback = callback || {__proto__: SyncServerCallback};
+ this.server = new HttpServer();
+ this.started = false;
+ this.users = {};
+ this._log = Log.repository.getLogger(SYNC_HTTP_LOGGER);
+
+ // Install our own default handler. This allows us to mess around with the
+ // whole URL space.
+ let handler = this.server._handler;
+ handler._handleDefault = this.handleDefault.bind(this, handler);
+}
+SyncServer.prototype = {
+ server: null, // HttpServer.
+ users: null, // Map of username => {collections, password}.
+
+ /**
+ * Start the SyncServer's underlying HTTP server.
+ *
+ * @param port
+ * The numeric port on which to start. -1 implies the default, a
+ * randomly chosen port.
+ * @param cb
+ * A callback function (of no arguments) which is invoked after
+ * startup.
+ */
+ start: function start(port = -1, cb) {
+ if (this.started) {
+ this._log.warn("Warning: server already started on " + this.port);
+ return;
+ }
+ try {
+ this.server.start(port);
+ let i = this.server.identity;
+ this.port = i.primaryPort;
+ this.baseURI = i.primaryScheme + "://" + i.primaryHost + ":" +
+ i.primaryPort + "/";
+ this.started = true;
+ if (cb) {
+ cb();
+ }
+ } catch (ex) {
+ _("==========================================");
+ _("Got exception starting Sync HTTP server.");
+ _("Error: " + Log.exceptionStr(ex));
+ _("Is there a process already listening on port " + port + "?");
+ _("==========================================");
+ do_throw(ex);
+ }
+
+ },
+
+ /**
+ * Stop the SyncServer's HTTP server.
+ *
+ * @param cb
+ * A callback function. Invoked after the server has been stopped.
+ *
+ */
+ stop: function stop(cb) {
+ if (!this.started) {
+ this._log.warn("SyncServer: Warning: server not running. Can't stop me now!");
+ return;
+ }
+
+ this.server.stop(cb);
+ this.started = false;
+ },
+
+ /**
+ * Return a server timestamp for a record.
+ * The server returns timestamps with 1/100 sec granularity. Note that this is
+ * subject to change: see Bug 650435.
+ */
+ timestamp: function timestamp() {
+ return new_timestamp();
+ },
+
+ /**
+ * Create a new user, complete with an empty set of collections.
+ *
+ * @param username
+ * The username to use. An Error will be thrown if a user by that name
+ * already exists.
+ * @param password
+ * A password string.
+ *
+ * @return a user object, as would be returned by server.user(username).
+ */
+ registerUser: function registerUser(username, password) {
+ if (username in this.users) {
+ throw new Error("User already exists.");
+ }
+ this.users[username] = {
+ password: password,
+ collections: {}
+ };
+ return this.user(username);
+ },
+
+ userExists: function userExists(username) {
+ return username in this.users;
+ },
+
+ getCollection: function getCollection(username, collection) {
+ return this.users[username].collections[collection];
+ },
+
+ _insertCollection: function _insertCollection(collections, collection, wbos) {
+ let coll = new ServerCollection(wbos, true);
+ coll.collectionHandler = coll.handler();
+ collections[collection] = coll;
+ return coll;
+ },
+
+ createCollection: function createCollection(username, collection, wbos) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let collections = this.users[username].collections;
+ if (collection in collections) {
+ throw new Error("Collection already exists.");
+ }
+ return this._insertCollection(collections, collection, wbos);
+ },
+
+ /**
+ * Accept a map like the following:
+ * {
+ * meta: {global: {version: 1, ...}},
+ * crypto: {"keys": {}, foo: {bar: 2}},
+ * bookmarks: {}
+ * }
+ * to cause collections and WBOs to be created.
+ * If a collection already exists, no error is raised.
+ * If a WBO already exists, it will be updated to the new contents.
+ */
+ createContents: function createContents(username, collections) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let userCollections = this.users[username].collections;
+ for (let [id, contents] of Object.entries(collections)) {
+ let coll = userCollections[id] ||
+ this._insertCollection(userCollections, id);
+ for (let [wboID, payload] of Object.entries(contents)) {
+ coll.insert(wboID, payload);
+ }
+ }
+ },
+
+ /**
+ * Insert a WBO in an existing collection.
+ */
+ insertWBO: function insertWBO(username, collection, wbo) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let userCollections = this.users[username].collections;
+ if (!(collection in userCollections)) {
+ throw new Error("Unknown collection.");
+ }
+ userCollections[collection].insertWBO(wbo);
+ return wbo;
+ },
+
+ /**
+ * Delete all of the collections for the named user.
+ *
+ * @param username
+ * The name of the affected user.
+ *
+ * @return a timestamp.
+ */
+ deleteCollections: function deleteCollections(username) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let userCollections = this.users[username].collections;
+ for (let name in userCollections) {
+ let coll = userCollections[name];
+ this._log.trace("Bulk deleting " + name + " for " + username + "...");
+ coll.delete({});
+ }
+ this.users[username].collections = {};
+ return this.timestamp();
+ },
+
+ /**
+ * Simple accessor to allow collective binding and abbreviation of a bunch of
+ * methods. Yay!
+ * Use like this:
+ *
+ * let u = server.user("john");
+ * u.collection("bookmarks").wbo("abcdefg").payload; // Etc.
+ *
+ * @return a proxy for the user data stored in this server.
+ */
+ user: function user(username) {
+ let collection = this.getCollection.bind(this, username);
+ let createCollection = this.createCollection.bind(this, username);
+ let createContents = this.createContents.bind(this, username);
+ let modified = function (collectionName) {
+ return collection(collectionName).timestamp;
+ }
+ let deleteCollections = this.deleteCollections.bind(this, username);
+ return {
+ collection: collection,
+ createCollection: createCollection,
+ createContents: createContents,
+ deleteCollections: deleteCollections,
+ modified: modified
+ };
+ },
+
+ /*
+ * Regular expressions for splitting up Sync request paths.
+ * Sync URLs are of the form:
+ * /$apipath/$version/$user/$further
+ * where $further is usually:
+ * storage/$collection/$wbo
+ * or
+ * storage/$collection
+ * or
+ * info/$op
+ * We assume for the sake of simplicity that $apipath is empty.
+ *
+ * N.B., we don't follow any kind of username spec here, because as far as I
+ * can tell there isn't one. See Bug 689671. Instead we follow the Python
+ * server code.
+ *
+ * Path: [all, version, username, first, rest]
+ * Storage: [all, collection?, id?]
+ */
+ pathRE: /^\/([0-9]+(?:\.[0-9]+)?)\/([-._a-zA-Z0-9]+)(?:\/([^\/]+)(?:\/(.+))?)?$/,
+ storageRE: /^([-_a-zA-Z0-9]+)(?:\/([-_a-zA-Z0-9]+)\/?)?$/,
+
+ defaultHeaders: {},
+
+ /**
+ * HTTP response utility.
+ */
+ respond: function respond(req, resp, code, status, body, headers) {
+ resp.setStatusLine(req.httpVersion, code, status);
+ if (!headers)
+ headers = this.defaultHeaders;
+ for (let header in headers) {
+ let value = headers[header];
+ resp.setHeader(header, value);
+ }
+ resp.setHeader("X-Weave-Timestamp", "" + this.timestamp(), false);
+ resp.bodyOutputStream.write(body, body.length);
+ },
+
+ /**
+ * This is invoked by the HttpServer. `this` is bound to the SyncServer;
+ * `handler` is the HttpServer's handler.
+ *
+ * TODO: need to use the correct Sync API response codes and errors here.
+ * TODO: Basic Auth.
+ * TODO: check username in path against username in BasicAuth.
+ */
+ handleDefault: function handleDefault(handler, req, resp) {
+ try {
+ this._handleDefault(handler, req, resp);
+ } catch (e) {
+ if (e instanceof HttpError) {
+ this.respond(req, resp, e.code, e.description, "", {});
+ } else {
+ throw e;
+ }
+ }
+ },
+
+ _handleDefault: function _handleDefault(handler, req, resp) {
+ this._log.debug("SyncServer: Handling request: " + req.method + " " + req.path);
+
+ if (this.callback.onRequest) {
+ this.callback.onRequest(req, resp);
+ }
+
+ let parts = this.pathRE.exec(req.path);
+ if (!parts) {
+ this._log.debug("SyncServer: Unexpected request: bad URL " + req.path);
+ throw HTTP_404;
+ }
+
+ let [all, version, username, first, rest] = parts;
+ // Doing a float compare of the version allows for us to pretend there was
+ // a node-reassignment - eg, we could re-assign from "1.1/user/" to
+ // "1.10/user" - this server will then still accept requests with the new
+ // URL while any code in sync itself which compares URLs will see a
+ // different URL.
+ if (parseFloat(version) != parseFloat(SYNC_API_VERSION)) {
+ this._log.debug("SyncServer: Unknown version.");
+ throw HTTP_404;
+ }
+
+ if (!this.userExists(username)) {
+ this._log.debug("SyncServer: Unknown user.");
+ throw HTTP_401;
+ }
+
+ // Hand off to the appropriate handler for this path component.
+ if (first in this.toplevelHandlers) {
+ let handler = this.toplevelHandlers[first];
+ return handler.call(this, handler, req, resp, version, username, rest);
+ }
+ this._log.debug("SyncServer: Unknown top-level " + first);
+ throw HTTP_404;
+ },
+
+ /**
+ * Compute the object that is returned for an info/collections request.
+ */
+ infoCollections: function infoCollections(username) {
+ let responseObject = {};
+ let colls = this.users[username].collections;
+ for (let coll in colls) {
+ responseObject[coll] = colls[coll].timestamp;
+ }
+ this._log.trace("SyncServer: info/collections returning " +
+ JSON.stringify(responseObject));
+ return responseObject;
+ },
+
+ /**
+ * Collection of the handler methods we use for top-level path components.
+ */
+ toplevelHandlers: {
+ "storage": function handleStorage(handler, req, resp, version, username, rest) {
+ let respond = this.respond.bind(this, req, resp);
+ if (!rest || !rest.length) {
+ this._log.debug("SyncServer: top-level storage " +
+ req.method + " request.");
+
+ // TODO: verify if this is spec-compliant.
+ if (req.method != "DELETE") {
+ respond(405, "Method Not Allowed", "[]", {"Allow": "DELETE"});
+ return undefined;
+ }
+
+ // Delete all collections and track the timestamp for the response.
+ let timestamp = this.user(username).deleteCollections();
+
+ // Return timestamp and OK for deletion.
+ respond(200, "OK", JSON.stringify(timestamp));
+ return undefined;
+ }
+
+ let match = this.storageRE.exec(rest);
+ if (!match) {
+ this._log.warn("SyncServer: Unknown storage operation " + rest);
+ throw HTTP_404;
+ }
+ let [all, collection, wboID] = match;
+ let coll = this.getCollection(username, collection);
+ switch (req.method) {
+ case "GET":
+ if (!coll) {
+ if (wboID) {
+ respond(404, "Not found", "Not found");
+ return undefined;
+ }
+ // *cries inside*: Bug 687299.
+ respond(200, "OK", "[]");
+ return undefined;
+ }
+ if (!wboID) {
+ return coll.collectionHandler(req, resp);
+ }
+ let wbo = coll.wbo(wboID);
+ if (!wbo) {
+ respond(404, "Not found", "Not found");
+ return undefined;
+ }
+ return wbo.handler()(req, resp);
+
+ // TODO: implement handling of X-If-Unmodified-Since for write verbs.
+ case "DELETE":
+ if (!coll) {
+ respond(200, "OK", "{}");
+ return undefined;
+ }
+ if (wboID) {
+ let wbo = coll.wbo(wboID);
+ if (wbo) {
+ wbo.delete();
+ this.callback.onItemDeleted(username, collection, wboID);
+ }
+ respond(200, "OK", "{}");
+ return undefined;
+ }
+ coll.collectionHandler(req, resp);
+
+ // Spot if this is a DELETE for some IDs, and don't blow away the
+ // whole collection!
+ //
+ // We already handled deleting the WBOs by invoking the deleted
+ // collection's handler. However, in the case of
+ //
+ // DELETE storage/foobar
+ //
+ // we also need to remove foobar from the collections map. This
+ // clause tries to differentiate the above request from
+ //
+ // DELETE storage/foobar?ids=foo,baz
+ //
+ // and do the right thing.
+ // TODO: less hacky method.
+ if (-1 == req.queryString.indexOf("ids=")) {
+ // When you delete the entire collection, we drop it.
+ this._log.debug("Deleting entire collection.");
+ delete this.users[username].collections[collection];
+ this.callback.onCollectionDeleted(username, collection);
+ }
+
+ // Notify of item deletion.
+ let deleted = resp.deleted || [];
+ for (let i = 0; i < deleted.length; ++i) {
+ this.callback.onItemDeleted(username, collection, deleted[i]);
+ }
+ return undefined;
+ case "POST":
+ case "PUT":
+ if (!coll) {
+ coll = this.createCollection(username, collection);
+ }
+ if (wboID) {
+ let wbo = coll.wbo(wboID);
+ if (!wbo) {
+ this._log.trace("SyncServer: creating WBO " + collection + "/" + wboID);
+ wbo = coll.insert(wboID);
+ }
+ // Rather than instantiate each WBO's handler function, do it once
+ // per request. They get hit far less often than do collections.
+ wbo.handler()(req, resp);
+ coll.timestamp = resp.newModified;
+ return resp;
+ }
+ return coll.collectionHandler(req, resp);
+ default:
+ throw "Request method " + req.method + " not implemented.";
+ }
+ },
+
+ "info": function handleInfo(handler, req, resp, version, username, rest) {
+ switch (rest) {
+ case "collections":
+ let body = JSON.stringify(this.infoCollections(username));
+ this.respond(req, resp, 200, "OK", body, {
+ "Content-Type": "application/json"
+ });
+ return;
+ case "collection_usage":
+ case "collection_counts":
+ case "quota":
+ // TODO: implement additional info methods.
+ this.respond(req, resp, 200, "OK", "TODO");
+ return;
+ default:
+ // TODO
+ this._log.warn("SyncServer: Unknown info operation " + rest);
+ throw HTTP_404;
+ }
+ }
+ }
+};
+
+/**
+ * Test helper.
+ */
+function serverForUsers(users, contents, callback) {
+ let server = new SyncServer(callback);
+ for (let [user, pass] of Object.entries(users)) {
+ server.registerUser(user, pass);
+ server.createContents(user, contents);
+ }
+ server.start();
+ return server;
+}
diff --git a/services/sync/tests/unit/missing-sourceuri.xml b/services/sync/tests/unit/missing-sourceuri.xml
new file mode 100644
index 000000000..dbc83e17f
--- /dev/null
+++ b/services/sync/tests/unit/missing-sourceuri.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5617">
+ <name>Restartless Test Extension</name>
+ <type id="1">Extension</type>
+ <guid>missing-sourceuri@tests.mozilla.org</guid>
+ <slug>missing-sourceuri</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>{3e3ba16c-1675-4e88-b9c8-afef81b3d2ef}</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="485"></install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+</addon>
+</searchresults>
diff --git a/services/sync/tests/unit/missing-xpi-search.xml b/services/sync/tests/unit/missing-xpi-search.xml
new file mode 100644
index 000000000..9b547cdb3
--- /dev/null
+++ b/services/sync/tests/unit/missing-xpi-search.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5617">
+ <name>Restartless Test Extension</name>
+ <type id="1">Extension</type>
+ <guid>missing-xpi@tests.mozilla.org</guid>
+ <slug>missing-xpi</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>{3e3ba16c-1675-4e88-b9c8-afef81b3d2ef}</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="485">http://127.0.0.1:8888/THIS_DOES_NOT_EXIST.xpi</install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+ </addon>
+</searchresults>
diff --git a/services/sync/tests/unit/places_v10_from_v11.sqlite b/services/sync/tests/unit/places_v10_from_v11.sqlite
new file mode 100644
index 000000000..e3f9ef446
--- /dev/null
+++ b/services/sync/tests/unit/places_v10_from_v11.sqlite
Binary files differ
diff --git a/services/sync/tests/unit/prefs_test_prefs_store.js b/services/sync/tests/unit/prefs_test_prefs_store.js
new file mode 100644
index 000000000..109757a35
--- /dev/null
+++ b/services/sync/tests/unit/prefs_test_prefs_store.js
@@ -0,0 +1,25 @@
+// This is a "preferences" file used by test_prefs_store.js
+
+// The prefs that control what should be synced.
+// Most of these are "default" prefs, so the value itself will not sync.
+pref("services.sync.prefs.sync.testing.int", true);
+pref("services.sync.prefs.sync.testing.string", true);
+pref("services.sync.prefs.sync.testing.bool", true);
+pref("services.sync.prefs.sync.testing.dont.change", true);
+// this one is a user pref, so it *will* sync.
+user_pref("services.sync.prefs.sync.testing.turned.off", false);
+pref("services.sync.prefs.sync.testing.nonexistent", true);
+pref("services.sync.prefs.sync.testing.default", true);
+
+// The preference values - these are all user_prefs, otherwise their value
+// will not be synced.
+user_pref("testing.int", 123);
+user_pref("testing.string", "ohai");
+user_pref("testing.bool", true);
+user_pref("testing.dont.change", "Please don't change me.");
+user_pref("testing.turned.off", "I won't get synced.");
+user_pref("testing.not.turned.on", "I won't get synced either!");
+
+// A pref that exists but still has the default value - will be synced with
+// null as the value.
+pref("testing.default", "I'm the default value");
diff --git a/services/sync/tests/unit/rewrite-search.xml b/services/sync/tests/unit/rewrite-search.xml
new file mode 100644
index 000000000..15476b1ab
--- /dev/null
+++ b/services/sync/tests/unit/rewrite-search.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5617">
+ <name>Rewrite Test Extension</name>
+ <type id="1">Extension</type>
+ <guid>rewrite@tests.mozilla.org</guid>
+ <slug>rewrite</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>xpcshell@tests.mozilla.org</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="485">http://127.0.0.1:8888/require.xpi?src=api</install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+ </addon>
+</searchresults>
diff --git a/services/sync/tests/unit/sync_ping_schema.json b/services/sync/tests/unit/sync_ping_schema.json
new file mode 100644
index 000000000..56114fb93
--- /dev/null
+++ b/services/sync/tests/unit/sync_ping_schema.json
@@ -0,0 +1,198 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "description": "schema for Sync pings, documentation avaliable in toolkit/components/telemetry/docs/sync-ping.rst",
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["version", "syncs", "why"],
+ "properties": {
+ "version": { "type": "integer", "minimum": 0 },
+ "discarded": { "type": "integer", "minimum": 1 },
+ "why": { "enum": ["shutdown", "schedule"] },
+ "syncs": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#/definitions/payload" }
+ }
+ },
+ "definitions": {
+ "payload": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["when", "uid", "took"],
+ "properties": {
+ "didLogin": { "type": "boolean" },
+ "when": { "type": "integer" },
+ "uid": {
+ "type": "string",
+ "pattern": "^[0-9a-f]{32}$"
+ },
+ "devices": {
+ "type": "array",
+ "items": { "$ref": "#/definitions/device" }
+ },
+ "deviceID": {
+ "type": "string",
+ "pattern": "^[0-9a-f]{64}$"
+ },
+ "status": {
+ "type": "object",
+ "anyOf": [
+ { "required": ["sync"] },
+ { "required": ["service"] }
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "sync": { "type": "string" },
+ "service": { "type": "string" }
+ }
+ },
+ "why": { "enum": ["startup", "schedule", "score", "user", "tabs"] },
+ "took": { "type": "integer", "minimum": -1 },
+ "failureReason": { "$ref": "#/definitions/error" },
+ "engines": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#/definitions/engine" }
+ }
+ }
+ },
+ "device": {
+ "required": ["os", "id", "version"],
+ "additionalProperties": false,
+ "type": "object",
+ "properties": {
+ "id": { "type": "string", "pattern": "^[0-9a-f]{64}$" },
+ "os": { "type": "string" },
+ "version": { "type": "string" }
+ }
+ },
+ "engine": {
+ "required": ["name"],
+ "additionalProperties": false,
+ "properties": {
+ "failureReason": { "$ref": "#/definitions/error" },
+ "name": { "enum": ["addons", "bookmarks", "clients", "forms", "history", "passwords", "prefs", "tabs"] },
+ "took": { "type": "integer", "minimum": 1 },
+ "status": { "type": "string" },
+ "incoming": {
+ "type": "object",
+ "additionalProperties": false,
+ "anyOf": [
+ {"required": ["applied"]},
+ {"required": ["failed"]},
+ {"required": ["newFailed"]},
+ {"required": ["reconciled"]}
+ ],
+ "properties": {
+ "applied": { "type": "integer", "minimum": 1 },
+ "failed": { "type": "integer", "minimum": 1 },
+ "newFailed": { "type": "integer", "minimum": 1 },
+ "reconciled": { "type": "integer", "minimum": 1 }
+ }
+ },
+ "outgoing": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#/definitions/outgoingBatch" }
+ },
+ "validation": {
+ "type": "object",
+ "additionalProperties": false,
+ "anyOf": [
+ { "required": ["checked"] },
+ { "required": ["failureReason"] }
+ ],
+ "properties": {
+ "checked": { "type": "integer", "minimum": 0 },
+ "failureReason": { "$ref": "#/definitions/error" },
+ "took": { "type": "integer" },
+ "version": { "type": "integer" },
+ "problems": {
+ "type": "array",
+ "minItems": 1,
+ "$ref": "#/definitions/validationProblem"
+ }
+ }
+ }
+ }
+ },
+ "outgoingBatch": {
+ "type": "object",
+ "additionalProperties": false,
+ "anyOf": [
+ {"required": ["sent"]},
+ {"required": ["failed"]}
+ ],
+ "properties": {
+ "sent": { "type": "integer", "minimum": 1 },
+ "failed": { "type": "integer", "minimum": 1 }
+ }
+ },
+ "error": {
+ "oneOf": [
+ { "$ref": "#/definitions/httpError" },
+ { "$ref": "#/definitions/nsError" },
+ { "$ref": "#/definitions/shutdownError" },
+ { "$ref": "#/definitions/authError" },
+ { "$ref": "#/definitions/otherError" },
+ { "$ref": "#/definitions/unexpectedError" },
+ { "$ref": "#/definitions/sqlError" }
+ ]
+ },
+ "httpError": {
+ "required": ["name", "code"],
+ "properties": {
+ "name": { "enum": ["httperror"] },
+ "code": { "type": "integer" }
+ }
+ },
+ "nsError": {
+ "required": ["name", "code"],
+ "properties": {
+ "name": { "enum": ["nserror"] },
+ "code": { "type": "integer" }
+ }
+ },
+ "shutdownError": {
+ "required": ["name"],
+ "properties": {
+ "name": { "enum": ["shutdownerror"] }
+ }
+ },
+ "authError": {
+ "required": ["name"],
+ "properties": {
+ "name": { "enum": ["autherror"] },
+ "from": { "enum": ["tokenserver", "fxaccounts", "hawkclient"] }
+ }
+ },
+ "otherError": {
+ "required": ["name"],
+ "properties": {
+ "name": { "enum": ["othererror"] },
+ "error": { "type": "string" }
+ }
+ },
+ "unexpectedError": {
+ "required": ["name"],
+ "properties": {
+ "name": { "enum": ["unexpectederror"] },
+ "error": { "type": "string" }
+ }
+ },
+ "sqlError": {
+ "required": ["name"],
+ "properties": {
+ "name": { "enum": ["sqlerror"] },
+ "code": { "type": "integer" }
+ }
+ },
+ "validationProblem": {
+ "required": ["name", "count"],
+ "properties": {
+ "name": { "type": "string" },
+ "count": { "type": "integer" }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/services/sync/tests/unit/systemaddon-search.xml b/services/sync/tests/unit/systemaddon-search.xml
new file mode 100644
index 000000000..d34e3937c
--- /dev/null
+++ b/services/sync/tests/unit/systemaddon-search.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<searchresults total_results="1">
+ <addon id="5618">
+ <name>System Add-on Test</name>
+ <type id="1">Extension</type>
+ <guid>system1@tests.mozilla.org</guid>
+ <slug>addon11</slug>
+ <version>1.0</version>
+
+ <compatible_applications><application>
+ <name>Firefox</name>
+ <application_id>1</application_id>
+ <min_version>3.6</min_version>
+ <max_version>*</max_version>
+ <appID>xpcshell@tests.mozilla.org</appID>
+ </application></compatible_applications>
+ <all_compatible_os><os>ALL</os></all_compatible_os>
+
+ <install os="ALL" size="999">http://127.0.0.1:8888/system.xpi</install>
+ <created epoch="1252903662">
+ 2009-09-14T04:47:42Z
+ </created>
+ <last_updated epoch="1315255329">
+ 2011-09-05T20:42:09Z
+ </last_updated>
+ </addon>
+</searchresults>
diff --git a/services/sync/tests/unit/test_addon_utils.js b/services/sync/tests/unit/test_addon_utils.js
new file mode 100644
index 000000000..bbbd81d0d
--- /dev/null
+++ b/services/sync/tests/unit/test_addon_utils.js
@@ -0,0 +1,141 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://services-sync/addonutils.js");
+Cu.import("resource://services-sync/util.js");
+
+const HTTP_PORT = 8888;
+const SERVER_ADDRESS = "http://127.0.0.1:8888";
+
+var prefs = new Preferences();
+
+prefs.set("extensions.getAddons.get.url",
+ SERVER_ADDRESS + "/search/guid:%IDS%");
+
+loadAddonTestFunctions();
+startupManager();
+
+function createAndStartHTTPServer(port=HTTP_PORT) {
+ try {
+ let server = new HttpServer();
+
+ let bootstrap1XPI = ExtensionsTestPath("/addons/test_bootstrap1_1.xpi");
+
+ server.registerFile("/search/guid:missing-sourceuri%40tests.mozilla.org",
+ do_get_file("missing-sourceuri.xml"));
+
+ server.registerFile("/search/guid:rewrite%40tests.mozilla.org",
+ do_get_file("rewrite-search.xml"));
+
+ server.start(port);
+
+ return server;
+ } catch (ex) {
+ _("Got exception starting HTTP server on port " + port);
+ _("Error: " + Log.exceptionStr(ex));
+ do_throw(ex);
+ }
+}
+
+function run_test() {
+ initTestLogging("Trace");
+
+ run_next_test();
+}
+
+add_test(function test_handle_empty_source_uri() {
+ _("Ensure that search results without a sourceURI are properly ignored.");
+
+ let server = createAndStartHTTPServer();
+
+ const ID = "missing-sourceuri@tests.mozilla.org";
+
+ let cb = Async.makeSpinningCallback();
+ AddonUtils.installAddons([{id: ID, requireSecureURI: false}], cb);
+ let result = cb.wait();
+
+ do_check_true("installedIDs" in result);
+ do_check_eq(0, result.installedIDs.length);
+
+ do_check_true("skipped" in result);
+ do_check_true(result.skipped.includes(ID));
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_ignore_untrusted_source_uris() {
+ _("Ensures that source URIs from insecure schemes are rejected.");
+
+ let ioService = Cc["@mozilla.org/network/io-service;1"]
+ .getService(Ci.nsIIOService);
+
+ const bad = ["http://example.com/foo.xpi",
+ "ftp://example.com/foo.xpi",
+ "silly://example.com/foo.xpi"];
+
+ const good = ["https://example.com/foo.xpi"];
+
+ for (let s of bad) {
+ let sourceURI = ioService.newURI(s, null, null);
+ let addon = {sourceURI: sourceURI, name: "bad", id: "bad"};
+
+ let canInstall = AddonUtils.canInstallAddon(addon);
+ do_check_false(canInstall, "Correctly rejected a bad URL");
+ }
+
+ for (let s of good) {
+ let sourceURI = ioService.newURI(s, null, null);
+ let addon = {sourceURI: sourceURI, name: "good", id: "good"};
+
+ let canInstall = AddonUtils.canInstallAddon(addon);
+ do_check_true(canInstall, "Correctly accepted a good URL");
+ }
+ run_next_test();
+});
+
+add_test(function test_source_uri_rewrite() {
+ _("Ensure that a 'src=api' query string is rewritten to 'src=sync'");
+
+ // This tests for conformance with bug 708134 so server-side metrics aren't
+ // skewed.
+
+ // We resort to monkeypatching because of the API design.
+ let oldFunction = AddonUtils.__proto__.installAddonFromSearchResult;
+
+ let installCalled = false;
+ AddonUtils.__proto__.installAddonFromSearchResult =
+ function testInstallAddon(addon, metadata, cb) {
+
+ do_check_eq(SERVER_ADDRESS + "/require.xpi?src=sync",
+ addon.sourceURI.spec);
+
+ installCalled = true;
+
+ AddonUtils.getInstallFromSearchResult(addon, function (error, install) {
+ do_check_null(error);
+ do_check_eq(SERVER_ADDRESS + "/require.xpi?src=sync",
+ install.sourceURI.spec);
+
+ cb(null, {id: addon.id, addon: addon, install: install});
+ }, false);
+ };
+
+ let server = createAndStartHTTPServer();
+
+ let installCallback = Async.makeSpinningCallback();
+ let installOptions = {
+ id: "rewrite@tests.mozilla.org",
+ requireSecureURI: false,
+ }
+ AddonUtils.installAddons([installOptions], installCallback);
+
+ installCallback.wait();
+ do_check_true(installCalled);
+ AddonUtils.__proto__.installAddonFromSearchResult = oldFunction;
+
+ server.stop(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_addons_engine.js b/services/sync/tests/unit/test_addons_engine.js
new file mode 100644
index 000000000..64e4e32e8
--- /dev/null
+++ b/services/sync/tests/unit/test_addons_engine.js
@@ -0,0 +1,253 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/AddonManager.jsm");
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/addonsreconciler.js");
+Cu.import("resource://services-sync/engines/addons.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+var prefs = new Preferences();
+prefs.set("extensions.getAddons.get.url",
+ "http://localhost:8888/search/guid:%IDS%");
+prefs.set("extensions.install.requireSecureOrigin", false);
+
+loadAddonTestFunctions();
+startupManager();
+
+var engineManager = Service.engineManager;
+
+engineManager.register(AddonsEngine);
+var engine = engineManager.get("addons");
+var reconciler = engine._reconciler;
+var tracker = engine._tracker;
+
+function advance_test() {
+ reconciler._addons = {};
+ reconciler._changes = [];
+
+ let cb = Async.makeSpinningCallback();
+ reconciler.saveState(null, cb);
+ cb.wait();
+
+ run_next_test();
+}
+
+// This is a basic sanity test for the unit test itself. If this breaks, the
+// add-ons API likely changed upstream.
+add_test(function test_addon_install() {
+ _("Ensure basic add-on APIs work as expected.");
+
+ let install = getAddonInstall("test_bootstrap1_1");
+ do_check_neq(install, null);
+ do_check_eq(install.type, "extension");
+ do_check_eq(install.name, "Test Bootstrap 1");
+
+ advance_test();
+});
+
+add_test(function test_find_dupe() {
+ _("Ensure the _findDupe() implementation is sane.");
+
+ // This gets invoked at the top of sync, which is bypassed by this
+ // test, so we do it manually.
+ engine._refreshReconcilerState();
+
+ let addon = installAddon("test_bootstrap1_1");
+
+ let record = {
+ id: Utils.makeGUID(),
+ addonID: addon.id,
+ enabled: true,
+ applicationID: Services.appinfo.ID,
+ source: "amo"
+ };
+
+ let dupe = engine._findDupe(record);
+ do_check_eq(addon.syncGUID, dupe);
+
+ record.id = addon.syncGUID;
+ dupe = engine._findDupe(record);
+ do_check_eq(null, dupe);
+
+ uninstallAddon(addon);
+ advance_test();
+});
+
+add_test(function test_get_changed_ids() {
+ _("Ensure getChangedIDs() has the appropriate behavior.");
+
+ _("Ensure getChangedIDs() returns an empty object by default.");
+ let changes = engine.getChangedIDs();
+ do_check_eq("object", typeof(changes));
+ do_check_eq(0, Object.keys(changes).length);
+
+ _("Ensure tracker changes are populated.");
+ let now = new Date();
+ let changeTime = now.getTime() / 1000;
+ let guid1 = Utils.makeGUID();
+ tracker.addChangedID(guid1, changeTime);
+
+ changes = engine.getChangedIDs();
+ do_check_eq("object", typeof(changes));
+ do_check_eq(1, Object.keys(changes).length);
+ do_check_true(guid1 in changes);
+ do_check_eq(changeTime, changes[guid1]);
+
+ tracker.clearChangedIDs();
+
+ _("Ensure reconciler changes are populated.");
+ let addon = installAddon("test_bootstrap1_1");
+ tracker.clearChangedIDs(); // Just in case.
+ changes = engine.getChangedIDs();
+ do_check_eq("object", typeof(changes));
+ do_check_eq(1, Object.keys(changes).length);
+ do_check_true(addon.syncGUID in changes);
+ _("Change time: " + changeTime + ", addon change: " + changes[addon.syncGUID]);
+ do_check_true(changes[addon.syncGUID] >= changeTime);
+
+ let oldTime = changes[addon.syncGUID];
+ let guid2 = addon.syncGUID;
+ uninstallAddon(addon);
+ changes = engine.getChangedIDs();
+ do_check_eq(1, Object.keys(changes).length);
+ do_check_true(guid2 in changes);
+ do_check_true(changes[guid2] > oldTime);
+
+ _("Ensure non-syncable add-ons aren't picked up by reconciler changes.");
+ reconciler._addons = {};
+ reconciler._changes = [];
+ let record = {
+ id: "DUMMY",
+ guid: Utils.makeGUID(),
+ enabled: true,
+ installed: true,
+ modified: new Date(),
+ type: "UNSUPPORTED",
+ scope: 0,
+ foreignInstall: false
+ };
+ reconciler.addons["DUMMY"] = record;
+ reconciler._addChange(record.modified, CHANGE_INSTALLED, record);
+
+ changes = engine.getChangedIDs();
+ _(JSON.stringify(changes));
+ do_check_eq(0, Object.keys(changes).length);
+
+ advance_test();
+});
+
+add_test(function test_disabled_install_semantics() {
+ _("Ensure that syncing a disabled add-on preserves proper state.");
+
+ // This is essentially a test for bug 712542, which snuck into the original
+ // add-on sync drop. It ensures that when an add-on is installed that the
+ // disabled state and incoming syncGUID is preserved, even on the next sync.
+ const USER = "foo";
+ const PASSWORD = "password";
+ const PASSPHRASE = "abcdeabcdeabcdeabcdeabcdea";
+ const ADDON_ID = "addon1@tests.mozilla.org";
+
+ let server = new SyncServer();
+ server.start();
+ new SyncTestingInfrastructure(server.server, USER, PASSWORD, PASSPHRASE);
+
+ generateNewKeys(Service.collectionKeys);
+
+ let contents = {
+ meta: {global: {engines: {addons: {version: engine.version,
+ syncID: engine.syncID}}}},
+ crypto: {},
+ addons: {}
+ };
+
+ server.registerUser(USER, "password");
+ server.createContents(USER, contents);
+
+ let amoServer = new HttpServer();
+ amoServer.registerFile("/search/guid:addon1%40tests.mozilla.org",
+ do_get_file("addon1-search.xml"));
+
+ let installXPI = ExtensionsTestPath("/addons/test_install1.xpi");
+ amoServer.registerFile("/addon1.xpi", do_get_file(installXPI));
+ amoServer.start(8888);
+
+ // Insert an existing record into the server.
+ let id = Utils.makeGUID();
+ let now = Date.now() / 1000;
+
+ let record = encryptPayload({
+ id: id,
+ applicationID: Services.appinfo.ID,
+ addonID: ADDON_ID,
+ enabled: false,
+ deleted: false,
+ source: "amo",
+ });
+ let wbo = new ServerWBO(id, record, now - 2);
+ server.insertWBO(USER, "addons", wbo);
+
+ _("Performing sync of add-ons engine.");
+ engine._sync();
+
+ // At this point the non-restartless extension should be staged for install.
+
+ // Don't need this server any more.
+ let cb = Async.makeSpinningCallback();
+ amoServer.stop(cb);
+ cb.wait();
+
+ // We ensure the reconciler has recorded the proper ID and enabled state.
+ let addon = reconciler.getAddonStateFromSyncGUID(id);
+ do_check_neq(null, addon);
+ do_check_eq(false, addon.enabled);
+
+ // We fake an app restart and perform another sync, just to make sure things
+ // are sane.
+ restartManager();
+
+ engine._sync();
+
+ // The client should not upload a new record. The old record should be
+ // retained and unmodified.
+ let collection = server.getCollection(USER, "addons");
+ do_check_eq(1, collection.count());
+
+ let payload = collection.payloads()[0];
+ do_check_neq(null, collection.wbo(id));
+ do_check_eq(ADDON_ID, payload.addonID);
+ do_check_false(payload.enabled);
+
+ server.stop(advance_test);
+});
+
+add_test(function cleanup() {
+ // There's an xpcom-shutdown hook for this, but let's give this a shot.
+ reconciler.stopListening();
+ run_next_test();
+});
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Addons").level =
+ Log.Level.Trace;
+ Log.repository.getLogger("Sync.Store.Addons").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Tracker.Addons").level =
+ Log.Level.Trace;
+ Log.repository.getLogger("Sync.AddonsRepository").level =
+ Log.Level.Trace;
+
+ reconciler.startListening();
+
+ // Don't flush to disk in the middle of an event listener!
+ // This causes test hangs on WinXP.
+ reconciler._shouldPersist = false;
+
+ advance_test();
+}
diff --git a/services/sync/tests/unit/test_addons_reconciler.js b/services/sync/tests/unit/test_addons_reconciler.js
new file mode 100644
index 000000000..d93bdfc03
--- /dev/null
+++ b/services/sync/tests/unit/test_addons_reconciler.js
@@ -0,0 +1,195 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/AddonManager.jsm");
+Cu.import("resource://services-sync/addonsreconciler.js");
+Cu.import("resource://services-sync/engines/addons.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+loadAddonTestFunctions();
+startupManager();
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.AddonsReconciler").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.AddonsReconciler").level =
+ Log.Level.Trace;
+
+ Svc.Prefs.set("engine.addons", true);
+ Service.engineManager.register(AddonsEngine);
+
+ run_next_test();
+}
+
+add_test(function test_defaults() {
+ _("Ensure new objects have reasonable defaults.");
+
+ let reconciler = new AddonsReconciler();
+
+ do_check_false(reconciler._listening);
+ do_check_eq("object", typeof(reconciler.addons));
+ do_check_eq(0, Object.keys(reconciler.addons).length);
+ do_check_eq(0, reconciler._changes.length);
+ do_check_eq(0, reconciler._listeners.length);
+
+ run_next_test();
+});
+
+add_test(function test_load_state_empty_file() {
+ _("Ensure loading from a missing file results in defaults being set.");
+
+ let reconciler = new AddonsReconciler();
+
+ reconciler.loadState(null, function(error, loaded) {
+ do_check_eq(null, error);
+ do_check_false(loaded);
+
+ do_check_eq("object", typeof(reconciler.addons));
+ do_check_eq(0, Object.keys(reconciler.addons).length);
+ do_check_eq(0, reconciler._changes.length);
+
+ run_next_test();
+ });
+});
+
+add_test(function test_install_detection() {
+ _("Ensure that add-on installation results in appropriate side-effects.");
+
+ let reconciler = new AddonsReconciler();
+ reconciler.startListening();
+
+ let before = new Date();
+ let addon = installAddon("test_bootstrap1_1");
+ let after = new Date();
+
+ do_check_eq(1, Object.keys(reconciler.addons).length);
+ do_check_true(addon.id in reconciler.addons);
+ let record = reconciler.addons[addon.id];
+
+ const KEYS = ["id", "guid", "enabled", "installed", "modified", "type",
+ "scope", "foreignInstall"];
+ for (let key of KEYS) {
+ do_check_true(key in record);
+ do_check_neq(null, record[key]);
+ }
+
+ do_check_eq(addon.id, record.id);
+ do_check_eq(addon.syncGUID, record.guid);
+ do_check_true(record.enabled);
+ do_check_true(record.installed);
+ do_check_true(record.modified >= before && record.modified <= after);
+ do_check_eq("extension", record.type);
+ do_check_false(record.foreignInstall);
+
+ do_check_eq(1, reconciler._changes.length);
+ let change = reconciler._changes[0];
+ do_check_true(change[0] >= before && change[1] <= after);
+ do_check_eq(CHANGE_INSTALLED, change[1]);
+ do_check_eq(addon.id, change[2]);
+
+ uninstallAddon(addon);
+
+ run_next_test();
+});
+
+add_test(function test_uninstall_detection() {
+ _("Ensure that add-on uninstallation results in appropriate side-effects.");
+
+ let reconciler = new AddonsReconciler();
+ reconciler.startListening();
+
+ reconciler._addons = {};
+ reconciler._changes = [];
+
+ let addon = installAddon("test_bootstrap1_1");
+ let id = addon.id;
+ let guid = addon.syncGUID;
+
+ reconciler._changes = [];
+ uninstallAddon(addon);
+
+ do_check_eq(1, Object.keys(reconciler.addons).length);
+ do_check_true(id in reconciler.addons);
+
+ let record = reconciler.addons[id];
+ do_check_false(record.installed);
+
+ do_check_eq(1, reconciler._changes.length);
+ let change = reconciler._changes[0];
+ do_check_eq(CHANGE_UNINSTALLED, change[1]);
+ do_check_eq(id, change[2]);
+
+ run_next_test();
+});
+
+add_test(function test_load_state_future_version() {
+ _("Ensure loading a file from a future version results in no data loaded.");
+
+ const FILENAME = "TEST_LOAD_STATE_FUTURE_VERSION";
+
+ let reconciler = new AddonsReconciler();
+
+ // First we populate our new file.
+ let state = {version: 100, addons: {foo: {}}, changes: [[1, 1, "foo"]]};
+ let cb = Async.makeSyncCallback();
+
+ // jsonSave() expects an object with ._log, so we give it a reconciler
+ // instance.
+ Utils.jsonSave(FILENAME, reconciler, state, cb);
+ Async.waitForSyncCallback(cb);
+
+ reconciler.loadState(FILENAME, function(error, loaded) {
+ do_check_eq(null, error);
+ do_check_false(loaded);
+
+ do_check_eq("object", typeof(reconciler.addons));
+ do_check_eq(1, Object.keys(reconciler.addons).length);
+ do_check_eq(1, reconciler._changes.length);
+
+ run_next_test();
+ });
+});
+
+add_test(function test_prune_changes_before_date() {
+ _("Ensure that old changes are pruned properly.");
+
+ let reconciler = new AddonsReconciler();
+ reconciler._ensureStateLoaded();
+ reconciler._changes = [];
+
+ let now = new Date();
+ const HOUR_MS = 1000 * 60 * 60;
+
+ _("Ensure pruning an empty changes array works.");
+ reconciler.pruneChangesBeforeDate(now);
+ do_check_eq(0, reconciler._changes.length);
+
+ let old = new Date(now.getTime() - HOUR_MS);
+ let young = new Date(now.getTime() - 1000);
+ reconciler._changes.push([old, CHANGE_INSTALLED, "foo"]);
+ reconciler._changes.push([young, CHANGE_INSTALLED, "bar"]);
+ do_check_eq(2, reconciler._changes.length);
+
+ _("Ensure pruning with an old time won't delete anything.");
+ let threshold = new Date(old.getTime() - 1);
+ reconciler.pruneChangesBeforeDate(threshold);
+ do_check_eq(2, reconciler._changes.length);
+
+ _("Ensure pruning a single item works.");
+ threshold = new Date(young.getTime() - 1000);
+ reconciler.pruneChangesBeforeDate(threshold);
+ do_check_eq(1, reconciler._changes.length);
+ do_check_neq(undefined, reconciler._changes[0]);
+ do_check_eq(young, reconciler._changes[0][0]);
+ do_check_eq("bar", reconciler._changes[0][2]);
+
+ _("Ensure pruning all changes works.");
+ reconciler._changes.push([old, CHANGE_INSTALLED, "foo"]);
+ reconciler.pruneChangesBeforeDate(now);
+ do_check_eq(0, reconciler._changes.length);
+
+ run_next_test();
+});
diff --git a/services/sync/tests/unit/test_addons_store.js b/services/sync/tests/unit/test_addons_store.js
new file mode 100644
index 000000000..b52cfab31
--- /dev/null
+++ b/services/sync/tests/unit/test_addons_store.js
@@ -0,0 +1,539 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://services-sync/addonutils.js");
+Cu.import("resource://services-sync/engines/addons.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/FileUtils.jsm");
+
+const HTTP_PORT = 8888;
+
+var prefs = new Preferences();
+
+prefs.set("extensions.getAddons.get.url", "http://localhost:8888/search/guid:%IDS%");
+prefs.set("extensions.install.requireSecureOrigin", false);
+
+const SYSTEM_ADDON_ID = "system1@tests.mozilla.org";
+let systemAddonFile;
+
+// The system add-on must be installed before AddonManager is started.
+function loadSystemAddon() {
+ let addonFilename = SYSTEM_ADDON_ID + ".xpi";
+ const distroDir = FileUtils.getDir("ProfD", ["sysfeatures", "app0"], true);
+ do_get_file(ExtensionsTestPath("/data/system_addons/system1_1.xpi")).copyTo(distroDir, addonFilename);
+ systemAddonFile = FileUtils.File(distroDir.path);
+ systemAddonFile.append(addonFilename);
+ systemAddonFile.lastModifiedTime = Date.now();
+ // As we're not running in application, we need to setup the features directory
+ // used by system add-ons.
+ registerDirectory("XREAppFeat", distroDir);
+}
+
+loadAddonTestFunctions();
+loadSystemAddon();
+startupManager();
+
+Service.engineManager.register(AddonsEngine);
+var engine = Service.engineManager.get("addons");
+var tracker = engine._tracker;
+var store = engine._store;
+var reconciler = engine._reconciler;
+
+/**
+ * Create a AddonsRec for this application with the fields specified.
+ *
+ * @param id Sync GUID of record
+ * @param addonId ID of add-on
+ * @param enabled Boolean whether record is enabled
+ * @param deleted Boolean whether record was deleted
+ */
+function createRecordForThisApp(id, addonId, enabled, deleted) {
+ return {
+ id: id,
+ addonID: addonId,
+ enabled: enabled,
+ deleted: !!deleted,
+ applicationID: Services.appinfo.ID,
+ source: "amo"
+ };
+}
+
+function createAndStartHTTPServer(port) {
+ try {
+ let server = new HttpServer();
+
+ let bootstrap1XPI = ExtensionsTestPath("/addons/test_bootstrap1_1.xpi");
+
+ server.registerFile("/search/guid:bootstrap1%40tests.mozilla.org",
+ do_get_file("bootstrap1-search.xml"));
+ server.registerFile("/bootstrap1.xpi", do_get_file(bootstrap1XPI));
+
+ server.registerFile("/search/guid:missing-xpi%40tests.mozilla.org",
+ do_get_file("missing-xpi-search.xml"));
+
+ server.registerFile("/search/guid:system1%40tests.mozilla.org",
+ do_get_file("systemaddon-search.xml"));
+ server.registerFile("/system.xpi", systemAddonFile);
+
+ server.start(port);
+
+ return server;
+ } catch (ex) {
+ _("Got exception starting HTTP server on port " + port);
+ _("Error: " + Log.exceptionStr(ex));
+ do_throw(ex);
+ }
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Addons").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Tracker.Addons").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.AddonsRepository").level =
+ Log.Level.Trace;
+
+ reconciler.startListening();
+
+ // Don't flush to disk in the middle of an event listener!
+ // This causes test hangs on WinXP.
+ reconciler._shouldPersist = false;
+
+ run_next_test();
+}
+
+add_test(function test_remove() {
+ _("Ensure removing add-ons from deleted records works.");
+
+ let addon = installAddon("test_bootstrap1_1");
+ let record = createRecordForThisApp(addon.syncGUID, addon.id, true, true);
+
+ let failed = store.applyIncomingBatch([record]);
+ do_check_eq(0, failed.length);
+
+ let newAddon = getAddonFromAddonManagerByID(addon.id);
+ do_check_eq(null, newAddon);
+
+ run_next_test();
+});
+
+add_test(function test_apply_enabled() {
+ _("Ensures that changes to the userEnabled flag apply.");
+
+ let addon = installAddon("test_bootstrap1_1");
+ do_check_true(addon.isActive);
+ do_check_false(addon.userDisabled);
+
+ _("Ensure application of a disable record works as expected.");
+ let records = [];
+ records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
+ let failed = store.applyIncomingBatch(records);
+ do_check_eq(0, failed.length);
+ addon = getAddonFromAddonManagerByID(addon.id);
+ do_check_true(addon.userDisabled);
+ records = [];
+
+ _("Ensure enable record works as expected.");
+ records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, false));
+ failed = store.applyIncomingBatch(records);
+ do_check_eq(0, failed.length);
+ addon = getAddonFromAddonManagerByID(addon.id);
+ do_check_false(addon.userDisabled);
+ records = [];
+
+ _("Ensure enabled state updates don't apply if the ignore pref is set.");
+ records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
+ Svc.Prefs.set("addons.ignoreUserEnabledChanges", true);
+ failed = store.applyIncomingBatch(records);
+ do_check_eq(0, failed.length);
+ addon = getAddonFromAddonManagerByID(addon.id);
+ do_check_false(addon.userDisabled);
+ records = [];
+
+ uninstallAddon(addon);
+ Svc.Prefs.reset("addons.ignoreUserEnabledChanges");
+ run_next_test();
+});
+
+add_test(function test_ignore_different_appid() {
+ _("Ensure that incoming records with a different application ID are ignored.");
+
+ // We test by creating a record that should result in an update.
+ let addon = installAddon("test_bootstrap1_1");
+ do_check_false(addon.userDisabled);
+
+ let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
+ record.applicationID = "FAKE_ID";
+
+ let failed = store.applyIncomingBatch([record]);
+ do_check_eq(0, failed.length);
+
+ let newAddon = getAddonFromAddonManagerByID(addon.id);
+ do_check_false(addon.userDisabled);
+
+ uninstallAddon(addon);
+
+ run_next_test();
+});
+
+add_test(function test_ignore_unknown_source() {
+ _("Ensure incoming records with unknown source are ignored.");
+
+ let addon = installAddon("test_bootstrap1_1");
+
+ let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
+ record.source = "DUMMY_SOURCE";
+
+ let failed = store.applyIncomingBatch([record]);
+ do_check_eq(0, failed.length);
+
+ let newAddon = getAddonFromAddonManagerByID(addon.id);
+ do_check_false(addon.userDisabled);
+
+ uninstallAddon(addon);
+
+ run_next_test();
+});
+
+add_test(function test_apply_uninstall() {
+ _("Ensures that uninstalling an add-on from a record works.");
+
+ let addon = installAddon("test_bootstrap1_1");
+
+ let records = [];
+ records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, true));
+ let failed = store.applyIncomingBatch(records);
+ do_check_eq(0, failed.length);
+
+ addon = getAddonFromAddonManagerByID(addon.id);
+ do_check_eq(null, addon);
+
+ run_next_test();
+});
+
+add_test(function test_addon_syncability() {
+ _("Ensure isAddonSyncable functions properly.");
+
+ Svc.Prefs.set("addons.trustedSourceHostnames",
+ "addons.mozilla.org,other.example.com");
+
+ do_check_false(store.isAddonSyncable(null));
+
+ let addon = installAddon("test_bootstrap1_1");
+ do_check_true(store.isAddonSyncable(addon));
+
+ let dummy = {};
+ const KEYS = ["id", "syncGUID", "type", "scope", "foreignInstall", "isSyncable"];
+ for (let k of KEYS) {
+ dummy[k] = addon[k];
+ }
+
+ do_check_true(store.isAddonSyncable(dummy));
+
+ dummy.type = "UNSUPPORTED";
+ do_check_false(store.isAddonSyncable(dummy));
+ dummy.type = addon.type;
+
+ dummy.scope = 0;
+ do_check_false(store.isAddonSyncable(dummy));
+ dummy.scope = addon.scope;
+
+ dummy.isSyncable = false;
+ do_check_false(store.isAddonSyncable(dummy));
+ dummy.isSyncable = addon.isSyncable;
+
+ dummy.foreignInstall = true;
+ do_check_false(store.isAddonSyncable(dummy));
+ dummy.foreignInstall = false;
+
+ uninstallAddon(addon);
+
+ do_check_false(store.isSourceURITrusted(null));
+
+ function createURI(s) {
+ let service = Components.classes["@mozilla.org/network/io-service;1"]
+ .getService(Components.interfaces.nsIIOService);
+ return service.newURI(s, null, null);
+ }
+
+ let trusted = [
+ "https://addons.mozilla.org/foo",
+ "https://other.example.com/foo"
+ ];
+
+ let untrusted = [
+ "http://addons.mozilla.org/foo", // non-https
+ "ftps://addons.mozilla.org/foo", // non-https
+ "https://untrusted.example.com/foo", // non-trusted hostname`
+ ];
+
+ for (let uri of trusted) {
+ do_check_true(store.isSourceURITrusted(createURI(uri)));
+ }
+
+ for (let uri of untrusted) {
+ do_check_false(store.isSourceURITrusted(createURI(uri)));
+ }
+
+ Svc.Prefs.set("addons.trustedSourceHostnames", "");
+ for (let uri of trusted) {
+ do_check_false(store.isSourceURITrusted(createURI(uri)));
+ }
+
+ Svc.Prefs.set("addons.trustedSourceHostnames", "addons.mozilla.org");
+ do_check_true(store.isSourceURITrusted(createURI("https://addons.mozilla.org/foo")));
+
+ Svc.Prefs.reset("addons.trustedSourceHostnames");
+
+ run_next_test();
+});
+
+add_test(function test_ignore_hotfixes() {
+ _("Ensure that hotfix extensions are ignored.");
+
+ // A hotfix extension is one that has the id the same as the
+ // extensions.hotfix.id pref.
+ let prefs = new Preferences("extensions.");
+
+ let addon = installAddon("test_bootstrap1_1");
+ do_check_true(store.isAddonSyncable(addon));
+
+ let dummy = {};
+ const KEYS = ["id", "syncGUID", "type", "scope", "foreignInstall", "isSyncable"];
+ for (let k of KEYS) {
+ dummy[k] = addon[k];
+ }
+
+ // Basic sanity check.
+ do_check_true(store.isAddonSyncable(dummy));
+
+ prefs.set("hotfix.id", dummy.id);
+ do_check_false(store.isAddonSyncable(dummy));
+
+ // Verify that int values don't throw off checking.
+ let prefSvc = Cc["@mozilla.org/preferences-service;1"]
+ .getService(Ci.nsIPrefService)
+ .getBranch("extensions.");
+ // Need to delete pref before changing type.
+ prefSvc.deleteBranch("hotfix.id");
+ prefSvc.setIntPref("hotfix.id", 0xdeadbeef);
+
+ do_check_true(store.isAddonSyncable(dummy));
+
+ uninstallAddon(addon);
+
+ prefs.reset("hotfix.id");
+
+ run_next_test();
+});
+
+
+add_test(function test_get_all_ids() {
+ _("Ensures that getAllIDs() returns an appropriate set.");
+
+ _("Installing two addons.");
+ let addon1 = installAddon("test_install1");
+ let addon2 = installAddon("test_bootstrap1_1");
+
+ _("Ensure they're syncable.");
+ do_check_true(store.isAddonSyncable(addon1));
+ do_check_true(store.isAddonSyncable(addon2));
+
+ let ids = store.getAllIDs();
+
+ do_check_eq("object", typeof(ids));
+ do_check_eq(2, Object.keys(ids).length);
+ do_check_true(addon1.syncGUID in ids);
+ do_check_true(addon2.syncGUID in ids);
+
+ addon1.install.cancel();
+ uninstallAddon(addon2);
+
+ run_next_test();
+});
+
+add_test(function test_change_item_id() {
+ _("Ensures that changeItemID() works properly.");
+
+ let addon = installAddon("test_bootstrap1_1");
+
+ let oldID = addon.syncGUID;
+ let newID = Utils.makeGUID();
+
+ store.changeItemID(oldID, newID);
+
+ let newAddon = getAddonFromAddonManagerByID(addon.id);
+ do_check_neq(null, newAddon);
+ do_check_eq(newID, newAddon.syncGUID);
+
+ uninstallAddon(newAddon);
+
+ run_next_test();
+});
+
+add_test(function test_create() {
+ _("Ensure creating/installing an add-on from a record works.");
+
+ let server = createAndStartHTTPServer(HTTP_PORT);
+
+ let addon = installAddon("test_bootstrap1_1");
+ let id = addon.id;
+ uninstallAddon(addon);
+
+ let guid = Utils.makeGUID();
+ let record = createRecordForThisApp(guid, id, true, false);
+
+ let failed = store.applyIncomingBatch([record]);
+ do_check_eq(0, failed.length);
+
+ let newAddon = getAddonFromAddonManagerByID(id);
+ do_check_neq(null, newAddon);
+ do_check_eq(guid, newAddon.syncGUID);
+ do_check_false(newAddon.userDisabled);
+
+ uninstallAddon(newAddon);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_create_missing_search() {
+ _("Ensures that failed add-on searches are handled gracefully.");
+
+ let server = createAndStartHTTPServer(HTTP_PORT);
+
+ // The handler for this ID is not installed, so a search should 404.
+ const id = "missing@tests.mozilla.org";
+ let guid = Utils.makeGUID();
+ let record = createRecordForThisApp(guid, id, true, false);
+
+ let failed = store.applyIncomingBatch([record]);
+ do_check_eq(1, failed.length);
+ do_check_eq(guid, failed[0]);
+
+ let addon = getAddonFromAddonManagerByID(id);
+ do_check_eq(null, addon);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_create_bad_install() {
+ _("Ensures that add-ons without a valid install are handled gracefully.");
+
+ let server = createAndStartHTTPServer(HTTP_PORT);
+
+ // The handler returns a search result but the XPI will 404.
+ const id = "missing-xpi@tests.mozilla.org";
+ let guid = Utils.makeGUID();
+ let record = createRecordForThisApp(guid, id, true, false);
+
+ let failed = store.applyIncomingBatch([record]);
+ // This addon had no source URI so was skipped - but it's not treated as
+ // failure.
+ // XXX - this test isn't testing what we thought it was. Previously the addon
+ // was not being installed due to requireSecureURL checking *before* we'd
+ // attempted to get the XPI.
+ // With requireSecureURL disabled we do see a download failure, but the addon
+ // *does* get added to |failed|.
+ // FTR: onDownloadFailed() is called with ERROR_NETWORK_FAILURE, so it's going
+ // to be tricky to distinguish a 404 from other transient network errors
+ // where we do want the addon to end up in |failed|.
+ // This is being tracked in bug 1284778.
+ //do_check_eq(0, failed.length);
+
+ let addon = getAddonFromAddonManagerByID(id);
+ do_check_eq(null, addon);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_ignore_system() {
+ _("Ensure we ignore system addons");
+ // Our system addon should not appear in getAllIDs
+ engine._refreshReconcilerState();
+ let num = 0;
+ for (let guid in store.getAllIDs()) {
+ num += 1;
+ let addon = reconciler.getAddonStateFromSyncGUID(guid);
+ do_check_neq(addon.id, SYSTEM_ADDON_ID);
+ }
+ do_check_true(num > 1, "should have seen at least one.")
+ run_next_test();
+});
+
+add_test(function test_incoming_system() {
+ _("Ensure we handle incoming records that refer to a system addon");
+ // eg, loop initially had a normal addon but it was then "promoted" to be a
+ // system addon but wanted to keep the same ID. The server record exists due
+ // to this.
+
+ // before we start, ensure the system addon isn't disabled.
+ do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled);
+
+ // Now simulate an incoming record with the same ID as the system addon,
+ // but flagged as disabled - it should not be applied.
+ let server = createAndStartHTTPServer(HTTP_PORT);
+ // We make the incoming record flag the system addon as disabled - it should
+ // be ignored.
+ let guid = Utils.makeGUID();
+ let record = createRecordForThisApp(guid, SYSTEM_ADDON_ID, false, false);
+
+ let failed = store.applyIncomingBatch([record]);
+ do_check_eq(0, failed.length);
+
+ // The system addon should still not be userDisabled.
+ do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_wipe() {
+ _("Ensures that wiping causes add-ons to be uninstalled.");
+
+ let addon1 = installAddon("test_bootstrap1_1");
+
+ store.wipe();
+
+ let addon = getAddonFromAddonManagerByID(addon1.id);
+ do_check_eq(null, addon);
+
+ run_next_test();
+});
+
+add_test(function test_wipe_and_install() {
+ _("Ensure wipe followed by install works.");
+
+ // This tests the reset sync flow where remote data is replaced by local. The
+ // receiving client will see a wipe followed by a record which should undo
+ // the wipe.
+ let installed = installAddon("test_bootstrap1_1");
+
+ let record = createRecordForThisApp(installed.syncGUID, installed.id, true,
+ false);
+
+ store.wipe();
+
+ let deleted = getAddonFromAddonManagerByID(installed.id);
+ do_check_null(deleted);
+
+ // Re-applying the record can require re-fetching the XPI.
+ let server = createAndStartHTTPServer(HTTP_PORT);
+
+ store.applyIncoming(record);
+
+ let fetched = getAddonFromAddonManagerByID(record.addonID);
+ do_check_true(!!fetched);
+
+ server.stop(run_next_test);
+});
+
+add_test(function cleanup() {
+ // There's an xpcom-shutdown hook for this, but let's give this a shot.
+ reconciler.stopListening();
+ run_next_test();
+});
+
diff --git a/services/sync/tests/unit/test_addons_tracker.js b/services/sync/tests/unit/test_addons_tracker.js
new file mode 100644
index 000000000..01bf37ab9
--- /dev/null
+++ b/services/sync/tests/unit/test_addons_tracker.js
@@ -0,0 +1,177 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/AddonManager.jsm");
+Cu.import("resource://services-sync/engines/addons.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+loadAddonTestFunctions();
+startupManager();
+Svc.Prefs.set("engine.addons", true);
+
+Service.engineManager.register(AddonsEngine);
+var engine = Service.engineManager.get("addons");
+var reconciler = engine._reconciler;
+var store = engine._store;
+var tracker = engine._tracker;
+
+// Don't write out by default.
+tracker.persistChangedIDs = false;
+
+const addon1ID = "addon1@tests.mozilla.org";
+
+function cleanup_and_advance() {
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ tracker.stopTracking();
+
+ tracker.resetScore();
+ tracker.clearChangedIDs();
+
+ reconciler._addons = {};
+ reconciler._changes = [];
+ let cb = Async.makeSpinningCallback();
+ reconciler.saveState(null, cb);
+ cb.wait();
+
+ run_next_test();
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Addons").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.AddonsReconciler").level =
+ Log.Level.Trace;
+
+ cleanup_and_advance();
+}
+
+add_test(function test_empty() {
+ _("Verify the tracker is empty to start with.");
+
+ do_check_eq(0, Object.keys(tracker.changedIDs).length);
+ do_check_eq(0, tracker.score);
+
+ cleanup_and_advance();
+});
+
+add_test(function test_not_tracking() {
+ _("Ensures the tracker doesn't do anything when it isn't tracking.");
+
+ let addon = installAddon("test_bootstrap1_1");
+ uninstallAddon(addon);
+
+ do_check_eq(0, Object.keys(tracker.changedIDs).length);
+ do_check_eq(0, tracker.score);
+
+ cleanup_and_advance();
+});
+
+add_test(function test_track_install() {
+ _("Ensure that installing an add-on notifies tracker.");
+
+ reconciler.startListening();
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+
+ do_check_eq(0, tracker.score);
+ let addon = installAddon("test_bootstrap1_1");
+ let changed = tracker.changedIDs;
+
+ do_check_eq(1, Object.keys(changed).length);
+ do_check_true(addon.syncGUID in changed);
+ do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
+
+ uninstallAddon(addon);
+ cleanup_and_advance();
+});
+
+add_test(function test_track_uninstall() {
+ _("Ensure that uninstalling an add-on notifies tracker.");
+
+ reconciler.startListening();
+
+ let addon = installAddon("test_bootstrap1_1");
+ let guid = addon.syncGUID;
+ do_check_eq(0, tracker.score);
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+
+ uninstallAddon(addon);
+ let changed = tracker.changedIDs;
+ do_check_eq(1, Object.keys(changed).length);
+ do_check_true(guid in changed);
+ do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
+
+ cleanup_and_advance();
+});
+
+add_test(function test_track_user_disable() {
+ _("Ensure that tracker sees disabling of add-on");
+
+ reconciler.startListening();
+
+ let addon = installAddon("test_bootstrap1_1");
+ do_check_false(addon.userDisabled);
+ do_check_false(addon.appDisabled);
+ do_check_true(addon.isActive);
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+ do_check_eq(0, tracker.score);
+
+ let cb = Async.makeSyncCallback();
+
+ let listener = {
+ onDisabled: function(disabled) {
+ _("onDisabled");
+ if (disabled.id == addon.id) {
+ AddonManager.removeAddonListener(listener);
+ cb();
+ }
+ },
+ onDisabling: function(disabling) {
+ _("onDisabling add-on");
+ }
+ };
+ AddonManager.addAddonListener(listener);
+
+ _("Disabling add-on");
+ addon.userDisabled = true;
+ _("Disabling started...");
+ Async.waitForSyncCallback(cb);
+
+ let changed = tracker.changedIDs;
+ do_check_eq(1, Object.keys(changed).length);
+ do_check_true(addon.syncGUID in changed);
+ do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
+
+ uninstallAddon(addon);
+ cleanup_and_advance();
+});
+
+add_test(function test_track_enable() {
+ _("Ensure that enabling a disabled add-on notifies tracker.");
+
+ reconciler.startListening();
+
+ let addon = installAddon("test_bootstrap1_1");
+ addon.userDisabled = true;
+ store._sleep(0);
+
+ do_check_eq(0, tracker.score);
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+ addon.userDisabled = false;
+ store._sleep(0);
+
+ let changed = tracker.changedIDs;
+ do_check_eq(1, Object.keys(changed).length);
+ do_check_true(addon.syncGUID in changed);
+ do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
+
+ uninstallAddon(addon);
+ cleanup_and_advance();
+});
diff --git a/services/sync/tests/unit/test_bookmark_batch_fail.js b/services/sync/tests/unit/test_bookmark_batch_fail.js
new file mode 100644
index 000000000..cf52fefb7
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_batch_fail.js
@@ -0,0 +1,23 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Making sure a failing sync reports a useful error");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ let engine = new BookmarksEngine(Service);
+ engine._syncStartup = function() {
+ throw "FAIL!";
+ };
+
+ try {
+ _("Try calling the sync that should throw right away");
+ engine._sync();
+ do_throw("Should have failed sync!");
+ }
+ catch(ex) {
+ _("Making sure what we threw ended up as the exception:", ex);
+ do_check_eq(ex, "FAIL!");
+ }
+}
diff --git a/services/sync/tests/unit/test_bookmark_duping.js b/services/sync/tests/unit/test_bookmark_duping.js
new file mode 100644
index 000000000..1e6c6ed2e
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_duping.js
@@ -0,0 +1,644 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://services-sync/bookmark_validator.js");
+
+
+initTestLogging("Trace");
+
+const bms = PlacesUtils.bookmarks;
+
+Service.engineManager.register(BookmarksEngine);
+
+const engine = new BookmarksEngine(Service);
+const store = engine._store;
+store._log.level = Log.Level.Trace;
+engine._log.level = Log.Level.Trace;
+
+function promiseOneObserver(topic) {
+ return new Promise((resolve, reject) => {
+ let observer = function(subject, topic, data) {
+ Services.obs.removeObserver(observer, topic);
+ resolve({ subject: subject, data: data });
+ }
+ Services.obs.addObserver(observer, topic, false);
+ });
+}
+
+function setup() {
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {bookmarks: {version: engine.version,
+ syncID: engine.syncID}}}},
+ bookmarks: {},
+ });
+
+ generateNewKeys(Service.collectionKeys);
+
+ new SyncTestingInfrastructure(server.server);
+
+ let collection = server.user("foo").collection("bookmarks");
+
+ Svc.Obs.notify("weave:engine:start-tracking"); // We skip usual startup...
+
+ return { server, collection };
+}
+
+function* cleanup(server) {
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", true);
+ let promiseStartOver = promiseOneObserver("weave:service:start-over:finish");
+ Service.startOver();
+ yield promiseStartOver;
+ yield new Promise(resolve => server.stop(resolve));
+ yield bms.eraseEverything();
+}
+
+function getFolderChildrenIDs(folderId) {
+ let index = 0;
+ let result = [];
+ while (true) {
+ let childId = bms.getIdForItemAt(folderId, index);
+ if (childId == -1) {
+ break;
+ }
+ result.push(childId);
+ index++;
+ }
+ return result;
+}
+
+function createFolder(parentId, title) {
+ let id = bms.createFolder(parentId, title, 0);
+ let guid = store.GUIDForId(id);
+ return { id, guid };
+}
+
+function createBookmark(parentId, url, title, index = bms.DEFAULT_INDEX) {
+ let uri = Utils.makeURI(url);
+ let id = bms.insertBookmark(parentId, uri, index, title)
+ let guid = store.GUIDForId(id);
+ return { id, guid };
+}
+
+function getServerRecord(collection, id) {
+ let wbo = collection.get({ full: true, ids: [id] });
+ // Whew - lots of json strings inside strings.
+ return JSON.parse(JSON.parse(JSON.parse(wbo).payload).ciphertext);
+}
+
+function* promiseNoLocalItem(guid) {
+ // Check there's no item with the specified guid.
+ let got = yield bms.fetch({ guid });
+ ok(!got, `No record remains with GUID ${guid}`);
+ // and while we are here ensure the places cache doesn't still have it.
+ yield Assert.rejects(PlacesUtils.promiseItemId(guid));
+}
+
+function* validate(collection, expectedFailures = []) {
+ let validator = new BookmarkValidator();
+ let records = collection.payloads();
+
+ let problems = validator.inspectServerRecords(records).problemData;
+ // all non-zero problems.
+ let summary = problems.getSummary().filter(prob => prob.count != 0);
+
+ // split into 2 arrays - expected and unexpected.
+ let isInExpectedFailures = elt => {
+ for (let i = 0; i < expectedFailures.length; i++) {
+ if (elt.name == expectedFailures[i].name && elt.count == expectedFailures[i].count) {
+ return true;
+ }
+ }
+ return false;
+ }
+ let expected = [];
+ let unexpected = [];
+ for (let elt of summary) {
+ (isInExpectedFailures(elt) ? expected : unexpected).push(elt);
+ }
+ if (unexpected.length || expected.length != expectedFailures.length) {
+ do_print("Validation failed:");
+ do_print(JSON.stringify(summary));
+ // print the entire validator output as it has IDs etc.
+ do_print(JSON.stringify(problems, undefined, 2));
+ // All server records and the entire bookmark tree.
+ do_print("Server records:\n" + JSON.stringify(collection.payloads(), undefined, 2));
+ let tree = yield PlacesUtils.promiseBookmarksTree("", { includeItemIds: true });
+ do_print("Local bookmark tree:\n" + JSON.stringify(tree, undefined, 2));
+ ok(false);
+ }
+}
+
+add_task(function* test_dupe_bookmark() {
+ _("Ensure that a bookmark we consider a dupe is handled correctly.");
+
+ let { server, collection } = this.setup();
+
+ try {
+ // The parent folder and one bookmark in it.
+ let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+ let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+
+ engine.sync();
+
+ // We've added the bookmark, its parent (folder1) plus "menu", "toolbar", "unfiled", and "mobile".
+ equal(collection.count(), 6);
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+
+ // Now create a new incoming record that looks alot like a dupe.
+ let newGUID = Utils.makeGUID();
+ let to_apply = {
+ id: newGUID,
+ bmkUri: "http://getfirefox.com/",
+ type: "bookmark",
+ title: "Get Firefox!",
+ parentName: "Folder 1",
+ parentid: folder1_guid,
+ };
+
+ collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 10);
+ _("Syncing so new dupe record is processed");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ // We should have logically deleted the dupe record.
+ equal(collection.count(), 7);
+ ok(getServerRecord(collection, bmk1_guid).deleted);
+ // and physically removed from the local store.
+ yield promiseNoLocalItem(bmk1_guid);
+ // Parent should still only have 1 item.
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+ // The parent record on the server should now reference the new GUID and not the old.
+ let serverRecord = getServerRecord(collection, folder1_guid);
+ ok(!serverRecord.children.includes(bmk1_guid));
+ ok(serverRecord.children.includes(newGUID));
+
+ // and a final sanity check - use the validator
+ yield validate(collection);
+ } finally {
+ yield cleanup(server);
+ }
+});
+
+add_task(function* test_dupe_reparented_bookmark() {
+ _("Ensure that a bookmark we consider a dupe from a different parent is handled correctly");
+
+ let { server, collection } = this.setup();
+
+ try {
+ // The parent folder and one bookmark in it.
+ let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+ let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ // Another parent folder *with the same name*
+ let {id: folder2_id, guid: folder2_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+
+ do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
+
+ engine.sync();
+
+ // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
+ equal(collection.count(), 7);
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+ equal(getFolderChildrenIDs(folder2_id).length, 0);
+
+ // Now create a new incoming record that looks alot like a dupe of the
+ // item in folder1_guid, but with a record that points to folder2_guid.
+ let newGUID = Utils.makeGUID();
+ let to_apply = {
+ id: newGUID,
+ bmkUri: "http://getfirefox.com/",
+ type: "bookmark",
+ title: "Get Firefox!",
+ parentName: "Folder 1",
+ parentid: folder2_guid,
+ };
+
+ collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 10);
+
+ _("Syncing so new dupe record is processed");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ // We should have logically deleted the dupe record.
+ equal(collection.count(), 8);
+ ok(getServerRecord(collection, bmk1_guid).deleted);
+ // and physically removed from the local store.
+ yield promiseNoLocalItem(bmk1_guid);
+ // The original folder no longer has the item
+ equal(getFolderChildrenIDs(folder1_id).length, 0);
+ // But the second dupe folder does.
+ equal(getFolderChildrenIDs(folder2_id).length, 1);
+
+ // The record for folder1 on the server should reference neither old or new GUIDs.
+ let serverRecord1 = getServerRecord(collection, folder1_guid);
+ ok(!serverRecord1.children.includes(bmk1_guid));
+ ok(!serverRecord1.children.includes(newGUID));
+
+ // The record for folder2 on the server should only reference the new new GUID.
+ let serverRecord2 = getServerRecord(collection, folder2_guid);
+ ok(!serverRecord2.children.includes(bmk1_guid));
+ ok(serverRecord2.children.includes(newGUID));
+
+ // and a final sanity check - use the validator
+ yield validate(collection);
+ } finally {
+ yield cleanup(server);
+ }
+});
+
+add_task(function* test_dupe_reparented_locally_changed_bookmark() {
+ _("Ensure that a bookmark with local changes we consider a dupe from a different parent is handled correctly");
+
+ let { server, collection } = this.setup();
+
+ try {
+ // The parent folder and one bookmark in it.
+ let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+ let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ // Another parent folder *with the same name*
+ let {id: folder2_id, guid: folder2_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+
+ do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
+
+ engine.sync();
+
+ // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
+ equal(collection.count(), 7);
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+ equal(getFolderChildrenIDs(folder2_id).length, 0);
+
+ // Now create a new incoming record that looks alot like a dupe of the
+ // item in folder1_guid, but with a record that points to folder2_guid.
+ let newGUID = Utils.makeGUID();
+ let to_apply = {
+ id: newGUID,
+ bmkUri: "http://getfirefox.com/",
+ type: "bookmark",
+ title: "Get Firefox!",
+ parentName: "Folder 1",
+ parentid: folder2_guid,
+ };
+
+ collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 10);
+
+ // Make a change to the bookmark that's a dupe, and set the modification
+ // time further in the future than the incoming record. This will cause
+ // us to issue the infamous "DATA LOSS" warning in the logs but cause us
+ // to *not* apply the incoming record.
+ engine._tracker.addChangedID(bmk1_guid, Date.now() / 1000 + 60);
+
+ _("Syncing so new dupe record is processed");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ // We should have logically deleted the dupe record.
+ equal(collection.count(), 8);
+ ok(getServerRecord(collection, bmk1_guid).deleted);
+ // and physically removed from the local store.
+ yield promiseNoLocalItem(bmk1_guid);
+ // The original folder still longer has the item
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+ // The second folder does not.
+ equal(getFolderChildrenIDs(folder2_id).length, 0);
+
+ // The record for folder1 on the server should reference only the GUID.
+ let serverRecord1 = getServerRecord(collection, folder1_guid);
+ ok(!serverRecord1.children.includes(bmk1_guid));
+ ok(serverRecord1.children.includes(newGUID));
+
+ // The record for folder2 on the server should reference nothing.
+ let serverRecord2 = getServerRecord(collection, folder2_guid);
+ ok(!serverRecord2.children.includes(bmk1_guid));
+ ok(!serverRecord2.children.includes(newGUID));
+
+ // and a final sanity check - use the validator
+ yield validate(collection);
+ } finally {
+ yield cleanup(server);
+ }
+});
+
+add_task(function* test_dupe_reparented_to_earlier_appearing_parent_bookmark() {
+ _("Ensure that a bookmark we consider a dupe from a different parent that " +
+ "appears in the same sync before the dupe item");
+
+ let { server, collection } = this.setup();
+
+ try {
+ // The parent folder and one bookmark in it.
+ let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+ let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ // One more folder we'll use later.
+ let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
+
+ do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
+
+ engine.sync();
+
+ // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
+ equal(collection.count(), 7);
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+
+ let newGUID = Utils.makeGUID();
+ let newParentGUID = Utils.makeGUID();
+
+ // Have the new parent appear before the dupe item.
+ collection.insert(newParentGUID, encryptPayload({
+ id: newParentGUID,
+ type: "folder",
+ title: "Folder 1",
+ parentName: "A second folder",
+ parentid: folder2_guid,
+ children: [newGUID],
+ tags: [],
+ }), Date.now() / 1000 + 10);
+
+ // And also the update to "folder 2" that references the new parent.
+ collection.insert(folder2_guid, encryptPayload({
+ id: folder2_guid,
+ type: "folder",
+ title: "A second folder",
+ parentName: "Bookmarks Toolbar",
+ parentid: "toolbar",
+ children: [newParentGUID],
+ tags: [],
+ }), Date.now() / 1000 + 10);
+
+ // Now create a new incoming record that looks alot like a dupe of the
+ // item in folder1_guid, with a record that points to a parent with the
+ // same name which appeared earlier in this sync.
+ collection.insert(newGUID, encryptPayload({
+ id: newGUID,
+ bmkUri: "http://getfirefox.com/",
+ type: "bookmark",
+ title: "Get Firefox!",
+ parentName: "Folder 1",
+ parentid: newParentGUID,
+ tags: [],
+ }), Date.now() / 1000 + 10);
+
+
+ _("Syncing so new records are processed.");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ // Everything should be parented correctly.
+ equal(getFolderChildrenIDs(folder1_id).length, 0);
+ let newParentID = store.idForGUID(newParentGUID);
+ let newID = store.idForGUID(newGUID);
+ deepEqual(getFolderChildrenIDs(newParentID), [newID]);
+
+ // Make sure the validator thinks everything is hunky-dory.
+ yield validate(collection);
+ } finally {
+ yield cleanup(server);
+ }
+});
+
+add_task(function* test_dupe_reparented_to_later_appearing_parent_bookmark() {
+ _("Ensure that a bookmark we consider a dupe from a different parent that " +
+ "doesn't exist locally as we process the child, but does appear in the same sync");
+
+ let { server, collection } = this.setup();
+
+ try {
+ // The parent folder and one bookmark in it.
+ let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+ let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ // One more folder we'll use later.
+ let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
+
+ do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
+
+ engine.sync();
+
+ // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
+ equal(collection.count(), 7);
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+
+ // Now create a new incoming record that looks alot like a dupe of the
+ // item in folder1_guid, but with a record that points to a parent with the
+ // same name, but a non-existing local ID.
+ let newGUID = Utils.makeGUID();
+ let newParentGUID = Utils.makeGUID();
+
+ collection.insert(newGUID, encryptPayload({
+ id: newGUID,
+ bmkUri: "http://getfirefox.com/",
+ type: "bookmark",
+ title: "Get Firefox!",
+ parentName: "Folder 1",
+ parentid: newParentGUID,
+ tags: [],
+ }), Date.now() / 1000 + 10);
+
+ // Now have the parent appear after (so when the record above is processed
+ // this is still unknown.)
+ collection.insert(newParentGUID, encryptPayload({
+ id: newParentGUID,
+ type: "folder",
+ title: "Folder 1",
+ parentName: "A second folder",
+ parentid: folder2_guid,
+ children: [newGUID],
+ tags: [],
+ }), Date.now() / 1000 + 10);
+ // And also the update to "folder 2" that references the new parent.
+ collection.insert(folder2_guid, encryptPayload({
+ id: folder2_guid,
+ type: "folder",
+ title: "A second folder",
+ parentName: "Bookmarks Toolbar",
+ parentid: "toolbar",
+ children: [newParentGUID],
+ tags: [],
+ }), Date.now() / 1000 + 10);
+
+ _("Syncing so out-of-order records are processed.");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ // The intended parent did end up existing, so it should be parented
+ // correctly after de-duplication.
+ equal(getFolderChildrenIDs(folder1_id).length, 0);
+ let newParentID = store.idForGUID(newParentGUID);
+ let newID = store.idForGUID(newGUID);
+ deepEqual(getFolderChildrenIDs(newParentID), [newID]);
+
+ // Make sure the validator thinks everything is hunky-dory.
+ yield validate(collection);
+ } finally {
+ yield cleanup(server);
+ }
+});
+
+add_task(function* test_dupe_reparented_to_future_arriving_parent_bookmark() {
+ _("Ensure that a bookmark we consider a dupe from a different parent that " +
+ "doesn't exist locally and doesn't appear in this Sync is handled correctly");
+
+ let { server, collection } = this.setup();
+
+ try {
+ // The parent folder and one bookmark in it.
+ let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+ let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ // One more folder we'll use later.
+ let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
+
+ do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
+
+ engine.sync();
+
+ // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
+ equal(collection.count(), 7);
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+
+ // Now create a new incoming record that looks alot like a dupe of the
+ // item in folder1_guid, but with a record that points to a parent with the
+ // same name, but a non-existing local ID.
+ let newGUID = Utils.makeGUID();
+ let newParentGUID = Utils.makeGUID();
+
+ collection.insert(newGUID, encryptPayload({
+ id: newGUID,
+ bmkUri: "http://getfirefox.com/",
+ type: "bookmark",
+ title: "Get Firefox!",
+ parentName: "Folder 1",
+ parentid: newParentGUID,
+ tags: [],
+ }), Date.now() / 1000 + 10);
+
+ _("Syncing so new dupe record is processed");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ // We should have logically deleted the dupe record.
+ equal(collection.count(), 8);
+ ok(getServerRecord(collection, bmk1_guid).deleted);
+ // and physically removed from the local store.
+ yield promiseNoLocalItem(bmk1_guid);
+ // The intended parent doesn't exist, so it remains in the original folder
+ equal(getFolderChildrenIDs(folder1_id).length, 1);
+
+ // The record for folder1 on the server should reference the new GUID.
+ let serverRecord1 = getServerRecord(collection, folder1_guid);
+ ok(!serverRecord1.children.includes(bmk1_guid));
+ ok(serverRecord1.children.includes(newGUID));
+
+ // As the incoming parent is missing the item should have been annotated
+ // with that missing parent.
+ equal(PlacesUtils.annotations.getItemAnnotation(store.idForGUID(newGUID), "sync/parent"),
+ newParentGUID);
+
+ // Check the validator. Sadly, this is known to cause a mismatch between
+ // the server and client views of the tree.
+ let expected = [
+ // We haven't fixed the incoming record that referenced the missing parent.
+ { name: "orphans", count: 1 },
+ ];
+ yield validate(collection, expected);
+
+ // Now have the parent magically appear in a later sync - but
+ // it appears as being in a different parent from our existing "Folder 1",
+ // so the folder itself isn't duped.
+ collection.insert(newParentGUID, encryptPayload({
+ id: newParentGUID,
+ type: "folder",
+ title: "Folder 1",
+ parentName: "A second folder",
+ parentid: folder2_guid,
+ children: [newGUID],
+ tags: [],
+ }), Date.now() / 1000 + 10);
+ // We also queue an update to "folder 2" that references the new parent.
+ collection.insert(folder2_guid, encryptPayload({
+ id: folder2_guid,
+ type: "folder",
+ title: "A second folder",
+ parentName: "Bookmarks Toolbar",
+ parentid: "toolbar",
+ children: [newParentGUID],
+ tags: [],
+ }), Date.now() / 1000 + 10);
+
+ _("Syncing so missing parent appears");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ // The intended parent now does exist, so it should have been reparented.
+ equal(getFolderChildrenIDs(folder1_id).length, 0);
+ let newParentID = store.idForGUID(newParentGUID);
+ let newID = store.idForGUID(newGUID);
+ deepEqual(getFolderChildrenIDs(newParentID), [newID]);
+
+ // validation now has different errors :(
+ expected = [
+ // The validator reports multipleParents because:
+ // * The incoming record newParentGUID still (and correctly) references
+ // newGUID as a child.
+ // * Our original Folder1 was updated to include newGUID when it
+ // originally de-deuped and couldn't find the parent.
+ // * When the parent *did* eventually arrive we used the parent annotation
+ // to correctly reparent - but that reparenting process does not change
+ // the server record.
+ // Hence, newGUID is a child of both those server records :(
+ { name: "multipleParents", count: 1 },
+ ];
+ yield validate(collection, expected);
+
+ } finally {
+ yield cleanup(server);
+ }
+});
+
+add_task(function* test_dupe_empty_folder() {
+ _("Ensure that an empty folder we consider a dupe is handled correctly.");
+ // Empty folders aren't particularly interesting in practice (as that seems
+ // an edge-case) but duping folders with items is broken - bug 1293163.
+ let { server, collection } = this.setup();
+
+ try {
+ // The folder we will end up duping away.
+ let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+
+ engine.sync();
+
+ // We've added 1 folder, "menu", "toolbar", "unfiled", and "mobile".
+ equal(collection.count(), 5);
+
+ // Now create new incoming records that looks alot like a dupe of "Folder 1".
+ let newFolderGUID = Utils.makeGUID();
+ collection.insert(newFolderGUID, encryptPayload({
+ id: newFolderGUID,
+ type: "folder",
+ title: "Folder 1",
+ parentName: "Bookmarks Toolbar",
+ parentid: "toolbar",
+ children: [],
+ }), Date.now() / 1000 + 10);
+
+ _("Syncing so new dupe records are processed");
+ engine.lastSync = engine.lastSync - 0.01;
+ engine.sync();
+
+ yield validate(collection);
+
+ // Collection now has one additional record - the logically deleted dupe.
+ equal(collection.count(), 6);
+ // original folder should be logically deleted.
+ ok(getServerRecord(collection, folder1_guid).deleted);
+ yield promiseNoLocalItem(folder1_guid);
+ } finally {
+ yield cleanup(server);
+ }
+});
+// XXX - TODO - folders with children. Bug 1293163
diff --git a/services/sync/tests/unit/test_bookmark_engine.js b/services/sync/tests/unit/test_bookmark_engine.js
new file mode 100644
index 000000000..9de6c5c0d
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -0,0 +1,665 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
+Cu.import("resource://gre/modules/BookmarkJSONUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/Promise.jsm");
+
+initTestLogging("Trace");
+
+Service.engineManager.register(BookmarksEngine);
+
+function* assertChildGuids(folderGuid, expectedChildGuids, message) {
+ let tree = yield PlacesUtils.promiseBookmarksTree(folderGuid);
+ let childGuids = tree.children.map(child => child.guid);
+ deepEqual(childGuids, expectedChildGuids, message);
+}
+
+add_task(function* test_change_during_sync() {
+ _("Ensure that we track changes made during a sync.");
+
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let tracker = engine._tracker;
+ let server = serverForFoo(engine);
+ new SyncTestingInfrastructure(server.server);
+
+ let collection = server.user("foo").collection("bookmarks");
+
+ let bz_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarksMenuFolderId, Utils.makeURI("https://bugzilla.mozilla.org/"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Bugzilla");
+ let bz_guid = yield PlacesUtils.promiseItemGuid(bz_id);
+ _(`Bugzilla GUID: ${bz_guid}`);
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+
+ try {
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
+ let folder1_guid = store.GUIDForId(folder1_id);
+ _(`Folder GUID: ${folder1_guid}`);
+
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, Utils.makeURI("http://getthunderbird.com/"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+ _(`Thunderbird GUID: ${bmk1_guid}`);
+
+ // Sync is synchronous, so, to simulate a bookmark change made during a
+ // sync, we create a server record that adds a bookmark as a side effect.
+ let bmk2_guid = "get-firefox1"; // New child of Folder 1, created remotely.
+ let bmk3_id = -1; // New child of Folder 1, created locally during sync.
+ let folder2_guid = "folder2-1111"; // New folder, created remotely.
+ let tagQuery_guid = "tag-query111"; // New tag query child of Folder 2, created remotely.
+ let bmk4_guid = "example-org1"; // New tagged child of Folder 2, created remotely.
+ {
+ // An existing record changed on the server that should not trigger
+ // another sync when applied.
+ let bzBmk = new Bookmark("bookmarks", bz_guid);
+ bzBmk.bmkUri = "https://bugzilla.mozilla.org/";
+ bzBmk.description = "New description";
+ bzBmk.title = "Bugzilla";
+ bzBmk.tags = ["new", "tags"];
+ bzBmk.parentName = "Bookmarks Toolbar";
+ bzBmk.parentid = "toolbar";
+ collection.insert(bz_guid, encryptPayload(bzBmk.cleartext));
+
+ let remoteFolder = new BookmarkFolder("bookmarks", folder2_guid);
+ remoteFolder.title = "Folder 2";
+ remoteFolder.children = [bmk4_guid, tagQuery_guid];
+ remoteFolder.parentName = "Bookmarks Menu";
+ remoteFolder.parentid = "menu";
+ collection.insert(folder2_guid, encryptPayload(remoteFolder.cleartext));
+
+ let localFxBmk = new Bookmark("bookmarks", bmk2_guid);
+ localFxBmk.bmkUri = "http://getfirefox.com/";
+ localFxBmk.description = "Firefox is awesome.";
+ localFxBmk.title = "Get Firefox!";
+ localFxBmk.tags = ["firefox", "awesome", "browser"];
+ localFxBmk.keyword = "awesome";
+ localFxBmk.loadInSidebar = false;
+ localFxBmk.parentName = "Folder 1";
+ localFxBmk.parentid = folder1_guid;
+ let remoteFxBmk = collection.insert(bmk2_guid, encryptPayload(localFxBmk.cleartext));
+ remoteFxBmk.get = function get() {
+ _("Inserting bookmark into local store");
+ bmk3_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, Utils.makeURI("https://mozilla.org/"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Mozilla");
+
+ return ServerWBO.prototype.get.apply(this, arguments);
+ };
+
+ // A tag query referencing a nonexistent tag folder, which we should
+ // create locally when applying the record.
+ let localTagQuery = new BookmarkQuery("bookmarks", tagQuery_guid);
+ localTagQuery.bmkUri = "place:type=7&folder=999";
+ localTagQuery.title = "Taggy tags";
+ localTagQuery.folderName = "taggy";
+ localTagQuery.parentName = "Folder 2";
+ localTagQuery.parentid = folder2_guid;
+ collection.insert(tagQuery_guid, encryptPayload(localTagQuery.cleartext));
+
+ // A bookmark that should appear in the results for the tag query.
+ let localTaggedBmk = new Bookmark("bookmarks", bmk4_guid);
+ localTaggedBmk.bmkUri = "https://example.org";
+ localTaggedBmk.title = "Tagged bookmark";
+ localTaggedBmk.tags = ["taggy"];
+ localTaggedBmk.parentName = "Folder 2";
+ localTaggedBmk.parentid = folder2_guid;
+ collection.insert(bmk4_guid, encryptPayload(localTaggedBmk.cleartext));
+ }
+
+ yield* assertChildGuids(folder1_guid, [bmk1_guid], "Folder should have 1 child before first sync");
+
+ _("Perform first sync");
+ {
+ let changes = engine.pullNewChanges();
+ deepEqual(changes.ids().sort(), [folder1_guid, bmk1_guid, "toolbar"].sort(),
+ "Should track bookmark and folder created before first sync");
+ yield sync_engine_and_validate_telem(engine, false);
+ }
+
+ let bmk2_id = store.idForGUID(bmk2_guid);
+ let bmk3_guid = store.GUIDForId(bmk3_id);
+ _(`Mozilla GUID: ${bmk3_guid}`);
+ {
+ equal(store.GUIDForId(bmk2_id), bmk2_guid,
+ "Remote bookmark should be applied during first sync");
+ ok(bmk3_id > -1,
+ "Bookmark created during first sync should exist locally");
+ ok(!collection.wbo(bmk3_guid),
+ "Bookmark created during first sync shouldn't be uploaded yet");
+
+ yield* assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid],
+ "Folder 1 should have 3 children after first sync");
+ yield* assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
+ "Folder 2 should have 2 children after first sync");
+ let taggedURIs = PlacesUtils.tagging.getURIsForTag("taggy");
+ equal(taggedURIs.length, 1, "Should have 1 tagged URI");
+ equal(taggedURIs[0].spec, "https://example.org/",
+ "Synced tagged bookmark should appear in tagged URI list");
+ }
+
+ _("Perform second sync");
+ {
+ let changes = engine.pullNewChanges();
+ deepEqual(changes.ids().sort(), [bmk3_guid, folder1_guid].sort(),
+ "Should track bookmark added during last sync and its parent");
+ yield sync_engine_and_validate_telem(engine, false);
+
+ ok(collection.wbo(bmk3_guid),
+ "Bookmark created during first sync should be uploaded during second sync");
+
+ yield* assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid],
+ "Folder 1 should have same children after second sync");
+ yield* assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
+ "Folder 2 should have same children after second sync");
+ }
+ } finally {
+ store.wipe();
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ yield new Promise(resolve => server.stop(resolve));
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ }
+});
+
+add_task(function* bad_record_allIDs() {
+ let server = new SyncServer();
+ server.start();
+ let syncTesting = new SyncTestingInfrastructure(server.server);
+
+ _("Ensure that bad Places queries don't cause an error in getAllIDs.");
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let badRecordID = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder,
+ Utils.makeURI("place:folder=1138"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ null);
+
+ do_check_true(badRecordID > 0);
+ _("Record is " + badRecordID);
+ _("Type: " + PlacesUtils.bookmarks.getItemType(badRecordID));
+
+ _("Fetching all IDs.");
+ let all = store.getAllIDs();
+
+ _("All IDs: " + JSON.stringify(all));
+ do_check_true("menu" in all);
+ do_check_true("toolbar" in all);
+
+ _("Clean up.");
+ PlacesUtils.bookmarks.removeItem(badRecordID);
+ yield new Promise(r => server.stop(r));
+});
+
+function serverForFoo(engine) {
+ return serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {bookmarks: {version: engine.version,
+ syncID: engine.syncID}}}},
+ bookmarks: {}
+ });
+}
+
+add_task(function* test_processIncoming_error_orderChildren() {
+ _("Ensure that _orderChildren() is called even when _processIncoming() throws an error.");
+
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let server = serverForFoo(engine);
+ new SyncTestingInfrastructure(server.server);
+
+ let collection = server.user("foo").collection("bookmarks");
+
+ try {
+
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
+ let folder1_guid = store.GUIDForId(folder1_id);
+
+ let fxuri = Utils.makeURI("http://getfirefox.com/");
+ let tburi = Utils.makeURI("http://getthunderbird.com/");
+
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+ let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
+ let bmk2_guid = store.GUIDForId(bmk2_id);
+
+ // Create a server record for folder1 where we flip the order of
+ // the children.
+ let folder1_payload = store.createRecord(folder1_guid).cleartext;
+ folder1_payload.children.reverse();
+ collection.insert(folder1_guid, encryptPayload(folder1_payload));
+
+ // Create a bogus record that when synced down will provoke a
+ // network error which in turn provokes an exception in _processIncoming.
+ const BOGUS_GUID = "zzzzzzzzzzzz";
+ let bogus_record = collection.insert(BOGUS_GUID, "I'm a bogus record!");
+ bogus_record.get = function get() {
+ throw "Sync this!";
+ };
+
+ // Make the 10 minutes old so it will only be synced in the toFetch phase.
+ bogus_record.modified = Date.now() / 1000 - 60 * 10;
+ engine.lastSync = Date.now() / 1000 - 60;
+ engine.toFetch = [BOGUS_GUID];
+
+ let error;
+ try {
+ yield sync_engine_and_validate_telem(engine, true)
+ } catch(ex) {
+ error = ex;
+ }
+ ok(!!error);
+
+ // Verify that the bookmark order has been applied.
+ let new_children = store.createRecord(folder1_guid).children;
+ do_check_eq(new_children.length, 2);
+ do_check_eq(new_children[0], folder1_payload.children[0]);
+ do_check_eq(new_children[1], folder1_payload.children[1]);
+
+ do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 1);
+ do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 0);
+
+ } finally {
+ store.wipe();
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ yield new Promise(resolve => server.stop(resolve));
+ }
+});
+
+add_task(function* test_restorePromptsReupload() {
+ _("Ensure that restoring from a backup will reupload all records.");
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let server = serverForFoo(engine);
+ new SyncTestingInfrastructure(server.server);
+
+ let collection = server.user("foo").collection("bookmarks");
+
+ Svc.Obs.notify("weave:engine:start-tracking"); // We skip usual startup...
+
+ try {
+
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
+ let folder1_guid = store.GUIDForId(folder1_id);
+ _("Folder 1: " + folder1_id + ", " + folder1_guid);
+
+ let fxuri = Utils.makeURI("http://getfirefox.com/");
+ let tburi = Utils.makeURI("http://getthunderbird.com/");
+
+ _("Create a single record.");
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+ _("Get Firefox!: " + bmk1_id + ", " + bmk1_guid);
+
+
+ let dirSvc = Cc["@mozilla.org/file/directory_service;1"]
+ .getService(Ci.nsIProperties);
+
+ let backupFile = dirSvc.get("TmpD", Ci.nsILocalFile);
+
+ _("Make a backup.");
+ backupFile.append("t_b_e_" + Date.now() + ".json");
+
+ _("Backing up to file " + backupFile.path);
+ yield BookmarkJSONUtils.exportToFile(backupFile.path);
+
+ _("Create a different record and sync.");
+ let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
+ let bmk2_guid = store.GUIDForId(bmk2_id);
+ _("Get Thunderbird!: " + bmk2_id + ", " + bmk2_guid);
+
+ PlacesUtils.bookmarks.removeItem(bmk1_id);
+
+ let error;
+ try {
+ yield sync_engine_and_validate_telem(engine, false);
+ } catch(ex) {
+ error = ex;
+ _("Got error: " + Log.exceptionStr(ex));
+ }
+ do_check_true(!error);
+
+ _("Verify that there's only one bookmark on the server, and it's Thunderbird.");
+ // Of course, there's also the Bookmarks Toolbar and Bookmarks Menu...
+ let wbos = collection.keys(function (id) {
+ return ["menu", "toolbar", "mobile", "unfiled", folder1_guid].indexOf(id) == -1;
+ });
+ do_check_eq(wbos.length, 1);
+ do_check_eq(wbos[0], bmk2_guid);
+
+ _("Now restore from a backup.");
+ yield BookmarkJSONUtils.importFromFile(backupFile, true);
+
+ _("Ensure we have the bookmarks we expect locally.");
+ let guids = store.getAllIDs();
+ _("GUIDs: " + JSON.stringify(guids));
+ let found = false;
+ let count = 0;
+ let newFX;
+ for (let guid in guids) {
+ count++;
+ let id = store.idForGUID(guid, true);
+ // Only one bookmark, so _all_ should be Firefox!
+ if (PlacesUtils.bookmarks.getItemType(id) == PlacesUtils.bookmarks.TYPE_BOOKMARK) {
+ let uri = PlacesUtils.bookmarks.getBookmarkURI(id);
+ _("Found URI " + uri.spec + " for GUID " + guid);
+ do_check_eq(uri.spec, fxuri.spec);
+ newFX = guid; // Save the new GUID after restore.
+ found = true; // Only runs if the above check passes.
+ }
+ }
+ _("We found it: " + found);
+ do_check_true(found);
+
+ _("Have the correct number of IDs locally, too.");
+ do_check_eq(count, ["menu", "toolbar", "mobile", "unfiled", folder1_id, bmk1_id].length);
+
+ _("Sync again. This'll wipe bookmarks from the server.");
+ try {
+ yield sync_engine_and_validate_telem(engine, false);
+ } catch(ex) {
+ error = ex;
+ _("Got error: " + Log.exceptionStr(ex));
+ }
+ do_check_true(!error);
+
+ _("Verify that there's only one bookmark on the server, and it's Firefox.");
+ // Of course, there's also the Bookmarks Toolbar and Bookmarks Menu...
+ let payloads = server.user("foo").collection("bookmarks").payloads();
+ let bookmarkWBOs = payloads.filter(function (wbo) {
+ return wbo.type == "bookmark";
+ });
+ let folderWBOs = payloads.filter(function (wbo) {
+ return ((wbo.type == "folder") &&
+ (wbo.id != "menu") &&
+ (wbo.id != "toolbar") &&
+ (wbo.id != "unfiled") &&
+ (wbo.id != "mobile"));
+ });
+
+ do_check_eq(bookmarkWBOs.length, 1);
+ do_check_eq(bookmarkWBOs[0].id, newFX);
+ do_check_eq(bookmarkWBOs[0].bmkUri, fxuri.spec);
+ do_check_eq(bookmarkWBOs[0].title, "Get Firefox!");
+
+ _("Our old friend Folder 1 is still in play.");
+ do_check_eq(folderWBOs.length, 1);
+ do_check_eq(folderWBOs[0].title, "Folder 1");
+
+ } finally {
+ store.wipe();
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ yield deferred.promise;
+ }
+});
+
+function FakeRecord(constructor, r) {
+ constructor.call(this, "bookmarks", r.id);
+ for (let x in r) {
+ this[x] = r[x];
+ }
+ // Borrow the constructor's conversion functions.
+ this.toSyncBookmark = constructor.prototype.toSyncBookmark;
+}
+
+// Bug 632287.
+add_task(function* test_mismatched_types() {
+ _("Ensure that handling a record that changes type causes deletion " +
+ "then re-adding.");
+
+ let oldRecord = {
+ "id": "l1nZZXfB8nC7",
+ "type":"folder",
+ "parentName":"Bookmarks Toolbar",
+ "title":"Innerst i Sneglehode",
+ "description":null,
+ "parentid": "toolbar"
+ };
+ oldRecord.cleartext = oldRecord;
+
+ let newRecord = {
+ "id": "l1nZZXfB8nC7",
+ "type":"livemark",
+ "siteUri":"http://sneglehode.wordpress.com/",
+ "feedUri":"http://sneglehode.wordpress.com/feed/",
+ "parentName":"Bookmarks Toolbar",
+ "title":"Innerst i Sneglehode",
+ "description":null,
+ "children":
+ ["HCRq40Rnxhrd", "YeyWCV1RVsYw", "GCceVZMhvMbP", "sYi2hevdArlF",
+ "vjbZlPlSyGY8", "UtjUhVyrpeG6", "rVq8WMG2wfZI", "Lx0tcy43ZKhZ",
+ "oT74WwV8_j4P", "IztsItWVSo3-"],
+ "parentid": "toolbar"
+ };
+ newRecord.cleartext = newRecord;
+
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let server = serverForFoo(engine);
+ new SyncTestingInfrastructure(server.server);
+
+ _("GUID: " + store.GUIDForId(6, true));
+
+ try {
+ let bms = PlacesUtils.bookmarks;
+ let oldR = new FakeRecord(BookmarkFolder, oldRecord);
+ let newR = new FakeRecord(Livemark, newRecord);
+ oldR.parentid = PlacesUtils.bookmarks.toolbarGuid;
+ newR.parentid = PlacesUtils.bookmarks.toolbarGuid;
+
+ store.applyIncoming(oldR);
+ _("Applied old. It's a folder.");
+ let oldID = store.idForGUID(oldR.id);
+ _("Old ID: " + oldID);
+ do_check_eq(bms.getItemType(oldID), bms.TYPE_FOLDER);
+ do_check_false(PlacesUtils.annotations
+ .itemHasAnnotation(oldID, PlacesUtils.LMANNO_FEEDURI));
+
+ store.applyIncoming(newR);
+ let newID = store.idForGUID(newR.id);
+ _("New ID: " + newID);
+
+ _("Applied new. It's a livemark.");
+ do_check_eq(bms.getItemType(newID), bms.TYPE_FOLDER);
+ do_check_true(PlacesUtils.annotations
+ .itemHasAnnotation(newID, PlacesUtils.LMANNO_FEEDURI));
+
+ } finally {
+ store.wipe();
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ yield new Promise(r => server.stop(r));
+ }
+});
+
+add_task(function* test_bookmark_guidMap_fail() {
+ _("Ensure that failures building the GUID map cause early death.");
+
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+
+ let server = serverForFoo(engine);
+ let coll = server.user("foo").collection("bookmarks");
+ new SyncTestingInfrastructure(server.server);
+
+ // Add one item to the server.
+ let itemID = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
+ let itemGUID = store.GUIDForId(itemID);
+ let itemPayload = store.createRecord(itemGUID).cleartext;
+ coll.insert(itemGUID, encryptPayload(itemPayload));
+
+ engine.lastSync = 1; // So we don't back up.
+
+ // Make building the GUID map fail.
+
+ let pbt = PlacesUtils.promiseBookmarksTree;
+ PlacesUtils.promiseBookmarksTree = function() { return Promise.reject("Nooo"); };
+
+ // Ensure that we throw when accessing _guidMap.
+ engine._syncStartup();
+ _("No error.");
+ do_check_false(engine._guidMapFailed);
+
+ _("We get an error if building _guidMap fails in use.");
+ let err;
+ try {
+ _(engine._guidMap);
+ } catch (ex) {
+ err = ex;
+ }
+ do_check_eq(err.code, Engine.prototype.eEngineAbortApplyIncoming);
+ do_check_eq(err.cause, "Nooo");
+
+ _("We get an error and abort during processIncoming.");
+ err = undefined;
+ try {
+ engine._processIncoming();
+ } catch (ex) {
+ err = ex;
+ }
+ do_check_eq(err, "Nooo");
+
+ PlacesUtils.promiseBookmarksTree = pbt;
+ yield new Promise(r => server.stop(r));
+});
+
+add_task(function* test_bookmark_tag_but_no_uri() {
+ _("Ensure that a bookmark record with tags, but no URI, doesn't throw an exception.");
+
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+
+ // We're simply checking that no exception is thrown, so
+ // no actual checks in this test.
+
+ yield PlacesSyncUtils.bookmarks.insert({
+ kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
+ syncId: Utils.makeGUID(),
+ parentSyncId: "toolbar",
+ url: "http://example.com",
+ tags: ["foo"],
+ });
+ yield PlacesSyncUtils.bookmarks.insert({
+ kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
+ syncId: Utils.makeGUID(),
+ parentSyncId: "toolbar",
+ url: "http://example.org",
+ tags: null,
+ });
+ yield PlacesSyncUtils.bookmarks.insert({
+ kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
+ syncId: Utils.makeGUID(),
+ url: "about:fake",
+ parentSyncId: "toolbar",
+ tags: null,
+ });
+
+ let record = new FakeRecord(BookmarkFolder, {
+ parentid: "toolbar",
+ id: Utils.makeGUID(),
+ description: "",
+ tags: ["foo"],
+ title: "Taggy tag",
+ type: "folder"
+ });
+
+ store.create(record);
+ record.tags = ["bar"];
+ store.update(record);
+});
+
+add_task(function* test_misreconciled_root() {
+ _("Ensure that we don't reconcile an arbitrary record with a root.");
+
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let server = serverForFoo(engine);
+
+ // Log real hard for this test.
+ store._log.trace = store._log.debug;
+ engine._log.trace = engine._log.debug;
+
+ engine._syncStartup();
+
+ // Let's find out where the toolbar is right now.
+ let toolbarBefore = store.createRecord("toolbar", "bookmarks");
+ let toolbarIDBefore = store.idForGUID("toolbar");
+ do_check_neq(-1, toolbarIDBefore);
+
+ let parentGUIDBefore = toolbarBefore.parentid;
+ let parentIDBefore = store.idForGUID(parentGUIDBefore);
+ do_check_neq(-1, parentIDBefore);
+ do_check_eq("string", typeof(parentGUIDBefore));
+
+ _("Current parent: " + parentGUIDBefore + " (" + parentIDBefore + ").");
+
+ let to_apply = {
+ id: "zzzzzzzzzzzz",
+ type: "folder",
+ title: "Bookmarks Toolbar",
+ description: "Now you're for it.",
+ parentName: "",
+ parentid: "mobile", // Why not?
+ children: [],
+ };
+
+ let rec = new FakeRecord(BookmarkFolder, to_apply);
+ let encrypted = encryptPayload(rec.cleartext);
+ encrypted.decrypt = function () {
+ for (let x in rec) {
+ encrypted[x] = rec[x];
+ }
+ };
+
+ _("Applying record.");
+ engine._processIncoming({
+ getBatched() {
+ return this.get();
+ },
+ get: function () {
+ this.recordHandler(encrypted);
+ return {success: true}
+ },
+ });
+
+ // Ensure that afterwards, toolbar is still there.
+ // As of 2012-12-05, this only passes because Places doesn't use "toolbar" as
+ // the real GUID, instead using a generated one. Sync does the translation.
+ let toolbarAfter = store.createRecord("toolbar", "bookmarks");
+ let parentGUIDAfter = toolbarAfter.parentid;
+ let parentIDAfter = store.idForGUID(parentGUIDAfter);
+ do_check_eq(store.GUIDForId(toolbarIDBefore), "toolbar");
+ do_check_eq(parentGUIDBefore, parentGUIDAfter);
+ do_check_eq(parentIDBefore, parentIDAfter);
+
+ yield new Promise(r => server.stop(r));
+});
+
+function run_test() {
+ initTestLogging("Trace");
+ generateNewKeys(Service.collectionKeys);
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_bookmark_invalid.js b/services/sync/tests/unit/test_bookmark_invalid.js
new file mode 100644
index 000000000..af476a7f9
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_invalid.js
@@ -0,0 +1,63 @@
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+Service.engineManager.register(BookmarksEngine);
+
+var engine = Service.engineManager.get("bookmarks");
+var store = engine._store;
+var tracker = engine._tracker;
+
+add_task(function* test_ignore_invalid_uri() {
+ _("Ensure that we don't die with invalid bookmarks.");
+
+ // First create a valid bookmark.
+ let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
+ Services.io.newURI("http://example.com/", null, null),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "the title");
+
+ // Now update moz_places with an invalid url.
+ yield PlacesUtils.withConnectionWrapper("test_ignore_invalid_uri", Task.async(function* (db) {
+ yield db.execute(
+ `UPDATE moz_places SET url = :url, url_hash = hash(:url)
+ WHERE id = (SELECT b.fk FROM moz_bookmarks b
+ WHERE b.id = :id LIMIT 1)`,
+ { id: bmid, url: "<invalid url>" });
+ }));
+
+ // Ensure that this doesn't throw even though the DB is now in a bad state (a
+ // bookmark has an illegal url).
+ engine._buildGUIDMap();
+});
+
+add_task(function* test_ignore_missing_uri() {
+ _("Ensure that we don't die with a bookmark referencing an invalid bookmark id.");
+
+ // First create a valid bookmark.
+ let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
+ Services.io.newURI("http://example.com/", null, null),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "the title");
+
+ // Now update moz_bookmarks to reference a non-existing places ID
+ yield PlacesUtils.withConnectionWrapper("test_ignore_missing_uri", Task.async(function* (db) {
+ yield db.execute(
+ `UPDATE moz_bookmarks SET fk = 999999
+ WHERE id = :id`
+ , { id: bmid });
+ }));
+
+ // Ensure that this doesn't throw even though the DB is now in a bad state (a
+ // bookmark has an illegal url).
+ engine._buildGUIDMap();
+});
+
+function run_test() {
+ initTestLogging('Trace');
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js b/services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js
new file mode 100644
index 000000000..207372ed6
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_legacy_microsummaries_support.js
@@ -0,0 +1,99 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// Tests that Sync can correctly handle a legacy microsummary record
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/NetUtil.jsm");
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+const GENERATORURI_ANNO = "microsummary/generatorURI";
+const STATICTITLE_ANNO = "bookmarks/staticTitle";
+
+const TEST_URL = "http://micsum.mozilla.org/";
+const TEST_TITLE = "A microsummarized bookmark"
+const GENERATOR_URL = "http://generate.micsum/"
+const STATIC_TITLE = "Static title"
+
+function newMicrosummary(url, title) {
+ let id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.unfiledBookmarksFolderId, NetUtil.newURI(url),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, title
+ );
+ PlacesUtils.annotations.setItemAnnotation(id, GENERATORURI_ANNO,
+ GENERATOR_URL, 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ PlacesUtils.annotations.setItemAnnotation(id, STATICTITLE_ANNO,
+ "Static title", 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ return id;
+}
+
+function run_test() {
+
+ Service.engineManager.register(BookmarksEngine);
+ let engine = Service.engineManager.get("bookmarks");
+ let store = engine._store;
+
+ // Clean up.
+ store.wipe();
+
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
+
+ _("Create a microsummarized bookmark.");
+ let id = newMicrosummary(TEST_URL, TEST_TITLE);
+ let guid = store.GUIDForId(id);
+ _("GUID: " + guid);
+ do_check_true(!!guid);
+
+ _("Create record object and verify that it's sane.");
+ let record = store.createRecord(guid);
+ do_check_true(record instanceof Bookmark);
+ do_check_eq(record.bmkUri, TEST_URL);
+
+ _("Make sure the new record does not carry the microsummaries annotations.");
+ do_check_false("staticTitle" in record);
+ do_check_false("generatorUri" in record);
+
+ _("Remove the bookmark from Places.");
+ PlacesUtils.bookmarks.removeItem(id);
+
+ _("Convert record to the old microsummaries one.");
+ record.staticTitle = STATIC_TITLE;
+ record.generatorUri = GENERATOR_URL;
+ record.type = "microsummary";
+
+ _("Apply the modified record as incoming data.");
+ store.applyIncoming(record);
+
+ _("Verify it has been created correctly as a simple Bookmark.");
+ id = store.idForGUID(record.id);
+ do_check_eq(store.GUIDForId(id), record.id);
+ do_check_eq(PlacesUtils.bookmarks.getItemType(id),
+ PlacesUtils.bookmarks.TYPE_BOOKMARK);
+ do_check_eq(PlacesUtils.bookmarks.getBookmarkURI(id).spec, TEST_URL);
+ do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), TEST_TITLE);
+ do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
+ PlacesUtils.unfiledBookmarksFolderId);
+ do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), null);
+
+ do_check_throws(
+ () => PlacesUtils.annotations.getItemAnnotation(id, GENERATORURI_ANNO),
+ Cr.NS_ERROR_NOT_AVAILABLE
+ );
+
+ do_check_throws(
+ () => PlacesUtils.annotations.getItemAnnotation(id, STATICTITLE_ANNO),
+ Cr.NS_ERROR_NOT_AVAILABLE
+ );
+
+ // Clean up.
+ store.wipe();
+}
diff --git a/services/sync/tests/unit/test_bookmark_livemarks.js b/services/sync/tests/unit/test_bookmark_livemarks.js
new file mode 100644
index 000000000..8adde76d8
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_livemarks.js
@@ -0,0 +1,134 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://testing-common/services/common/utils.js");
+
+const DESCRIPTION_ANNO = "bookmarkProperties/description";
+
+var engine = Service.engineManager.get("bookmarks");
+var store = engine._store;
+
+// Record borrowed from Bug 631361.
+var record631361 = {
+ id: "M5bwUKK8hPyF",
+ index: 150,
+ modified: 1296768176.49,
+ payload:
+ {"id":"M5bwUKK8hPyF",
+ "type":"livemark",
+ "siteUri":"http://www.bbc.co.uk/go/rss/int/news/-/news/",
+ "feedUri":"http://fxfeeds.mozilla.com/en-US/firefox/headlines.xml",
+ "parentName":"Bookmarks Toolbar",
+ "parentid":"toolbar",
+ "title":"Latest Headlines",
+ "description":"",
+ "children":
+ ["7oBdEZB-8BMO", "SUd1wktMNCTB", "eZe4QWzo1BcY", "YNBhGwhVnQsN",
+ "92Aw2SMEkFg0", "uw0uKqrVFwd-", "x7mx2P3--8FJ", "d-jVF8UuC9Ye",
+ "DV1XVtKLEiZ5", "g4mTaTjr837Z", "1Zi5W3lwBw8T", "FEYqlUHtbBWS",
+ "qQd2u7LjosCB", "VUs2djqYfbvn", "KuhYnHocu7eg", "u2gcg9ILRg-3",
+ "hfK_RP-EC7Ol", "Aq5qsa4E5msH", "6pZIbxuJTn-K", "k_fp0iN3yYMR",
+ "59YD3iNOYO8O", "01afpSdAk2iz", "Cq-kjXDEPIoP", "HtNTjt9UwWWg",
+ "IOU8QRSrTR--", "HJ5lSlBx6d1D", "j2dz5R5U6Khc", "5GvEjrNR0yJl",
+ "67ozIBF5pNVP", "r5YB0cUx6C_w", "FtmFDBNxDQ6J", "BTACeZq9eEtw",
+ "ll4ozQ-_VNJe", "HpImsA4_XuW7", "nJvCUQPLSXwA", "94LG-lh6TUYe",
+ "WHn_QoOL94Os", "l-RvjgsZYlej", "LipQ8abcRstN", "74TiLvarE3n_",
+ "8fCiLQpQGK1P", "Z6h4WkbwfQFa", "GgAzhqakoS6g", "qyt92T8vpMsK",
+ "RyOgVCe2EAOE", "bgSEhW3w6kk5", "hWODjHKGD7Ph", "Cky673aqOHbT",
+ "gZCYT7nx3Nwu", "iJzaJxxrM58L", "rUHCRv68aY5L", "6Jc1hNJiVrV9",
+ "lmNgoayZ-ym8", "R1lyXsDzlfOd", "pinrXwDnRk6g", "Sn7TmZV01vMM",
+ "qoXyU6tcS1dd", "TRLanED-QfBK", "xHbhMeX_FYEA", "aPqacdRlAtaW",
+ "E3H04Wn2RfSi", "eaSIMI6kSrcz", "rtkRxFoG5Vqi", "dectkUglV0Dz",
+ "B4vUE0BE15No", "qgQFW5AQrgB0", "SxAXvwOhu8Zi", "0S6cRPOg-5Z2",
+ "zcZZBGeLnaWW", "B0at8hkQqVZQ", "sgPtgGulbP66", "lwtwGHSCPYaQ",
+ "mNTdpgoRZMbW", "-L8Vci6CbkJY", "bVzudKSQERc1", "Gxl9lb4DXsmL",
+ "3Qr13GucOtEh"]},
+ collection: "bookmarks"
+};
+
+// Clean up after other tests. Only necessary in XULRunner.
+store.wipe();
+
+function makeLivemark(p, mintGUID) {
+ let b = new Livemark("bookmarks", p.id);
+ // Copy here, because tests mutate the contents.
+ b.cleartext = TestingUtils.deepCopy(p);
+
+ if (mintGUID)
+ b.id = Utils.makeGUID();
+
+ return b;
+}
+
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Store.Bookmarks").level = Log.Level.Trace;
+
+ run_next_test();
+}
+
+add_test(function test_livemark_descriptions() {
+ let record = record631361.payload;
+
+ function doRecord(r) {
+ store._childrenToOrder = {};
+ store.applyIncoming(r);
+ store._orderChildren();
+ delete store._childrenToOrder;
+ }
+
+ // Attempt to provoke an error by messing around with the description.
+ record.description = null;
+ doRecord(makeLivemark(record));
+ record.description = "";
+ doRecord(makeLivemark(record));
+
+ // Attempt to provoke an error by adding a bad description anno.
+ let id = store.idForGUID(record.id);
+ PlacesUtils.annotations.setItemAnnotation(id, DESCRIPTION_ANNO, "", 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+
+ run_next_test();
+});
+
+add_test(function test_livemark_invalid() {
+ _("Livemarks considered invalid by nsLivemarkService are skipped.");
+
+ _("Parent is unknown. Will be set to unfiled.");
+ let lateParentRec = makeLivemark(record631361.payload, true);
+ let parentGUID = Utils.makeGUID();
+ lateParentRec.parentid = parentGUID;
+ do_check_eq(-1, store.idForGUID(parentGUID));
+
+ store.create(lateParentRec);
+ recID = store.idForGUID(lateParentRec.id, true);
+ do_check_true(recID > 0);
+ do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(recID),
+ PlacesUtils.bookmarks.unfiledBookmarksFolder);
+
+ _("No feed URI, which is invalid. Will be skipped.");
+ let noFeedURIRec = makeLivemark(record631361.payload, true);
+ delete noFeedURIRec.cleartext.feedUri;
+ store.create(noFeedURIRec);
+ // No exception, but no creation occurs.
+ do_check_eq(-1, store.idForGUID(noFeedURIRec.id, true));
+
+ _("Parent is a Livemark. Will be skipped.");
+ let lmParentRec = makeLivemark(record631361.payload, true);
+ lmParentRec.parentid = store.GUIDForId(recID);
+ store.create(lmParentRec);
+ // No exception, but no creation occurs.
+ do_check_eq(-1, store.idForGUID(lmParentRec.id, true));
+
+ // Clear event loop.
+ Utils.nextTick(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_bookmark_order.js b/services/sync/tests/unit/test_bookmark_order.js
new file mode 100644
index 000000000..7625a813f
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_order.js
@@ -0,0 +1,529 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Making sure after processing incoming bookmarks, they show up in the right order");
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+var check = Task.async(function* (expected, message) {
+ let root = yield PlacesUtils.promiseBookmarksTree();
+
+ let bookmarks = (function mapTree(children) {
+ return children.map(child => {
+ let result = {
+ guid: child.guid,
+ index: child.index,
+ };
+ if (child.children) {
+ result.children = mapTree(child.children);
+ }
+ if (child.annos) {
+ let orphanAnno = child.annos.find(
+ anno => anno.name == "sync/parent");
+ if (orphanAnno) {
+ result.requestedParent = orphanAnno.value;
+ }
+ }
+ return result;
+ });
+ }(root.children));
+
+ _("Checking if the bookmark structure is", JSON.stringify(expected));
+ _("Got bookmarks:", JSON.stringify(bookmarks));
+ deepEqual(bookmarks, expected);
+});
+
+add_task(function* test_bookmark_order() {
+ let store = new BookmarksEngine(Service)._store;
+ initTestLogging("Trace");
+
+ _("Starting with a clean slate of no bookmarks");
+ store.wipe();
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ // Index 2 is the tags root. (Root indices depend on the order of the
+ // `CreateRoot` calls in `Database::CreateBookmarkRoots`).
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "clean slate");
+
+ function bookmark(name, parent) {
+ let bookmark = new Bookmark("http://weave.server/my-bookmark");
+ bookmark.id = name;
+ bookmark.title = name;
+ bookmark.bmkUri = "http://uri/";
+ bookmark.parentid = parent || "unfiled";
+ bookmark.tags = [];
+ return bookmark;
+ }
+
+ function folder(name, parent, children) {
+ let folder = new BookmarkFolder("http://weave.server/my-bookmark-folder");
+ folder.id = name;
+ folder.title = name;
+ folder.parentid = parent || "unfiled";
+ folder.children = children;
+ return folder;
+ }
+
+ function apply(record) {
+ store._childrenToOrder = {};
+ store.applyIncoming(record);
+ store._orderChildren();
+ delete store._childrenToOrder;
+ }
+ let id10 = "10_aaaaaaaaa";
+ _("basic add first bookmark");
+ apply(bookmark(id10, ""));
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "basic add first bookmark");
+ let id20 = "20_aaaaaaaaa";
+ _("basic append behind 10");
+ apply(bookmark(id20, ""));
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "basic append behind 10");
+
+ let id31 = "31_aaaaaaaaa";
+ let id30 = "f30_aaaaaaaa";
+ _("basic create in folder");
+ apply(bookmark(id31, id30));
+ let f30 = folder(id30, "", [id31]);
+ apply(f30);
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }, {
+ guid: id30,
+ index: 2,
+ children: [{
+ guid: id31,
+ index: 0,
+ }],
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "basic create in folder");
+
+ let id41 = "41_aaaaaaaaa";
+ let id40 = "f40_aaaaaaaa";
+ _("insert missing parent -> append to unfiled");
+ apply(bookmark(id41, id40));
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }, {
+ guid: id30,
+ index: 2,
+ children: [{
+ guid: id31,
+ index: 0,
+ }],
+ }, {
+ guid: id41,
+ index: 3,
+ requestedParent: id40,
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "insert missing parent -> append to unfiled");
+
+ let id42 = "42_aaaaaaaaa";
+
+ _("insert another missing parent -> append");
+ apply(bookmark(id42, id40));
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }, {
+ guid: id30,
+ index: 2,
+ children: [{
+ guid: id31,
+ index: 0,
+ }],
+ }, {
+ guid: id41,
+ index: 3,
+ requestedParent: id40,
+ }, {
+ guid: id42,
+ index: 4,
+ requestedParent: id40,
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "insert another missing parent -> append");
+
+ _("insert folder -> move children and followers");
+ let f40 = folder(id40, "", [id41, id42]);
+ apply(f40);
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }, {
+ guid: id30,
+ index: 2,
+ children: [{
+ guid: id31,
+ index: 0,
+ }],
+ }, {
+ guid: id40,
+ index: 3,
+ children: [{
+ guid: id41,
+ index: 0,
+ }, {
+ guid: id42,
+ index: 1,
+ }]
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "insert folder -> move children and followers");
+
+ _("Moving 41 behind 42 -> update f40");
+ f40.children = [id42, id41];
+ apply(f40);
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }, {
+ guid: id30,
+ index: 2,
+ children: [{
+ guid: id31,
+ index: 0,
+ }],
+ }, {
+ guid: id40,
+ index: 3,
+ children: [{
+ guid: id42,
+ index: 0,
+ }, {
+ guid: id41,
+ index: 1,
+ }]
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "Moving 41 behind 42 -> update f40");
+
+ _("Moving 10 back to front -> update 10, 20");
+ f40.children = [id41, id42];
+ apply(f40);
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }, {
+ guid: id30,
+ index: 2,
+ children: [{
+ guid: id31,
+ index: 0,
+ }],
+ }, {
+ guid: id40,
+ index: 3,
+ children: [{
+ guid: id41,
+ index: 0,
+ }, {
+ guid: id42,
+ index: 1,
+ }]
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "Moving 10 back to front -> update 10, 20");
+
+ _("Moving 20 behind 42 in f40 -> update 50");
+ apply(bookmark(id20, id40));
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id30,
+ index: 1,
+ children: [{
+ guid: id31,
+ index: 0,
+ }],
+ }, {
+ guid: id40,
+ index: 2,
+ children: [{
+ guid: id41,
+ index: 0,
+ }, {
+ guid: id42,
+ index: 1,
+ }, {
+ guid: id20,
+ index: 2,
+ }]
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "Moving 20 behind 42 in f40 -> update 50");
+
+ _("Moving 10 in front of 31 in f30 -> update 10, f30");
+ apply(bookmark(id10, id30));
+ f30.children = [id10, id31];
+ apply(f30);
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id30,
+ index: 0,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id31,
+ index: 1,
+ }],
+ }, {
+ guid: id40,
+ index: 1,
+ children: [{
+ guid: id41,
+ index: 0,
+ }, {
+ guid: id42,
+ index: 1,
+ }, {
+ guid: id20,
+ index: 2,
+ }]
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "Moving 10 in front of 31 in f30 -> update 10, f30");
+
+ _("Moving 20 from f40 to f30 -> update 20, f30");
+ apply(bookmark(id20, id30));
+ f30.children = [id10, id20, id31];
+ apply(f30);
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id30,
+ index: 0,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id20,
+ index: 1,
+ }, {
+ guid: id31,
+ index: 2,
+ }],
+ }, {
+ guid: id40,
+ index: 1,
+ children: [{
+ guid: id41,
+ index: 0,
+ }, {
+ guid: id42,
+ index: 1,
+ }]
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "Moving 20 from f40 to f30 -> update 20, f30");
+
+ _("Move 20 back to front -> update 20, f30");
+ apply(bookmark(id20, ""));
+ f30.children = [id10, id31];
+ apply(f30);
+ yield check([{
+ guid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ }, {
+ guid: PlacesUtils.bookmarks.toolbarGuid,
+ index: 1,
+ }, {
+ guid: PlacesUtils.bookmarks.unfiledGuid,
+ index: 3,
+ children: [{
+ guid: id30,
+ index: 0,
+ children: [{
+ guid: id10,
+ index: 0,
+ }, {
+ guid: id31,
+ index: 1,
+ }],
+ }, {
+ guid: id40,
+ index: 1,
+ children: [{
+ guid: id41,
+ index: 0,
+ }, {
+ guid: id42,
+ index: 1,
+ }],
+ }, {
+ guid: id20,
+ index: 2,
+ }],
+ }, {
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ index: 4,
+ }], "Move 20 back to front -> update 20, f30");
+
+});
diff --git a/services/sync/tests/unit/test_bookmark_places_query_rewriting.js b/services/sync/tests/unit/test_bookmark_places_query_rewriting.js
new file mode 100644
index 000000000..0ddf81583
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_places_query_rewriting.js
@@ -0,0 +1,60 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Rewrite place: URIs.");
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+var engine = new BookmarksEngine(Service);
+var store = engine._store;
+
+function makeTagRecord(id, uri) {
+ let tagRecord = new BookmarkQuery("bookmarks", id);
+ tagRecord.queryId = "MagicTags";
+ tagRecord.parentName = "Bookmarks Toolbar";
+ tagRecord.bmkUri = uri;
+ tagRecord.title = "tagtag";
+ tagRecord.folderName = "bar";
+ tagRecord.parentid = PlacesUtils.bookmarks.toolbarGuid;
+ return tagRecord;
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Store.Bookmarks").level = Log.Level.Trace;
+
+ let uri = "place:folder=499&type=7&queryType=1";
+ let tagRecord = makeTagRecord("abcdefabcdef", uri);
+
+ _("Type: " + tagRecord.type);
+ _("Folder name: " + tagRecord.folderName);
+ store.applyIncoming(tagRecord);
+
+ let tags = PlacesUtils.getFolderContents(PlacesUtils.tagsFolderId).root;
+ let tagID;
+ try {
+ for (let i = 0; i < tags.childCount; ++i) {
+ let child = tags.getChild(i);
+ if (child.title == "bar") {
+ tagID = child.itemId;
+ }
+ }
+ } finally {
+ tags.containerOpen = false;
+ }
+
+ _("Tag ID: " + tagID);
+ let insertedRecord = store.createRecord("abcdefabcdef", "bookmarks");
+ do_check_eq(insertedRecord.bmkUri, uri.replace("499", tagID));
+
+ _("... but not if the type is wrong.");
+ let wrongTypeURI = "place:folder=499&type=2&queryType=1";
+ let wrongTypeRecord = makeTagRecord("fedcbafedcba", wrongTypeURI);
+ store.applyIncoming(wrongTypeRecord);
+
+ insertedRecord = store.createRecord("fedcbafedcba", "bookmarks");
+ do_check_eq(insertedRecord.bmkUri, wrongTypeURI);
+}
diff --git a/services/sync/tests/unit/test_bookmark_record.js b/services/sync/tests/unit/test_bookmark_record.js
new file mode 100644
index 000000000..194fef5e2
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_record.js
@@ -0,0 +1,48 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function prepareBookmarkItem(collection, id) {
+ let b = new Bookmark(collection, id);
+ b.cleartext.stuff = "my payload here";
+ return b;
+}
+
+function run_test() {
+ ensureLegacyIdentityManager();
+ Service.identity.username = "john@example.com";
+ Service.identity.syncKey = "abcdeabcdeabcdeabcdeabcdea";
+ generateNewKeys(Service.collectionKeys);
+ let keyBundle = Service.identity.syncKeyBundle;
+
+ let log = Log.repository.getLogger("Test");
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ log.info("Creating a record");
+
+ let u = "http://localhost:8080/storage/bookmarks/foo";
+ let placesItem = new PlacesItem("bookmarks", "foo", "bookmark");
+ let bookmarkItem = prepareBookmarkItem("bookmarks", "foo");
+
+ log.info("Checking getTypeObject");
+ do_check_eq(placesItem.getTypeObject(placesItem.type), Bookmark);
+ do_check_eq(bookmarkItem.getTypeObject(bookmarkItem.type), Bookmark);
+
+ bookmarkItem.encrypt(keyBundle);
+ log.info("Ciphertext is " + bookmarkItem.ciphertext);
+ do_check_true(bookmarkItem.ciphertext != null);
+
+ log.info("Decrypting the record");
+
+ let payload = bookmarkItem.decrypt(keyBundle);
+ do_check_eq(payload.stuff, "my payload here");
+ do_check_eq(bookmarkItem.getTypeObject(bookmarkItem.type), Bookmark);
+ do_check_neq(payload, bookmarkItem.payload); // wrap.data.payload is the encrypted one
+}
diff --git a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
new file mode 100644
index 000000000..942cf2761
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
@@ -0,0 +1,235 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+const SMART_BOOKMARKS_ANNO = "Places/SmartBookmark";
+var IOService = Cc["@mozilla.org/network/io-service;1"]
+ .getService(Ci.nsIIOService);
+("http://www.mozilla.com", null, null);
+
+
+Service.engineManager.register(BookmarksEngine);
+var engine = Service.engineManager.get("bookmarks");
+var store = engine._store;
+
+// Clean up after other tests. Only necessary in XULRunner.
+store.wipe();
+
+function newSmartBookmark(parent, uri, position, title, queryID) {
+ let id = PlacesUtils.bookmarks.insertBookmark(parent, uri, position, title);
+ PlacesUtils.annotations.setItemAnnotation(id, SMART_BOOKMARKS_ANNO,
+ queryID, 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ return id;
+}
+
+function smartBookmarkCount() {
+ // We do it this way because PlacesUtils.annotations.getItemsWithAnnotation
+ // doesn't work the same (or at all?) between 3.6 and 4.0.
+ let out = {};
+ PlacesUtils.annotations.getItemsWithAnnotation(SMART_BOOKMARKS_ANNO, out);
+ return out.value;
+}
+
+function clearBookmarks() {
+ _("Cleaning up existing items.");
+ PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.bookmarksMenuFolder);
+ PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.tagsFolder);
+ PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.toolbarFolder);
+ PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.unfiledBookmarksFolder);
+ startCount = smartBookmarkCount();
+}
+
+function serverForFoo(engine) {
+ return serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {bookmarks: {version: engine.version,
+ syncID: engine.syncID}}}},
+ bookmarks: {}
+ });
+}
+
+// Verify that Places smart bookmarks have their annotation uploaded and
+// handled locally.
+add_task(function *test_annotation_uploaded() {
+ let server = serverForFoo(engine);
+ new SyncTestingInfrastructure(server.server);
+
+ let startCount = smartBookmarkCount();
+
+ _("Start count is " + startCount);
+
+ if (startCount > 0) {
+ // This can happen in XULRunner.
+ clearBookmarks();
+ _("Start count is now " + startCount);
+ }
+
+ _("Create a smart bookmark in the toolbar.");
+ let parent = PlacesUtils.toolbarFolderId;
+ let uri =
+ Utils.makeURI("place:sort=" +
+ Ci.nsINavHistoryQueryOptions.SORT_BY_VISITCOUNT_DESCENDING +
+ "&maxResults=10");
+ let title = "Most Visited";
+
+ let mostVisitedID = newSmartBookmark(parent, uri, -1, title, "MostVisited");
+
+ _("New item ID: " + mostVisitedID);
+ do_check_true(!!mostVisitedID);
+
+ let annoValue = PlacesUtils.annotations.getItemAnnotation(mostVisitedID,
+ SMART_BOOKMARKS_ANNO);
+ _("Anno: " + annoValue);
+ do_check_eq("MostVisited", annoValue);
+
+ let guid = store.GUIDForId(mostVisitedID);
+ _("GUID: " + guid);
+ do_check_true(!!guid);
+
+ _("Create record object and verify that it's sane.");
+ let record = store.createRecord(guid);
+ do_check_true(record instanceof Bookmark);
+ do_check_true(record instanceof BookmarkQuery);
+
+ do_check_eq(record.bmkUri, uri.spec);
+
+ _("Make sure the new record carries with it the annotation.");
+ do_check_eq("MostVisited", record.queryId);
+
+ _("Our count has increased since we started.");
+ do_check_eq(smartBookmarkCount(), startCount + 1);
+
+ _("Sync record to the server.");
+ let collection = server.user("foo").collection("bookmarks");
+
+ try {
+ yield sync_engine_and_validate_telem(engine, false);
+ let wbos = collection.keys(function (id) {
+ return ["menu", "toolbar", "mobile", "unfiled"].indexOf(id) == -1;
+ });
+ do_check_eq(wbos.length, 1);
+
+ _("Verify that the server WBO has the annotation.");
+ let serverGUID = wbos[0];
+ do_check_eq(serverGUID, guid);
+ let serverWBO = collection.wbo(serverGUID);
+ do_check_true(!!serverWBO);
+ let body = JSON.parse(JSON.parse(serverWBO.payload).ciphertext);
+ do_check_eq(body.queryId, "MostVisited");
+
+ _("We still have the right count.");
+ do_check_eq(smartBookmarkCount(), startCount + 1);
+
+ _("Clear local records; now we can't find it.");
+
+ // "Clear" by changing attributes: if we delete it, apparently it sticks
+ // around as a deleted record...
+ PlacesUtils.bookmarks.setItemTitle(mostVisitedID, "Not Most Visited");
+ PlacesUtils.bookmarks.changeBookmarkURI(
+ mostVisitedID, Utils.makeURI("http://something/else"));
+ PlacesUtils.annotations.removeItemAnnotation(mostVisitedID,
+ SMART_BOOKMARKS_ANNO);
+ store.wipe();
+ engine.resetClient();
+ do_check_eq(smartBookmarkCount(), startCount);
+
+ _("Sync. Verify that the downloaded record carries the annotation.");
+ yield sync_engine_and_validate_telem(engine, false);
+
+ _("Verify that the Places DB now has an annotated bookmark.");
+ _("Our count has increased again.");
+ do_check_eq(smartBookmarkCount(), startCount + 1);
+
+ _("Find by GUID and verify that it's annotated.");
+ let newID = store.idForGUID(serverGUID);
+ let newAnnoValue = PlacesUtils.annotations.getItemAnnotation(
+ newID, SMART_BOOKMARKS_ANNO);
+ do_check_eq(newAnnoValue, "MostVisited");
+ do_check_eq(PlacesUtils.bookmarks.getBookmarkURI(newID).spec, uri.spec);
+
+ _("Test updating.");
+ let newRecord = store.createRecord(serverGUID);
+ do_check_eq(newRecord.queryId, newAnnoValue);
+ newRecord.queryId = "LeastVisited";
+ store.update(newRecord);
+ do_check_eq("LeastVisited", PlacesUtils.annotations.getItemAnnotation(
+ newID, SMART_BOOKMARKS_ANNO));
+
+
+ } finally {
+ // Clean up.
+ store.wipe();
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_smart_bookmarks_duped() {
+ let server = serverForFoo(engine);
+ new SyncTestingInfrastructure(server.server);
+
+ let parent = PlacesUtils.toolbarFolderId;
+ let uri =
+ Utils.makeURI("place:sort=" +
+ Ci.nsINavHistoryQueryOptions.SORT_BY_VISITCOUNT_DESCENDING +
+ "&maxResults=10");
+ let title = "Most Visited";
+ let mostVisitedID = newSmartBookmark(parent, uri, -1, title, "MostVisited");
+ let mostVisitedGUID = store.GUIDForId(mostVisitedID);
+
+ let record = store.createRecord(mostVisitedGUID);
+
+ _("Prepare sync.");
+ let collection = server.user("foo").collection("bookmarks");
+
+ try {
+ engine._syncStartup();
+
+ _("Verify that mapDupe uses the anno, discovering a dupe regardless of URI.");
+ do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+
+ record.bmkUri = "http://foo/";
+ do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+ do_check_neq(PlacesUtils.bookmarks.getBookmarkURI(mostVisitedID).spec,
+ record.bmkUri);
+
+ _("Verify that different annos don't dupe.");
+ let other = new BookmarkQuery("bookmarks", "abcdefabcdef");
+ other.queryId = "LeastVisited";
+ other.parentName = "Bookmarks Toolbar";
+ other.bmkUri = "place:foo";
+ other.title = "";
+ do_check_eq(undefined, engine._findDupe(other));
+
+ _("Handle records without a queryId entry.");
+ record.bmkUri = uri;
+ delete record.queryId;
+ do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+
+ engine._syncFinish();
+
+ } finally {
+ // Clean up.
+ store.wipe();
+ server.stop(do_test_finished);
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ }
+});
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
+
+ generateNewKeys(Service.collectionKeys);
+
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_bookmark_store.js b/services/sync/tests/unit/test_bookmark_store.js
new file mode 100644
index 000000000..902206ba6
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_store.js
@@ -0,0 +1,534 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+const PARENT_ANNO = "sync/parent";
+
+Service.engineManager.register(BookmarksEngine);
+
+var engine = Service.engineManager.get("bookmarks");
+var store = engine._store;
+var tracker = engine._tracker;
+
+// Don't write some persistence files asynchronously.
+tracker.persistChangedIDs = false;
+
+var fxuri = Utils.makeURI("http://getfirefox.com/");
+var tburi = Utils.makeURI("http://getthunderbird.com/");
+
+add_task(function* test_ignore_specials() {
+ _("Ensure that we can't delete bookmark roots.");
+
+ // Belt...
+ let record = new BookmarkFolder("bookmarks", "toolbar", "folder");
+ record.deleted = true;
+ do_check_neq(null, store.idForGUID("toolbar"));
+
+ store.applyIncoming(record);
+ yield store.deletePending();
+
+ // Ensure that the toolbar exists.
+ do_check_neq(null, store.idForGUID("toolbar"));
+
+ // This will fail painfully in getItemType if the deletion worked.
+ engine._buildGUIDMap();
+
+ // Braces...
+ store.remove(record);
+ yield store.deletePending();
+ do_check_neq(null, store.idForGUID("toolbar"));
+ engine._buildGUIDMap();
+
+ store.wipe();
+});
+
+add_test(function test_bookmark_create() {
+ try {
+ _("Ensure the record isn't present yet.");
+ let ids = PlacesUtils.bookmarks.getBookmarkIdsForURI(fxuri, {});
+ do_check_eq(ids.length, 0);
+
+ _("Let's create a new record.");
+ let fxrecord = new Bookmark("bookmarks", "get-firefox1");
+ fxrecord.bmkUri = fxuri.spec;
+ fxrecord.description = "Firefox is awesome.";
+ fxrecord.title = "Get Firefox!";
+ fxrecord.tags = ["firefox", "awesome", "browser"];
+ fxrecord.keyword = "awesome";
+ fxrecord.loadInSidebar = false;
+ fxrecord.parentName = "Bookmarks Toolbar";
+ fxrecord.parentid = "toolbar";
+ store.applyIncoming(fxrecord);
+
+ _("Verify it has been created correctly.");
+ let id = store.idForGUID(fxrecord.id);
+ do_check_eq(store.GUIDForId(id), fxrecord.id);
+ do_check_eq(PlacesUtils.bookmarks.getItemType(id),
+ PlacesUtils.bookmarks.TYPE_BOOKMARK);
+ do_check_true(PlacesUtils.bookmarks.getBookmarkURI(id).equals(fxuri));
+ do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), fxrecord.title);
+ do_check_eq(PlacesUtils.annotations.getItemAnnotation(id, "bookmarkProperties/description"),
+ fxrecord.description);
+ do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
+ PlacesUtils.bookmarks.toolbarFolder);
+ do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), fxrecord.keyword);
+
+ _("Have the store create a new record object. Verify that it has the same data.");
+ let newrecord = store.createRecord(fxrecord.id);
+ do_check_true(newrecord instanceof Bookmark);
+ for (let property of ["type", "bmkUri", "description", "title",
+ "keyword", "parentName", "parentid"]) {
+ do_check_eq(newrecord[property], fxrecord[property]);
+ }
+ do_check_true(Utils.deepEquals(newrecord.tags.sort(),
+ fxrecord.tags.sort()));
+
+ _("The calculated sort index is based on frecency data.");
+ do_check_true(newrecord.sortindex >= 150);
+
+ _("Create a record with some values missing.");
+ let tbrecord = new Bookmark("bookmarks", "thunderbird1");
+ tbrecord.bmkUri = tburi.spec;
+ tbrecord.parentName = "Bookmarks Toolbar";
+ tbrecord.parentid = "toolbar";
+ store.applyIncoming(tbrecord);
+
+ _("Verify it has been created correctly.");
+ id = store.idForGUID(tbrecord.id);
+ do_check_eq(store.GUIDForId(id), tbrecord.id);
+ do_check_eq(PlacesUtils.bookmarks.getItemType(id),
+ PlacesUtils.bookmarks.TYPE_BOOKMARK);
+ do_check_true(PlacesUtils.bookmarks.getBookmarkURI(id).equals(tburi));
+ do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), null);
+ let error;
+ try {
+ PlacesUtils.annotations.getItemAnnotation(id, "bookmarkProperties/description");
+ } catch(ex) {
+ error = ex;
+ }
+ do_check_eq(error.result, Cr.NS_ERROR_NOT_AVAILABLE);
+ do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
+ PlacesUtils.bookmarks.toolbarFolder);
+ do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), null);
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_test(function test_bookmark_update() {
+ try {
+ _("Create a bookmark whose values we'll change.");
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder, fxuri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ PlacesUtils.annotations.setItemAnnotation(
+ bmk1_id, "bookmarkProperties/description", "Firefox is awesome.", 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ PlacesUtils.bookmarks.setKeywordForBookmark(bmk1_id, "firefox");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+
+ _("Update the record with some null values.");
+ let record = store.createRecord(bmk1_guid);
+ record.title = null;
+ record.description = null;
+ record.keyword = null;
+ record.tags = null;
+ store.applyIncoming(record);
+
+ _("Verify that the values have been cleared.");
+ do_check_throws(function () {
+ PlacesUtils.annotations.getItemAnnotation(
+ bmk1_id, "bookmarkProperties/description");
+ }, Cr.NS_ERROR_NOT_AVAILABLE);
+ do_check_eq(PlacesUtils.bookmarks.getItemTitle(bmk1_id), null);
+ do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(bmk1_id), null);
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_test(function test_bookmark_createRecord() {
+ try {
+ _("Create a bookmark without a description or title.");
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder, fxuri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, null);
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+
+ _("Verify that the record is created accordingly.");
+ let record = store.createRecord(bmk1_guid);
+ do_check_eq(record.title, "");
+ do_check_eq(record.description, null);
+ do_check_eq(record.keyword, null);
+
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_test(function test_folder_create() {
+ try {
+ _("Create a folder.");
+ let folder = new BookmarkFolder("bookmarks", "testfolder-1");
+ folder.parentName = "Bookmarks Toolbar";
+ folder.parentid = "toolbar";
+ folder.title = "Test Folder";
+ store.applyIncoming(folder);
+
+ _("Verify it has been created correctly.");
+ let id = store.idForGUID(folder.id);
+ do_check_eq(PlacesUtils.bookmarks.getItemType(id),
+ PlacesUtils.bookmarks.TYPE_FOLDER);
+ do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), folder.title);
+ do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
+ PlacesUtils.bookmarks.toolbarFolder);
+
+ _("Have the store create a new record object. Verify that it has the same data.");
+ let newrecord = store.createRecord(folder.id);
+ do_check_true(newrecord instanceof BookmarkFolder);
+ for (let property of ["title", "parentName", "parentid"])
+ do_check_eq(newrecord[property], folder[property]);
+
+ _("Folders have high sort index to ensure they're synced first.");
+ do_check_eq(newrecord.sortindex, 1000000);
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_test(function test_folder_createRecord() {
+ try {
+ _("Create a folder.");
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
+ let folder1_guid = store.GUIDForId(folder1_id);
+
+ _("Create two bookmarks in that folder without assigning them GUIDs.");
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
+
+ _("Create a record for the folder and verify basic properties.");
+ let record = store.createRecord(folder1_guid);
+ do_check_true(record instanceof BookmarkFolder);
+ do_check_eq(record.title, "Folder1");
+ do_check_eq(record.parentid, "toolbar");
+ do_check_eq(record.parentName, "Bookmarks Toolbar");
+
+ _("Verify the folder's children. Ensures that the bookmarks were given GUIDs.");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+ let bmk2_guid = store.GUIDForId(bmk2_id);
+ do_check_eq(record.children.length, 2);
+ do_check_eq(record.children[0], bmk1_guid);
+ do_check_eq(record.children[1], bmk2_guid);
+
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_task(function* test_deleted() {
+ try {
+ _("Create a bookmark that will be deleted.");
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder, fxuri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+
+ _("Delete the bookmark through the store.");
+ let record = new PlacesItem("bookmarks", bmk1_guid);
+ record.deleted = true;
+ store.applyIncoming(record);
+ yield store.deletePending();
+ _("Ensure it has been deleted.");
+ let error;
+ try {
+ PlacesUtils.bookmarks.getBookmarkURI(bmk1_id);
+ } catch(ex) {
+ error = ex;
+ }
+ do_check_eq(error.result, Cr.NS_ERROR_ILLEGAL_VALUE);
+
+ let newrec = store.createRecord(bmk1_guid);
+ do_check_eq(newrec.deleted, true);
+
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ }
+});
+
+add_test(function test_move_folder() {
+ try {
+ _("Create two folders and a bookmark in one of them.");
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
+ let folder1_guid = store.GUIDForId(folder1_id);
+ let folder2_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder2", 0);
+ let folder2_guid = store.GUIDForId(folder2_id);
+ let bmk_id = PlacesUtils.bookmarks.insertBookmark(
+ folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bmk_guid = store.GUIDForId(bmk_id);
+
+ _("Get a record, reparent it and apply it to the store.");
+ let record = store.createRecord(bmk_guid);
+ do_check_eq(record.parentid, folder1_guid);
+ record.parentid = folder2_guid;
+ store.applyIncoming(record);
+
+ _("Verify the new parent.");
+ let new_folder_id = PlacesUtils.bookmarks.getFolderIdForItem(bmk_id);
+ do_check_eq(store.GUIDForId(new_folder_id), folder2_guid);
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_test(function test_move_order() {
+ // Make sure the tracker is turned on.
+ Svc.Obs.notify("weave:engine:start-tracking");
+ try {
+ _("Create two bookmarks");
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder, fxuri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+ let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder, tburi,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
+ let bmk2_guid = store.GUIDForId(bmk2_id);
+
+ _("Verify order.");
+ do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 0);
+ do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 1);
+ let toolbar = store.createRecord("toolbar");
+ do_check_eq(toolbar.children.length, 2);
+ do_check_eq(toolbar.children[0], bmk1_guid);
+ do_check_eq(toolbar.children[1], bmk2_guid);
+
+ _("Move bookmarks around.");
+ store._childrenToOrder = {};
+ toolbar.children = [bmk2_guid, bmk1_guid];
+ store.applyIncoming(toolbar);
+ // Bookmarks engine does this at the end of _processIncoming
+ tracker.ignoreAll = true;
+ store._orderChildren();
+ tracker.ignoreAll = false;
+ delete store._childrenToOrder;
+
+ _("Verify new order.");
+ do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 0);
+ do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 1);
+
+ } finally {
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_test(function test_orphan() {
+ try {
+
+ _("Add a new bookmark locally.");
+ let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder, fxuri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bmk1_guid = store.GUIDForId(bmk1_id);
+ do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(bmk1_id),
+ PlacesUtils.bookmarks.toolbarFolder);
+ let error;
+ try {
+ PlacesUtils.annotations.getItemAnnotation(bmk1_id, PARENT_ANNO);
+ } catch(ex) {
+ error = ex;
+ }
+ do_check_eq(error.result, Cr.NS_ERROR_NOT_AVAILABLE);
+
+ _("Apply a server record that is the same but refers to non-existent folder.");
+ let record = store.createRecord(bmk1_guid);
+ record.parentid = "non-existent";
+ store.applyIncoming(record);
+
+ _("Verify that bookmark has been flagged as orphan, has not moved.");
+ do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(bmk1_id),
+ PlacesUtils.bookmarks.toolbarFolder);
+ do_check_eq(PlacesUtils.annotations.getItemAnnotation(bmk1_id, PARENT_ANNO),
+ "non-existent");
+
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+add_test(function test_reparentOrphans() {
+ try {
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
+ let folder1_guid = store.GUIDForId(folder1_id);
+
+ _("Create a bogus orphan record and write the record back to the store to trigger _reparentOrphans.");
+ PlacesUtils.annotations.setItemAnnotation(
+ folder1_id, PARENT_ANNO, folder1_guid, 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ let record = store.createRecord(folder1_guid);
+ record.title = "New title for Folder 1";
+ store._childrenToOrder = {};
+ store.applyIncoming(record);
+
+ _("Verify that is has been marked as an orphan even though it couldn't be moved into itself.");
+ do_check_eq(PlacesUtils.annotations.getItemAnnotation(folder1_id, PARENT_ANNO),
+ folder1_guid);
+
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ run_next_test();
+ }
+});
+
+// Tests Bug 806460, in which query records arrive with empty folder
+// names and missing bookmark URIs.
+add_test(function test_empty_query_doesnt_die() {
+ let record = new BookmarkQuery("bookmarks", "8xoDGqKrXf1P");
+ record.folderName = "";
+ record.queryId = "";
+ record.parentName = "Toolbar";
+ record.parentid = "toolbar";
+
+ // These should not throw.
+ store.applyIncoming(record);
+
+ delete record.folderName;
+ store.applyIncoming(record);
+
+ run_next_test();
+});
+
+function assertDeleted(id) {
+ let error;
+ try {
+ PlacesUtils.bookmarks.getItemType(id);
+ } catch (e) {
+ error = e;
+ }
+ equal(error.result, Cr.NS_ERROR_ILLEGAL_VALUE)
+}
+
+add_task(function* test_delete_buffering() {
+ store.wipe();
+ try {
+ _("Create a folder with two bookmarks.");
+ let folder = new BookmarkFolder("bookmarks", "testfolder-1");
+ folder.parentName = "Bookmarks Toolbar";
+ folder.parentid = "toolbar";
+ folder.title = "Test Folder";
+ store.applyIncoming(folder);
+
+
+ let fxRecord = new Bookmark("bookmarks", "get-firefox1");
+ fxRecord.bmkUri = fxuri.spec;
+ fxRecord.title = "Get Firefox!";
+ fxRecord.parentName = "Test Folder";
+ fxRecord.parentid = "testfolder-1";
+
+ let tbRecord = new Bookmark("bookmarks", "get-tndrbrd1");
+ tbRecord.bmkUri = tburi.spec;
+ tbRecord.title = "Get Thunderbird!";
+ tbRecord.parentName = "Test Folder";
+ tbRecord.parentid = "testfolder-1";
+
+ store.applyIncoming(fxRecord);
+ store.applyIncoming(tbRecord);
+
+ let folderId = store.idForGUID(folder.id);
+ let fxRecordId = store.idForGUID(fxRecord.id);
+ let tbRecordId = store.idForGUID(tbRecord.id);
+
+ _("Check everything was created correctly.");
+
+ equal(PlacesUtils.bookmarks.getItemType(fxRecordId),
+ PlacesUtils.bookmarks.TYPE_BOOKMARK);
+ equal(PlacesUtils.bookmarks.getItemType(tbRecordId),
+ PlacesUtils.bookmarks.TYPE_BOOKMARK);
+ equal(PlacesUtils.bookmarks.getItemType(folderId),
+ PlacesUtils.bookmarks.TYPE_FOLDER);
+
+ equal(PlacesUtils.bookmarks.getFolderIdForItem(fxRecordId), folderId);
+ equal(PlacesUtils.bookmarks.getFolderIdForItem(tbRecordId), folderId);
+ equal(PlacesUtils.bookmarks.getFolderIdForItem(folderId),
+ PlacesUtils.bookmarks.toolbarFolder);
+
+ _("Delete the folder and one bookmark.");
+
+ let deleteFolder = new PlacesItem("bookmarks", "testfolder-1");
+ deleteFolder.deleted = true;
+
+ let deleteFxRecord = new PlacesItem("bookmarks", "get-firefox1");
+ deleteFxRecord.deleted = true;
+
+ store.applyIncoming(deleteFolder);
+ store.applyIncoming(deleteFxRecord);
+
+ _("Check that we haven't deleted them yet, but that the deletions are queued");
+ // these will throw if we've deleted them
+ equal(PlacesUtils.bookmarks.getItemType(fxRecordId),
+ PlacesUtils.bookmarks.TYPE_BOOKMARK);
+
+ equal(PlacesUtils.bookmarks.getItemType(folderId),
+ PlacesUtils.bookmarks.TYPE_FOLDER);
+
+ equal(PlacesUtils.bookmarks.getFolderIdForItem(fxRecordId), folderId);
+
+ ok(store._foldersToDelete.has(folder.id));
+ ok(store._atomsToDelete.has(fxRecord.id));
+ ok(!store._atomsToDelete.has(tbRecord.id));
+
+ _("Process pending deletions and ensure that the right things are deleted.");
+ let updatedGuids = yield store.deletePending();
+
+ deepEqual(updatedGuids.sort(), ["get-tndrbrd1", "toolbar"]);
+
+ assertDeleted(fxRecordId);
+ assertDeleted(folderId);
+
+ ok(!store._foldersToDelete.has(folder.id));
+ ok(!store._atomsToDelete.has(fxRecord.id));
+
+ equal(PlacesUtils.bookmarks.getFolderIdForItem(tbRecordId),
+ PlacesUtils.bookmarks.toolbarFolder);
+
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ }
+});
+
+
+function run_test() {
+ initTestLogging('Trace');
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_bookmark_tracker.js b/services/sync/tests/unit/test_bookmark_tracker.js
new file mode 100644
index 000000000..9b9242579
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_tracker.js
@@ -0,0 +1,1537 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource:///modules/PlacesUIUtils.jsm");
+
+Service.engineManager.register(BookmarksEngine);
+var engine = Service.engineManager.get("bookmarks");
+var store = engine._store;
+var tracker = engine._tracker;
+
+store.wipe();
+tracker.persistChangedIDs = false;
+
+const DAY_IN_MS = 24 * 60 * 60 * 1000;
+
+// Test helpers.
+function* verifyTrackerEmpty() {
+ let changes = engine.pullNewChanges();
+ equal(changes.count(), 0);
+ equal(tracker.score, 0);
+}
+
+function* resetTracker() {
+ tracker.clearChangedIDs();
+ tracker.resetScore();
+}
+
+function* cleanup() {
+ store.wipe();
+ yield resetTracker();
+ yield stopTracking();
+}
+
+// startTracking is a signal that the test wants to notice things that happen
+// after this is called (ie, things already tracked should be discarded.)
+function* startTracking() {
+ Svc.Obs.notify("weave:engine:start-tracking");
+}
+
+function* stopTracking() {
+ Svc.Obs.notify("weave:engine:stop-tracking");
+}
+
+function* verifyTrackedItems(tracked) {
+ let changes = engine.pullNewChanges();
+ let trackedIDs = new Set(changes.ids());
+ for (let guid of tracked) {
+ ok(changes.has(guid), `${guid} should be tracked`);
+ ok(changes.getModifiedTimestamp(guid) > 0,
+ `${guid} should have a modified time`);
+ trackedIDs.delete(guid);
+ }
+ equal(trackedIDs.size, 0, `Unhandled tracked IDs: ${
+ JSON.stringify(Array.from(trackedIDs))}`);
+}
+
+function* verifyTrackedCount(expected) {
+ let changes = engine.pullNewChanges();
+ equal(changes.count(), expected);
+}
+
+// Copied from PlacesSyncUtils.jsm.
+function findAnnoItems(anno, val) {
+ let annos = PlacesUtils.annotations;
+ return annos.getItemsWithAnnotation(anno, {}).filter(id =>
+ annos.getItemAnnotation(id, anno) == val);
+}
+
+add_task(function* test_tracking() {
+ _("Test starting and stopping the tracker");
+
+ let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ "Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
+ function createBmk() {
+ return PlacesUtils.bookmarks.insertBookmark(
+ folder, Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ }
+
+ try {
+ _("Create bookmark. Won't show because we haven't started tracking yet");
+ createBmk();
+ yield verifyTrackedCount(0);
+ do_check_eq(tracker.score, 0);
+
+ _("Tell the tracker to start tracking changes.");
+ yield startTracking();
+ createBmk();
+ // We expect two changed items because the containing folder
+ // changed as well (new child).
+ yield verifyTrackedCount(2);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+
+ _("Notifying twice won't do any harm.");
+ yield startTracking();
+ createBmk();
+ yield verifyTrackedCount(3);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 4);
+
+ _("Let's stop tracking again.");
+ yield resetTracker();
+ yield stopTracking();
+ createBmk();
+ yield verifyTrackedCount(0);
+ do_check_eq(tracker.score, 0);
+
+ _("Notifying twice won't do any harm.");
+ yield stopTracking();
+ createBmk();
+ yield verifyTrackedCount(0);
+ do_check_eq(tracker.score, 0);
+
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_batch_tracking() {
+ _("Test tracker does the correct thing during and after a places 'batch'");
+
+ yield startTracking();
+
+ PlacesUtils.bookmarks.runInBatchMode({
+ runBatched: function() {
+ let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ "Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
+ // We should be tracking the new folder and its parent (and need to jump
+ // through blocking hoops...)
+ Async.promiseSpinningly(Task.spawn(verifyTrackedCount(2)));
+ // But not have bumped the score.
+ do_check_eq(tracker.score, 0);
+ }
+ }, null);
+
+ // Out of batch mode - tracker should be the same, but score should be up.
+ yield verifyTrackedCount(2);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ yield cleanup();
+});
+
+add_task(function* test_nested_batch_tracking() {
+ _("Test tracker does the correct thing if a places 'batch' is nested");
+
+ yield startTracking();
+
+ PlacesUtils.bookmarks.runInBatchMode({
+ runBatched: function() {
+
+ PlacesUtils.bookmarks.runInBatchMode({
+ runBatched: function() {
+ let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ "Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
+ // We should be tracking the new folder and its parent (and need to jump
+ // through blocking hoops...)
+ Async.promiseSpinningly(Task.spawn(verifyTrackedCount(2)));
+ // But not have bumped the score.
+ do_check_eq(tracker.score, 0);
+ }
+ }, null);
+ _("inner batch complete.");
+ // should still not have a score as the outer batch is pending.
+ do_check_eq(tracker.score, 0);
+ }
+ }, null);
+
+ // Out of both batches - tracker should be the same, but score should be up.
+ yield verifyTrackedCount(2);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ yield cleanup();
+});
+
+add_task(function* test_tracker_sql_batching() {
+ _("Test tracker does the correct thing when it is forced to batch SQL queries");
+
+ const SQLITE_MAX_VARIABLE_NUMBER = 999;
+ let numItems = SQLITE_MAX_VARIABLE_NUMBER * 2 + 10;
+ let createdIDs = [];
+
+ yield startTracking();
+
+ PlacesUtils.bookmarks.runInBatchMode({
+ runBatched: function() {
+ for (let i = 0; i < numItems; i++) {
+ let syncBmkID = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.unfiledBookmarksFolder,
+ Utils.makeURI("https://example.org/" + i),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Sync Bookmark " + i);
+ createdIDs.push(syncBmkID);
+ }
+ }
+ }, null);
+
+ do_check_eq(createdIDs.length, numItems);
+ yield verifyTrackedCount(numItems + 1); // the folder is also tracked.
+ yield cleanup();
+});
+
+add_task(function* test_onItemAdded() {
+ _("Items inserted via the synchronous bookmarks API should be tracked");
+
+ try {
+ yield startTracking();
+
+ _("Insert a folder using the sync API");
+ let syncFolderID = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder, "Sync Folder",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let syncFolderGUID = engine._store.GUIDForId(syncFolderID);
+ yield verifyTrackedItems(["menu", syncFolderGUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+
+ yield resetTracker();
+ yield startTracking();
+
+ _("Insert a bookmark using the sync API");
+ let syncBmkID = PlacesUtils.bookmarks.insertBookmark(syncFolderID,
+ Utils.makeURI("https://example.org/sync"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Sync Bookmark");
+ let syncBmkGUID = engine._store.GUIDForId(syncBmkID);
+ yield verifyTrackedItems([syncFolderGUID, syncBmkGUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+
+ yield resetTracker();
+ yield startTracking();
+
+ _("Insert a separator using the sync API");
+ let syncSepID = PlacesUtils.bookmarks.insertSeparator(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ PlacesUtils.bookmarks.getItemIndex(syncFolderID));
+ let syncSepGUID = engine._store.GUIDForId(syncSepID);
+ yield verifyTrackedItems(["menu", syncSepGUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemAdded() {
+ _("Items inserted via the asynchronous bookmarks API should be tracked");
+
+ try {
+ yield startTracking();
+
+ _("Insert a folder using the async API");
+ let asyncFolder = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_FOLDER,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ title: "Async Folder",
+ });
+ yield verifyTrackedItems(["menu", asyncFolder.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+
+ yield resetTracker();
+ yield startTracking();
+
+ _("Insert a bookmark using the async API");
+ let asyncBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: asyncFolder.guid,
+ url: "https://example.org/async",
+ title: "Async Bookmark",
+ });
+ yield verifyTrackedItems([asyncFolder.guid, asyncBmk.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+
+ yield resetTracker();
+ yield startTracking();
+
+ _("Insert a separator using the async API");
+ let asyncSep = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_SEPARATOR,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ index: asyncFolder.index,
+ });
+ yield verifyTrackedItems(["menu", asyncSep.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemChanged() {
+ _("Items updated using the asynchronous bookmarks API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert a bookmark");
+ let fxBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ _(`Firefox GUID: ${fxBmk.guid}`);
+
+ yield startTracking();
+
+ _("Update the bookmark using the async API");
+ yield PlacesUtils.bookmarks.update({
+ guid: fxBmk.guid,
+ title: "Download Firefox",
+ url: "https://www.mozilla.org/firefox",
+ // PlacesUtils.bookmarks.update rejects last modified dates older than
+ // the added date.
+ lastModified: new Date(Date.now() + DAY_IN_MS),
+ });
+
+ yield verifyTrackedItems([fxBmk.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemChanged_itemDates() {
+ _("Changes to item dates should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert a bookmark");
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ _(`Firefox GUID: ${fx_guid}`);
+
+ yield startTracking();
+
+ _("Reset the bookmark's added date");
+ // Convert to microseconds for PRTime.
+ let dateAdded = (Date.now() - DAY_IN_MS) * 1000;
+ PlacesUtils.bookmarks.setItemDateAdded(fx_id, dateAdded);
+ yield verifyTrackedItems([fx_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ yield resetTracker();
+
+ _("Set the bookmark's last modified date");
+ let dateModified = Date.now() * 1000;
+ PlacesUtils.bookmarks.setItemLastModified(fx_id, dateModified);
+ yield verifyTrackedItems([fx_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemChanged_changeBookmarkURI() {
+ _("Changes to bookmark URIs should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert a bookmark");
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ _(`Firefox GUID: ${fx_guid}`);
+
+ _("Set a tracked annotation to make sure we only notify once");
+ PlacesUtils.annotations.setItemAnnotation(
+ fx_id, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+
+ yield startTracking();
+
+ _("Change the bookmark's URI");
+ PlacesUtils.bookmarks.changeBookmarkURI(fx_id,
+ Utils.makeURI("https://www.mozilla.org/firefox"));
+ yield verifyTrackedItems([fx_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemTagged() {
+ _("Items tagged using the synchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Create a folder");
+ let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folderGUID = engine._store.GUIDForId(folder);
+ _("Folder ID: " + folder);
+ _("Folder GUID: " + folderGUID);
+
+ _("Track changes to tags");
+ let uri = Utils.makeURI("http://getfirefox.com");
+ let b = PlacesUtils.bookmarks.insertBookmark(
+ folder, uri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bGUID = engine._store.GUIDForId(b);
+ _("New item is " + b);
+ _("GUID: " + bGUID);
+
+ yield startTracking();
+
+ _("Tag the item");
+ PlacesUtils.tagging.tagURI(uri, ["foo"]);
+
+ // bookmark should be tracked, folder should not be.
+ yield verifyTrackedItems([bGUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 5);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemUntagged() {
+ _("Items untagged using the synchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert tagged bookmarks");
+ let uri = Utils.makeURI("http://getfirefox.com");
+ let fx1ID = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder, uri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let fx1GUID = engine._store.GUIDForId(fx1ID);
+ // Different parent and title; same URL.
+ let fx2ID = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder, uri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Download Firefox");
+ let fx2GUID = engine._store.GUIDForId(fx2ID);
+ PlacesUtils.tagging.tagURI(uri, ["foo"]);
+
+ yield startTracking();
+
+ _("Remove the tag");
+ PlacesUtils.tagging.untagURI(uri, ["foo"]);
+
+ yield verifyTrackedItems([fx1GUID, fx2GUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemUntagged() {
+ _("Items untagged using the asynchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert tagged bookmarks");
+ let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.toolbarGuid,
+ url: "http://getfirefox.com",
+ title: "Download Firefox",
+ });
+ let tag = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_FOLDER,
+ parentGuid: PlacesUtils.bookmarks.tagsGuid,
+ title: "some tag",
+ });
+ let fxTag = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: tag.guid,
+ url: "http://getfirefox.com",
+ });
+
+ yield startTracking();
+
+ _("Remove the tag using the async bookmarks API");
+ yield PlacesUtils.bookmarks.remove(fxTag.guid);
+
+ yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemTagged() {
+ _("Items tagged using the asynchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert untagged bookmarks");
+ let folder1 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_FOLDER,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ title: "Folder 1",
+ });
+ let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: folder1.guid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ let folder2 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_FOLDER,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ title: "Folder 2",
+ });
+ // Different parent and title; same URL.
+ let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: folder2.guid,
+ url: "http://getfirefox.com",
+ title: "Download Firefox",
+ });
+
+ yield startTracking();
+
+ // This will change once tags are moved into a separate table (bug 424160).
+ // We specifically test this case because Bookmarks.jsm updates tagged
+ // bookmarks and notifies observers.
+ _("Insert a tag using the async bookmarks API");
+ let tag = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_FOLDER,
+ parentGuid: PlacesUtils.bookmarks.tagsGuid,
+ title: "some tag",
+ });
+
+ _("Tag an item using the async bookmarks API");
+ yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: tag.guid,
+ url: "http://getfirefox.com",
+ });
+
+ yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemKeywordChanged() {
+ _("Keyword changes via the synchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+ let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folderGUID = engine._store.GUIDForId(folder);
+ _("Track changes to keywords");
+ let uri = Utils.makeURI("http://getfirefox.com");
+ let b = PlacesUtils.bookmarks.insertBookmark(
+ folder, uri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bGUID = engine._store.GUIDForId(b);
+ _("New item is " + b);
+ _("GUID: " + bGUID);
+
+ yield startTracking();
+
+ _("Give the item a keyword");
+ PlacesUtils.bookmarks.setKeywordForBookmark(b, "the_keyword");
+
+ // bookmark should be tracked, folder should not be.
+ yield verifyTrackedItems([bGUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemKeywordChanged() {
+ _("Keyword changes via the asynchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert two bookmarks with the same URL");
+ let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.toolbarGuid,
+ url: "http://getfirefox.com",
+ title: "Download Firefox",
+ });
+
+ yield startTracking();
+
+ _("Add a keyword for both items");
+ yield PlacesUtils.keywords.insert({
+ keyword: "the_keyword",
+ url: "http://getfirefox.com",
+ postData: "postData",
+ });
+
+ yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemKeywordDeleted() {
+ _("Keyword deletions via the asynchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert two bookmarks with the same URL and keywords");
+ let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.toolbarGuid,
+ url: "http://getfirefox.com",
+ title: "Download Firefox",
+ });
+ yield PlacesUtils.keywords.insert({
+ keyword: "the_keyword",
+ url: "http://getfirefox.com",
+ });
+
+ yield startTracking();
+
+ _("Remove the keyword");
+ yield PlacesUtils.keywords.remove("the_keyword");
+
+ yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemPostDataChanged() {
+ _("Post data changes should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert a bookmark");
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ _(`Firefox GUID: ${fx_guid}`);
+
+ yield startTracking();
+
+ // PlacesUtils.setPostDataForBookmark is deprecated, but still used by
+ // PlacesTransactions.NewBookmark.
+ _("Post data for the bookmark should be ignored");
+ yield PlacesUtils.setPostDataForBookmark(fx_id, "postData");
+ yield verifyTrackerEmpty();
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemAnnoChanged() {
+ _("Item annotations should be tracked");
+
+ try {
+ yield stopTracking();
+ let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folderGUID = engine._store.GUIDForId(folder);
+ _("Track changes to annos.");
+ let b = PlacesUtils.bookmarks.insertBookmark(
+ folder, Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let bGUID = engine._store.GUIDForId(b);
+ _("New item is " + b);
+ _("GUID: " + bGUID);
+
+ yield startTracking();
+ PlacesUtils.annotations.setItemAnnotation(
+ b, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ // bookmark should be tracked, folder should not.
+ yield verifyTrackedItems([bGUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ yield resetTracker();
+
+ PlacesUtils.annotations.removeItemAnnotation(b,
+ PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO);
+ yield verifyTrackedItems([bGUID]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemAdded_filtered_root() {
+ _("Items outside the change roots should not be tracked");
+
+ try {
+ yield startTracking();
+
+ _("Create a new root");
+ let rootID = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.placesRoot,
+ "New root",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let rootGUID = engine._store.GUIDForId(rootID);
+ _(`New root GUID: ${rootGUID}`);
+
+ _("Insert a bookmark underneath the new root");
+ let untrackedBmkID = PlacesUtils.bookmarks.insertBookmark(
+ rootID,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+ let untrackedBmkGUID = engine._store.GUIDForId(untrackedBmkID);
+ _(`New untracked bookmark GUID: ${untrackedBmkGUID}`);
+
+ _("Insert a bookmark underneath the Places root");
+ let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.placesRoot,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
+ _(`New Places root bookmark GUID: ${rootBmkGUID}`);
+
+ _("New root and bookmark should be ignored");
+ yield verifyTrackedItems([]);
+ // ...But we'll still increment the score and filter out the changes at
+ // sync time.
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemDeleted_filtered_root() {
+ _("Deleted items outside the change roots should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert a bookmark underneath the Places root");
+ let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.placesRoot,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+ let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
+ _(`New Places root bookmark GUID: ${rootBmkGUID}`);
+
+ yield startTracking();
+
+ PlacesUtils.bookmarks.removeItem(rootBmkID);
+
+ // We shouldn't upload tombstones for items in filtered roots, but the
+ // `onItemRemoved` observer doesn't have enough context to determine
+ // the root, so we'll end up uploading it.
+ yield verifyTrackedItems([rootBmkGUID]);
+ // We'll increment the counter twice (once for the removed item, and once
+ // for the Places root), then filter out the root.
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onPageAnnoChanged() {
+ _("Page annotations should not be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert a bookmark without an annotation");
+ let pageURI = Utils.makeURI("http://getfirefox.com");
+ PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ pageURI,
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+
+ yield startTracking();
+
+ _("Add a page annotation");
+ PlacesUtils.annotations.setPageAnnotation(pageURI, "URIProperties/characterSet",
+ "UTF-8", 0, PlacesUtils.annotations.EXPIRE_NEVER);
+ yield verifyTrackerEmpty();
+ yield resetTracker();
+
+ _("Remove the page annotation");
+ PlacesUtils.annotations.removePageAnnotation(pageURI,
+ "URIProperties/characterSet");
+ yield verifyTrackerEmpty();
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onFaviconChanged() {
+ _("Favicon changes should not be tracked");
+
+ try {
+ yield stopTracking();
+
+ let pageURI = Utils.makeURI("http://getfirefox.com");
+ let iconURI = Utils.makeURI("http://getfirefox.com/icon");
+ PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ pageURI,
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+
+ yield PlacesTestUtils.addVisits(pageURI);
+
+ yield startTracking();
+
+ _("Favicon annotations should be ignored");
+ let iconURL = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAA" +
+ "AAAA6fptVAAAACklEQVQI12NgAAAAAgAB4iG8MwAAAABJRU5ErkJggg==";
+
+ PlacesUtils.favicons.replaceFaviconDataFromDataURL(iconURI, iconURL, 0,
+ Services.scriptSecurityManager.getSystemPrincipal());
+
+ yield new Promise(resolve => {
+ PlacesUtils.favicons.setAndFetchFaviconForPage(pageURI, iconURI, true,
+ PlacesUtils.favicons.FAVICON_LOAD_NON_PRIVATE, (iconURI, dataLen, data, mimeType) => {
+ resolve();
+ },
+ Services.scriptSecurityManager.getSystemPrincipal());
+ });
+ yield verifyTrackerEmpty();
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onLivemarkAdded() {
+ _("New livemarks should be tracked");
+
+ try {
+ yield startTracking();
+
+ _("Insert a livemark");
+ let livemark = yield PlacesUtils.livemarks.addLivemark({
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ // Use a local address just in case, to avoid potential aborts for
+ // non-local connections.
+ feedURI: Utils.makeURI("http://localhost:0"),
+ });
+ // Prevent the livemark refresh timer from requesting the URI.
+ livemark.terminate();
+
+ yield verifyTrackedItems(["menu", livemark.guid]);
+ // Three changes: one for the parent, one for creating the livemark
+ // folder, and one for setting the "livemark/feedURI" anno on the folder.
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onLivemarkDeleted() {
+ _("Deleted livemarks should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert a livemark");
+ let livemark = yield PlacesUtils.livemarks.addLivemark({
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ feedURI: Utils.makeURI("http://localhost:0"),
+ });
+ livemark.terminate();
+
+ yield startTracking();
+
+ _("Remove a livemark");
+ yield PlacesUtils.livemarks.removeLivemark({
+ guid: livemark.guid,
+ });
+
+ yield verifyTrackedItems(["menu", livemark.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemMoved() {
+ _("Items moved via the synchronous API should be tracked");
+
+ try {
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ _("Firefox GUID: " + fx_guid);
+ let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+ let tb_guid = engine._store.GUIDForId(tb_id);
+ _("Thunderbird GUID: " + tb_guid);
+
+ yield startTracking();
+
+ // Moving within the folder will just track the folder.
+ PlacesUtils.bookmarks.moveItem(
+ tb_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0);
+ yield verifyTrackedItems(['menu']);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ yield resetTracker();
+
+ // Moving a bookmark to a different folder will track the old
+ // folder, the new folder and the bookmark.
+ PlacesUtils.bookmarks.moveItem(fx_id, PlacesUtils.bookmarks.toolbarFolder,
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ yield verifyTrackedItems(['menu', 'toolbar', fx_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemMoved_update() {
+ _("Items moved via the asynchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ let fxBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ let tbBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getthunderbird.com",
+ title: "Get Thunderbird!",
+ });
+
+ yield startTracking();
+
+ _("Repositioning a bookmark should track the folder");
+ yield PlacesUtils.bookmarks.update({
+ guid: tbBmk.guid,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ index: 0,
+ });
+ yield verifyTrackedItems(['menu']);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ yield resetTracker();
+
+ _("Reparenting a bookmark should track both folders and the bookmark");
+ yield PlacesUtils.bookmarks.update({
+ guid: tbBmk.guid,
+ parentGuid: PlacesUtils.bookmarks.toolbarGuid,
+ index: PlacesUtils.bookmarks.DEFAULT_INDEX,
+ });
+ yield verifyTrackedItems(['menu', 'toolbar', tbBmk.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemMoved_reorder() {
+ _("Items reordered via the asynchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Insert out-of-order bookmarks");
+ let fxBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ _(`Firefox GUID: ${fxBmk.guid}`);
+
+ let tbBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getthunderbird.com",
+ title: "Get Thunderbird!",
+ });
+ _(`Thunderbird GUID: ${tbBmk.guid}`);
+
+ let mozBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "https://mozilla.org",
+ title: "Mozilla",
+ });
+ _(`Mozilla GUID: ${mozBmk.guid}`);
+
+ yield startTracking();
+
+ _("Reorder bookmarks");
+ yield PlacesUtils.bookmarks.reorder(PlacesUtils.bookmarks.menuGuid,
+ [mozBmk.guid, fxBmk.guid, tbBmk.guid]);
+
+ // As with setItemIndex, we should only track the folder if we reorder
+ // its children, but we should bump the score for every changed item.
+ yield verifyTrackedItems(["menu"]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemMoved_setItemIndex() {
+ _("Items with updated indices should be tracked");
+
+ try {
+ yield stopTracking();
+
+ let folder_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ "Test folder",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folder_guid = engine._store.GUIDForId(folder_id);
+ _(`Folder GUID: ${folder_guid}`);
+
+ let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ folder_id,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Thunderbird");
+ let tb_guid = engine._store.GUIDForId(tb_id);
+ _(`Thunderbird GUID: ${tb_guid}`);
+
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ folder_id,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Firefox");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ _(`Firefox GUID: ${fx_guid}`);
+
+ let moz_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("https://mozilla.org"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Mozilla"
+ );
+ let moz_guid = engine._store.GUIDForId(moz_id);
+ _(`Mozilla GUID: ${moz_guid}`);
+
+ yield startTracking();
+
+ // PlacesSortFolderByNameTransaction exercises
+ // PlacesUtils.bookmarks.setItemIndex.
+ let txn = new PlacesSortFolderByNameTransaction(folder_id);
+
+ // We're reordering items within the same folder, so only the folder
+ // should be tracked.
+ _("Execute the sort folder transaction");
+ txn.doTransaction();
+ yield verifyTrackedItems([folder_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ yield resetTracker();
+
+ _("Undo the sort folder transaction");
+ txn.undoTransaction();
+ yield verifyTrackedItems([folder_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemDeleted_removeFolderTransaction() {
+ _("Folders removed in a transaction should be tracked");
+
+ try {
+ yield stopTracking();
+
+ _("Create a folder with two children");
+ let folder_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ "Test folder",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folder_guid = engine._store.GUIDForId(folder_id);
+ _(`Folder GUID: ${folder_guid}`);
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ folder_id,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ _(`Firefox GUID: ${fx_guid}`);
+ let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ folder_id,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+ let tb_guid = engine._store.GUIDForId(tb_id);
+ _(`Thunderbird GUID: ${tb_guid}`);
+
+ yield startTracking();
+
+ let txn = PlacesUtils.bookmarks.getRemoveFolderTransaction(folder_id);
+ // We haven't executed the transaction yet.
+ yield verifyTrackerEmpty();
+
+ _("Execute the remove folder transaction");
+ txn.doTransaction();
+ yield verifyTrackedItems(["menu", folder_guid, fx_guid, tb_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6);
+ yield resetTracker();
+
+ _("Undo the remove folder transaction");
+ txn.undoTransaction();
+
+ // At this point, the restored folder has the same ID, but a different GUID.
+ let new_folder_guid = yield PlacesUtils.promiseItemGuid(folder_id);
+
+ yield verifyTrackedItems(["menu", new_folder_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ yield resetTracker();
+
+ _("Redo the transaction");
+ txn.redoTransaction();
+ yield verifyTrackedItems(["menu", new_folder_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_treeMoved() {
+ _("Moving an entire tree of bookmarks should track the parents");
+
+ try {
+ // Create a couple of parent folders.
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ "First test folder",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folder1_guid = engine._store.GUIDForId(folder1_id);
+
+ // A second folder in the first.
+ let folder2_id = PlacesUtils.bookmarks.createFolder(
+ folder1_id,
+ "Second test folder",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folder2_guid = engine._store.GUIDForId(folder2_id);
+
+ // Create a couple of bookmarks in the second folder.
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ folder2_id,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ folder2_id,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+ let tb_guid = engine._store.GUIDForId(tb_id);
+
+ yield startTracking();
+
+ // Move folder 2 to be a sibling of folder1.
+ PlacesUtils.bookmarks.moveItem(
+ folder2_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0);
+ // the menu and both folders should be tracked, the children should not be.
+ yield verifyTrackedItems(['menu', folder1_guid, folder2_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemDeleted() {
+ _("Bookmarks deleted via the synchronous API should be tracked");
+
+ try {
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+ let tb_guid = engine._store.GUIDForId(tb_id);
+
+ yield startTracking();
+
+ // Delete the last item - the item and parent should be tracked.
+ PlacesUtils.bookmarks.removeItem(tb_id);
+
+ yield verifyTrackedItems(['menu', tb_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemDeleted() {
+ _("Bookmarks deleted via the asynchronous API should be tracked");
+
+ try {
+ yield stopTracking();
+
+ let fxBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ let tbBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "http://getthunderbird.com",
+ title: "Get Thunderbird!",
+ });
+
+ yield startTracking();
+
+ _("Delete the first item");
+ yield PlacesUtils.bookmarks.remove(fxBmk.guid);
+
+ yield verifyTrackedItems(["menu", fxBmk.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_async_onItemDeleted_eraseEverything() {
+ _("Erasing everything should track all deleted items");
+
+ try {
+ yield stopTracking();
+
+ let fxBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.mobileGuid,
+ url: "http://getfirefox.com",
+ title: "Get Firefox!",
+ });
+ _(`Firefox GUID: ${fxBmk.guid}`);
+ let tbBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.mobileGuid,
+ url: "http://getthunderbird.com",
+ title: "Get Thunderbird!",
+ });
+ _(`Thunderbird GUID: ${tbBmk.guid}`);
+ let mozBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "https://mozilla.org",
+ title: "Mozilla",
+ });
+ _(`Mozilla GUID: ${mozBmk.guid}`);
+ let mdnBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: PlacesUtils.bookmarks.menuGuid,
+ url: "https://developer.mozilla.org",
+ title: "MDN",
+ });
+ _(`MDN GUID: ${mdnBmk.guid}`);
+ let bugsFolder = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_FOLDER,
+ parentGuid: PlacesUtils.bookmarks.toolbarGuid,
+ title: "Bugs",
+ });
+ _(`Bugs folder GUID: ${bugsFolder.guid}`);
+ let bzBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: bugsFolder.guid,
+ url: "https://bugzilla.mozilla.org",
+ title: "Bugzilla",
+ });
+ _(`Bugzilla GUID: ${bzBmk.guid}`);
+ let bugsChildFolder = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_FOLDER,
+ parentGuid: bugsFolder.guid,
+ title: "Bugs child",
+ });
+ _(`Bugs child GUID: ${bugsChildFolder.guid}`);
+ let bugsGrandChildBmk = yield PlacesUtils.bookmarks.insert({
+ type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
+ parentGuid: bugsChildFolder.guid,
+ url: "https://example.com",
+ title: "Bugs grandchild",
+ });
+ _(`Bugs grandchild GUID: ${bugsGrandChildBmk.guid}`);
+
+ yield startTracking();
+
+ yield PlacesUtils.bookmarks.eraseEverything();
+
+ // `eraseEverything` removes all items from the database before notifying
+ // observers. Because of this, grandchild lookup in the tracker's
+ // `onItemRemoved` observer will fail. That means we won't track
+ // (bzBmk.guid, bugsGrandChildBmk.guid, bugsChildFolder.guid), even
+ // though we should.
+ yield verifyTrackedItems(["menu", mozBmk.guid, mdnBmk.guid, "toolbar",
+ bugsFolder.guid, "mobile", fxBmk.guid,
+ tbBmk.guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 10);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemDeleted_removeFolderChildren() {
+ _("Removing a folder's children should track the folder and its children");
+
+ try {
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.mobileFolderId,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ _(`Firefox GUID: ${fx_guid}`);
+
+ let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.mobileFolderId,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+ let tb_guid = engine._store.GUIDForId(tb_id);
+ _(`Thunderbird GUID: ${tb_guid}`);
+
+ let moz_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ Utils.makeURI("https://mozilla.org"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Mozilla"
+ );
+ let moz_guid = engine._store.GUIDForId(moz_id);
+ _(`Mozilla GUID: ${moz_guid}`);
+
+ yield startTracking();
+
+ _(`Mobile root ID: ${PlacesUtils.mobileFolderId}`);
+ PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.mobileFolderId);
+
+ yield verifyTrackedItems(["mobile", fx_guid, tb_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 4);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_onItemDeleted_tree() {
+ _("Deleting a tree of bookmarks should track all items");
+
+ try {
+ // Create a couple of parent folders.
+ let folder1_id = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.bookmarksMenuFolder,
+ "First test folder",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folder1_guid = engine._store.GUIDForId(folder1_id);
+
+ // A second folder in the first.
+ let folder2_id = PlacesUtils.bookmarks.createFolder(
+ folder1_id,
+ "Second test folder",
+ PlacesUtils.bookmarks.DEFAULT_INDEX);
+ let folder2_guid = engine._store.GUIDForId(folder2_id);
+
+ // Create a couple of bookmarks in the second folder.
+ let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ folder2_id,
+ Utils.makeURI("http://getfirefox.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let fx_guid = engine._store.GUIDForId(fx_id);
+ let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ folder2_id,
+ Utils.makeURI("http://getthunderbird.com"),
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+ let tb_guid = engine._store.GUIDForId(tb_id);
+
+ yield startTracking();
+
+ // Delete folder2 - everything we created should be tracked.
+ PlacesUtils.bookmarks.removeItem(folder2_id);
+
+ yield verifyTrackedItems([fx_guid, tb_guid, folder1_guid, folder2_guid]);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 6);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
+
+add_task(function* test_mobile_query() {
+ _("Ensure we correctly create the mobile query");
+
+ try {
+ // Creates the organizer queries as a side effect.
+ let leftPaneId = PlacesUIUtils.leftPaneFolderId;
+ _(`Left pane root ID: ${leftPaneId}`);
+
+ let allBookmarksIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "AllBookmarks");
+ equal(allBookmarksIds.length, 1, "Should create folder with all bookmarks queries");
+ let allBookmarkGuid = yield PlacesUtils.promiseItemGuid(allBookmarksIds[0]);
+
+ _("Try creating query after organizer is ready");
+ tracker._ensureMobileQuery();
+ let queryIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "MobileBookmarks");
+ equal(queryIds.length, 0, "Should not create query without any mobile bookmarks");
+
+ _("Insert mobile bookmark, then create query");
+ yield PlacesUtils.bookmarks.insert({
+ parentGuid: PlacesUtils.bookmarks.mobileGuid,
+ url: "https://mozilla.org",
+ });
+ tracker._ensureMobileQuery();
+ queryIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "MobileBookmarks", {});
+ equal(queryIds.length, 1, "Should create query once mobile bookmarks exist");
+
+ let queryId = queryIds[0];
+ let queryGuid = yield PlacesUtils.promiseItemGuid(queryId);
+
+ let queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
+ equal(queryInfo.url, `place:folder=${PlacesUtils.mobileFolderId}`, "Query should point to mobile root");
+ equal(queryInfo.title, "Mobile Bookmarks", "Query title should be localized");
+ equal(queryInfo.parentGuid, allBookmarkGuid, "Should append mobile query to all bookmarks queries");
+
+ _("Rename root and query, then recreate");
+ yield PlacesUtils.bookmarks.update({
+ guid: PlacesUtils.bookmarks.mobileGuid,
+ title: "renamed root",
+ });
+ yield PlacesUtils.bookmarks.update({
+ guid: queryGuid,
+ title: "renamed query",
+ });
+ tracker._ensureMobileQuery();
+ let rootInfo = yield PlacesUtils.bookmarks.fetch(PlacesUtils.bookmarks.mobileGuid);
+ equal(rootInfo.title, "Mobile Bookmarks", "Should fix root title");
+ queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
+ equal(queryInfo.title, "Mobile Bookmarks", "Should fix query title");
+
+ _("Point query to different folder");
+ yield PlacesUtils.bookmarks.update({
+ guid: queryGuid,
+ url: "place:folder=BOOKMARKS_MENU",
+ });
+ tracker._ensureMobileQuery();
+ queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
+ equal(queryInfo.url.href, `place:folder=${PlacesUtils.mobileFolderId}`,
+ "Should fix query URL to point to mobile root");
+
+ _("We shouldn't track the query or the left pane root");
+ yield verifyTrackedCount(0);
+ do_check_eq(tracker.score, 0);
+ } finally {
+ _("Clean up.");
+ yield cleanup();
+ }
+});
diff --git a/services/sync/tests/unit/test_bookmark_validator.js b/services/sync/tests/unit/test_bookmark_validator.js
new file mode 100644
index 000000000..cc0b3b08f
--- /dev/null
+++ b/services/sync/tests/unit/test_bookmark_validator.js
@@ -0,0 +1,347 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Components.utils.import("resource://services-sync/bookmark_validator.js");
+Components.utils.import("resource://services-sync/util.js");
+
+function inspectServerRecords(data) {
+ return new BookmarkValidator().inspectServerRecords(data);
+}
+
+add_test(function test_isr_rootOnServer() {
+ let c = inspectServerRecords([{
+ id: 'places',
+ type: 'folder',
+ children: [],
+ }]);
+ ok(c.problemData.rootOnServer);
+ run_next_test();
+});
+
+add_test(function test_isr_empty() {
+ let c = inspectServerRecords([]);
+ ok(!c.problemData.rootOnServer);
+ notEqual(c.root, null);
+ run_next_test();
+});
+
+add_test(function test_isr_cycles() {
+ let c = inspectServerRecords([
+ {id: 'C', type: 'folder', children: ['A', 'B'], parentid: 'places'},
+ {id: 'A', type: 'folder', children: ['B'], parentid: 'B'},
+ {id: 'B', type: 'folder', children: ['A'], parentid: 'A'},
+ ]).problemData;
+
+ equal(c.cycles.length, 1);
+ ok(c.cycles[0].indexOf('A') >= 0);
+ ok(c.cycles[0].indexOf('B') >= 0);
+ run_next_test();
+});
+
+add_test(function test_isr_orphansMultiParents() {
+ let c = inspectServerRecords([
+ { id: 'A', type: 'bookmark', parentid: 'D' },
+ { id: 'B', type: 'folder', parentid: 'places', children: ['A']},
+ { id: 'C', type: 'folder', parentid: 'places', children: ['A']},
+
+ ]).problemData;
+ deepEqual(c.orphans, [{ id: "A", parent: "D" }]);
+ equal(c.multipleParents.length, 1)
+ ok(c.multipleParents[0].parents.indexOf('B') >= 0);
+ ok(c.multipleParents[0].parents.indexOf('C') >= 0);
+ run_next_test();
+});
+
+add_test(function test_isr_orphansMultiParents2() {
+ let c = inspectServerRecords([
+ { id: 'A', type: 'bookmark', parentid: 'D' },
+ { id: 'B', type: 'folder', parentid: 'places', children: ['A']},
+ ]).problemData;
+ equal(c.orphans.length, 1);
+ equal(c.orphans[0].id, 'A');
+ equal(c.multipleParents.length, 0);
+ run_next_test();
+});
+
+add_test(function test_isr_deletedParents() {
+ let c = inspectServerRecords([
+ { id: 'A', type: 'bookmark', parentid: 'B' },
+ { id: 'B', type: 'folder', parentid: 'places', children: ['A']},
+ { id: 'B', type: 'item', deleted: true},
+ ]).problemData;
+ deepEqual(c.deletedParents, ['A'])
+ run_next_test();
+});
+
+add_test(function test_isr_badChildren() {
+ let c = inspectServerRecords([
+ { id: 'A', type: 'bookmark', parentid: 'places', children: ['B', 'C'] },
+ { id: 'C', type: 'bookmark', parentid: 'A' }
+ ]).problemData;
+ deepEqual(c.childrenOnNonFolder, ['A'])
+ deepEqual(c.missingChildren, [{parent: 'A', child: 'B'}]);
+ deepEqual(c.parentNotFolder, ['C']);
+ run_next_test();
+});
+
+
+add_test(function test_isr_parentChildMismatches() {
+ let c = inspectServerRecords([
+ { id: 'A', type: 'folder', parentid: 'places', children: [] },
+ { id: 'B', type: 'bookmark', parentid: 'A' }
+ ]).problemData;
+ deepEqual(c.parentChildMismatches, [{parent: 'A', child: 'B'}]);
+ run_next_test();
+});
+
+add_test(function test_isr_duplicatesAndMissingIDs() {
+ let c = inspectServerRecords([
+ {id: 'A', type: 'folder', parentid: 'places', children: []},
+ {id: 'A', type: 'folder', parentid: 'places', children: []},
+ {type: 'folder', parentid: 'places', children: []}
+ ]).problemData;
+ equal(c.missingIDs, 1);
+ deepEqual(c.duplicates, ['A']);
+ run_next_test();
+});
+
+add_test(function test_isr_duplicateChildren() {
+ let c = inspectServerRecords([
+ {id: 'A', type: 'folder', parentid: 'places', children: ['B', 'B']},
+ {id: 'B', type: 'bookmark', parentid: 'A'},
+ ]).problemData;
+ deepEqual(c.duplicateChildren, ['A']);
+ run_next_test();
+});
+
+// Each compareServerWithClient test mutates these, so we can't just keep them
+// global
+function getDummyServerAndClient() {
+ let server = [
+ {
+ id: 'menu',
+ parentid: 'places',
+ type: 'folder',
+ parentName: '',
+ title: 'foo',
+ children: ['bbbbbbbbbbbb', 'cccccccccccc']
+ },
+ {
+ id: 'bbbbbbbbbbbb',
+ type: 'bookmark',
+ parentid: 'menu',
+ parentName: 'foo',
+ title: 'bar',
+ bmkUri: 'http://baz.com'
+ },
+ {
+ id: 'cccccccccccc',
+ parentid: 'menu',
+ parentName: 'foo',
+ title: '',
+ type: 'query',
+ bmkUri: 'place:type=6&sort=14&maxResults=10'
+ }
+ ];
+
+ let client = {
+ "guid": "root________",
+ "title": "",
+ "id": 1,
+ "type": "text/x-moz-place-container",
+ "children": [
+ {
+ "guid": "menu________",
+ "title": "foo",
+ "id": 1000,
+ "type": "text/x-moz-place-container",
+ "children": [
+ {
+ "guid": "bbbbbbbbbbbb",
+ "title": "bar",
+ "id": 1001,
+ "type": "text/x-moz-place",
+ "uri": "http://baz.com"
+ },
+ {
+ "guid": "cccccccccccc",
+ "title": "",
+ "id": 1002,
+ "annos": [{
+ "name": "Places/SmartBookmark",
+ "flags": 0,
+ "expires": 4,
+ "value": "RecentTags"
+ }],
+ "type": "text/x-moz-place",
+ "uri": "place:type=6&sort=14&maxResults=10"
+ }
+ ]
+ }
+ ]
+ };
+ return {server, client};
+}
+
+
+add_test(function test_cswc_valid() {
+ let {server, client} = getDummyServerAndClient();
+
+ let c = new BookmarkValidator().compareServerWithClient(server, client).problemData;
+ equal(c.clientMissing.length, 0);
+ equal(c.serverMissing.length, 0);
+ equal(c.differences.length, 0);
+ run_next_test();
+});
+
+add_test(function test_cswc_serverMissing() {
+ let {server, client} = getDummyServerAndClient();
+ // remove c
+ server.pop();
+ server[0].children.pop();
+
+ let c = new BookmarkValidator().compareServerWithClient(server, client).problemData;
+ deepEqual(c.serverMissing, ['cccccccccccc']);
+ equal(c.clientMissing.length, 0);
+ deepEqual(c.structuralDifferences, [{id: 'menu', differences: ['childGUIDs']}]);
+ run_next_test();
+});
+
+add_test(function test_cswc_clientMissing() {
+ let {server, client} = getDummyServerAndClient();
+ client.children[0].children.pop();
+
+ let c = new BookmarkValidator().compareServerWithClient(server, client).problemData;
+ deepEqual(c.clientMissing, ['cccccccccccc']);
+ equal(c.serverMissing.length, 0);
+ deepEqual(c.structuralDifferences, [{id: 'menu', differences: ['childGUIDs']}]);
+ run_next_test();
+});
+
+add_test(function test_cswc_differences() {
+ {
+ let {server, client} = getDummyServerAndClient();
+ client.children[0].children[0].title = 'asdf';
+ let c = new BookmarkValidator().compareServerWithClient(server, client).problemData;
+ equal(c.clientMissing.length, 0);
+ equal(c.serverMissing.length, 0);
+ deepEqual(c.differences, [{id: 'bbbbbbbbbbbb', differences: ['title']}]);
+ }
+
+ {
+ let {server, client} = getDummyServerAndClient();
+ server[2].type = 'bookmark';
+ let c = new BookmarkValidator().compareServerWithClient(server, client).problemData;
+ equal(c.clientMissing.length, 0);
+ equal(c.serverMissing.length, 0);
+ deepEqual(c.differences, [{id: 'cccccccccccc', differences: ['type']}]);
+ }
+ run_next_test();
+});
+
+add_test(function test_cswc_serverUnexpected() {
+ let {server, client} = getDummyServerAndClient();
+ client.children.push({
+ "guid": "dddddddddddd",
+ "title": "",
+ "id": 2000,
+ "annos": [{
+ "name": "places/excludeFromBackup",
+ "flags": 0,
+ "expires": 4,
+ "value": 1
+ }, {
+ "name": "PlacesOrganizer/OrganizerFolder",
+ "flags": 0,
+ "expires": 4,
+ "value": 7
+ }],
+ "type": "text/x-moz-place-container",
+ "children": [{
+ "guid": "eeeeeeeeeeee",
+ "title": "History",
+ "annos": [{
+ "name": "places/excludeFromBackup",
+ "flags": 0,
+ "expires": 4,
+ "value": 1
+ }, {
+ "name": "PlacesOrganizer/OrganizerQuery",
+ "flags": 0,
+ "expires": 4,
+ "value": "History"
+ }],
+ "type": "text/x-moz-place",
+ "uri": "place:type=3&sort=4"
+ }]
+ });
+ server.push({
+ id: 'dddddddddddd',
+ parentid: 'places',
+ parentName: '',
+ title: '',
+ type: 'folder',
+ children: ['eeeeeeeeeeee']
+ }, {
+ id: 'eeeeeeeeeeee',
+ parentid: 'dddddddddddd',
+ parentName: '',
+ title: 'History',
+ type: 'query',
+ bmkUri: 'place:type=3&sort=4'
+ });
+
+ let c = new BookmarkValidator().compareServerWithClient(server, client).problemData;
+ equal(c.clientMissing.length, 0);
+ equal(c.serverMissing.length, 0);
+ equal(c.serverUnexpected.length, 2);
+ deepEqual(c.serverUnexpected, ["dddddddddddd", "eeeeeeeeeeee"]);
+ run_next_test();
+});
+
+function validationPing(server, client, duration) {
+ return wait_for_ping(function() {
+ // fake this entirely
+ Svc.Obs.notify("weave:service:sync:start");
+ Svc.Obs.notify("weave:engine:sync:start", null, "bookmarks");
+ Svc.Obs.notify("weave:engine:sync:finish", null, "bookmarks");
+ let validator = new BookmarkValidator();
+ let data = {
+ // We fake duration and version just so that we can verify they're passed through.
+ duration,
+ version: validator.version,
+ recordCount: server.length,
+ problems: validator.compareServerWithClient(server, client).problemData,
+ };
+ Svc.Obs.notify("weave:engine:validate:finish", data, "bookmarks");
+ Svc.Obs.notify("weave:service:sync:finish");
+ }, true); // Allow "failing" pings, since having validation info indicates failure.
+}
+
+add_task(function *test_telemetry_integration() {
+ let {server, client} = getDummyServerAndClient();
+ // remove "c"
+ server.pop();
+ server[0].children.pop();
+ const duration = 50;
+ let ping = yield validationPing(server, client, duration);
+ ok(ping.engines);
+ let bme = ping.engines.find(e => e.name === "bookmarks");
+ ok(bme);
+ ok(bme.validation);
+ ok(bme.validation.problems)
+ equal(bme.validation.checked, server.length);
+ equal(bme.validation.took, duration);
+ bme.validation.problems.sort((a, b) => String.localeCompare(a.name, b.name));
+ equal(bme.validation.version, new BookmarkValidator().version);
+ deepEqual(bme.validation.problems, [
+ { name: "badClientRoots", count: 3 },
+ { name: "sdiff:childGUIDs", count: 1 },
+ { name: "serverMissing", count: 1 },
+ { name: "structuralDifferences", count: 1 },
+ ]);
+});
+
+function run_test() {
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_browserid_identity.js b/services/sync/tests/unit/test_browserid_identity.js
new file mode 100644
index 000000000..531c01bf6
--- /dev/null
+++ b/services/sync/tests/unit/test_browserid_identity.js
@@ -0,0 +1,890 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/FxAccounts.jsm");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://services-sync/rest.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://testing-common/services/sync/fxa_utils.js");
+Cu.import("resource://services-common/hawkclient.js");
+Cu.import("resource://gre/modules/FxAccounts.jsm");
+Cu.import("resource://gre/modules/FxAccountsClient.jsm");
+Cu.import("resource://gre/modules/FxAccountsCommon.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-common/tokenserverclient.js");
+
+const SECOND_MS = 1000;
+const MINUTE_MS = SECOND_MS * 60;
+const HOUR_MS = MINUTE_MS * 60;
+
+var identityConfig = makeIdentityConfig();
+var browseridManager = new BrowserIDManager();
+configureFxAccountIdentity(browseridManager, identityConfig);
+
+/**
+ * Mock client clock and skew vs server in FxAccounts signed-in user module and
+ * API client. browserid_identity.js queries these values to construct HAWK
+ * headers. We will use this to test clock skew compensation in these headers
+ * below.
+ */
+var MockFxAccountsClient = function() {
+ FxAccountsClient.apply(this);
+};
+MockFxAccountsClient.prototype = {
+ __proto__: FxAccountsClient.prototype,
+ accountStatus() {
+ return Promise.resolve(true);
+ }
+};
+
+function MockFxAccounts() {
+ let fxa = new FxAccounts({
+ _now_is: Date.now(),
+
+ now: function () {
+ return this._now_is;
+ },
+
+ fxAccountsClient: new MockFxAccountsClient()
+ });
+ fxa.internal.currentAccountState.getCertificate = function(data, keyPair, mustBeValidUntil) {
+ this.cert = {
+ validUntil: fxa.internal.now() + CERT_LIFETIME,
+ cert: "certificate",
+ };
+ return Promise.resolve(this.cert.cert);
+ };
+ return fxa;
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Identity").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.BrowserIDManager").level = Log.Level.Trace;
+ run_next_test();
+};
+
+add_test(function test_initial_state() {
+ _("Verify initial state");
+ do_check_false(!!browseridManager._token);
+ do_check_false(browseridManager.hasValidToken());
+ run_next_test();
+ }
+);
+
+add_task(function* test_initialializeWithCurrentIdentity() {
+ _("Verify start after initializeWithCurrentIdentity");
+ browseridManager.initializeWithCurrentIdentity();
+ yield browseridManager.whenReadyToAuthenticate.promise;
+ do_check_true(!!browseridManager._token);
+ do_check_true(browseridManager.hasValidToken());
+ do_check_eq(browseridManager.account, identityConfig.fxaccount.user.email);
+ }
+);
+
+add_task(function* test_initialializeWithAuthErrorAndDeletedAccount() {
+ _("Verify sync unpair after initializeWithCurrentIdentity with auth error + account deleted");
+
+ var identityConfig = makeIdentityConfig();
+ var browseridManager = new BrowserIDManager();
+
+ // Use the real `_getAssertion` method that calls
+ // `mockFxAClient.signCertificate`.
+ let fxaInternal = makeFxAccountsInternalMock(identityConfig);
+ delete fxaInternal._getAssertion;
+
+ configureFxAccountIdentity(browseridManager, identityConfig, fxaInternal);
+ browseridManager._fxaService.internal.initialize();
+
+ let signCertificateCalled = false;
+ let accountStatusCalled = false;
+
+ let MockFxAccountsClient = function() {
+ FxAccountsClient.apply(this);
+ };
+ MockFxAccountsClient.prototype = {
+ __proto__: FxAccountsClient.prototype,
+ signCertificate() {
+ signCertificateCalled = true;
+ return Promise.reject({
+ code: 401,
+ errno: ERRNO_INVALID_AUTH_TOKEN,
+ });
+ },
+ accountStatus() {
+ accountStatusCalled = true;
+ return Promise.resolve(false);
+ }
+ };
+
+ let mockFxAClient = new MockFxAccountsClient();
+ browseridManager._fxaService.internal._fxAccountsClient = mockFxAClient;
+
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "should reject due to an auth error");
+
+ do_check_true(signCertificateCalled);
+ do_check_true(accountStatusCalled);
+ do_check_false(browseridManager.account);
+ do_check_false(browseridManager._token);
+ do_check_false(browseridManager.hasValidToken());
+ do_check_false(browseridManager.account);
+});
+
+add_task(function* test_initialializeWithNoKeys() {
+ _("Verify start after initializeWithCurrentIdentity without kA, kB or keyFetchToken");
+ let identityConfig = makeIdentityConfig();
+ delete identityConfig.fxaccount.user.kA;
+ delete identityConfig.fxaccount.user.kB;
+ // there's no keyFetchToken by default, so the initialize should fail.
+ configureFxAccountIdentity(browseridManager, identityConfig);
+
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield browseridManager.whenReadyToAuthenticate.promise;
+ do_check_eq(Status.login, LOGIN_SUCCEEDED, "login succeeded even without keys");
+ do_check_false(browseridManager._canFetchKeys(), "_canFetchKeys reflects lack of keys");
+ do_check_eq(browseridManager._token, null, "we don't have a token");
+});
+
+add_test(function test_getResourceAuthenticator() {
+ _("BrowserIDManager supplies a Resource Authenticator callback which returns a Hawk header.");
+ configureFxAccountIdentity(browseridManager);
+ let authenticator = browseridManager.getResourceAuthenticator();
+ do_check_true(!!authenticator);
+ let req = {uri: CommonUtils.makeURI(
+ "https://example.net/somewhere/over/the/rainbow"),
+ method: 'GET'};
+ let output = authenticator(req, 'GET');
+ do_check_true('headers' in output);
+ do_check_true('authorization' in output.headers);
+ do_check_true(output.headers.authorization.startsWith('Hawk'));
+ _("Expected internal state after successful call.");
+ do_check_eq(browseridManager._token.uid, identityConfig.fxaccount.token.uid);
+ run_next_test();
+ }
+);
+
+add_test(function test_getRESTRequestAuthenticator() {
+ _("BrowserIDManager supplies a REST Request Authenticator callback which sets a Hawk header on a request object.");
+ let request = new SyncStorageRequest(
+ "https://example.net/somewhere/over/the/rainbow");
+ let authenticator = browseridManager.getRESTRequestAuthenticator();
+ do_check_true(!!authenticator);
+ let output = authenticator(request, 'GET');
+ do_check_eq(request.uri, output.uri);
+ do_check_true(output._headers.authorization.startsWith('Hawk'));
+ do_check_true(output._headers.authorization.includes('nonce'));
+ do_check_true(browseridManager.hasValidToken());
+ run_next_test();
+ }
+);
+
+add_test(function test_resourceAuthenticatorSkew() {
+ _("BrowserIDManager Resource Authenticator compensates for clock skew in Hawk header.");
+
+ // Clock is skewed 12 hours into the future
+ // We pick a date in the past so we don't risk concealing bugs in code that
+ // uses new Date() instead of our given date.
+ let now = new Date("Fri Apr 09 2004 00:00:00 GMT-0700").valueOf() + 12 * HOUR_MS;
+ let browseridManager = new BrowserIDManager();
+ let hawkClient = new HawkClient("https://example.net/v1", "/foo");
+
+ // mock fxa hawk client skew
+ hawkClient.now = function() {
+ dump("mocked client now: " + now + '\n');
+ return now;
+ }
+ // Imagine there's already been one fxa request and the hawk client has
+ // already detected skew vs the fxa auth server.
+ let localtimeOffsetMsec = -1 * 12 * HOUR_MS;
+ hawkClient._localtimeOffsetMsec = localtimeOffsetMsec;
+
+ let fxaClient = new MockFxAccountsClient();
+ fxaClient.hawk = hawkClient;
+
+ // Sanity check
+ do_check_eq(hawkClient.now(), now);
+ do_check_eq(hawkClient.localtimeOffsetMsec, localtimeOffsetMsec);
+
+ // Properly picked up by the client
+ do_check_eq(fxaClient.now(), now);
+ do_check_eq(fxaClient.localtimeOffsetMsec, localtimeOffsetMsec);
+
+ let fxa = new MockFxAccounts();
+ fxa.internal._now_is = now;
+ fxa.internal.fxAccountsClient = fxaClient;
+
+ // Picked up by the signed-in user module
+ do_check_eq(fxa.internal.now(), now);
+ do_check_eq(fxa.internal.localtimeOffsetMsec, localtimeOffsetMsec);
+
+ do_check_eq(fxa.now(), now);
+ do_check_eq(fxa.localtimeOffsetMsec, localtimeOffsetMsec);
+
+ // Mocks within mocks...
+ configureFxAccountIdentity(browseridManager, identityConfig);
+
+ // Ensure the new FxAccounts mock has a signed-in user.
+ fxa.internal.currentAccountState.signedInUser = browseridManager._fxaService.internal.currentAccountState.signedInUser;
+
+ browseridManager._fxaService = fxa;
+
+ do_check_eq(browseridManager._fxaService.internal.now(), now);
+ do_check_eq(browseridManager._fxaService.internal.localtimeOffsetMsec,
+ localtimeOffsetMsec);
+
+ do_check_eq(browseridManager._fxaService.now(), now);
+ do_check_eq(browseridManager._fxaService.localtimeOffsetMsec,
+ localtimeOffsetMsec);
+
+ let request = new SyncStorageRequest("https://example.net/i/like/pie/");
+ let authenticator = browseridManager.getResourceAuthenticator();
+ let output = authenticator(request, 'GET');
+ dump("output" + JSON.stringify(output));
+ let authHeader = output.headers.authorization;
+ do_check_true(authHeader.startsWith('Hawk'));
+
+ // Skew correction is applied in the header and we're within the two-minute
+ // window.
+ do_check_eq(getTimestamp(authHeader), now - 12 * HOUR_MS);
+ do_check_true(
+ (getTimestampDelta(authHeader, now) - 12 * HOUR_MS) < 2 * MINUTE_MS);
+
+ run_next_test();
+});
+
+add_test(function test_RESTResourceAuthenticatorSkew() {
+ _("BrowserIDManager REST Resource Authenticator compensates for clock skew in Hawk header.");
+
+ // Clock is skewed 12 hours into the future from our arbitary date
+ let now = new Date("Fri Apr 09 2004 00:00:00 GMT-0700").valueOf() + 12 * HOUR_MS;
+ let browseridManager = new BrowserIDManager();
+ let hawkClient = new HawkClient("https://example.net/v1", "/foo");
+
+ // mock fxa hawk client skew
+ hawkClient.now = function() {
+ return now;
+ }
+ // Imagine there's already been one fxa request and the hawk client has
+ // already detected skew vs the fxa auth server.
+ hawkClient._localtimeOffsetMsec = -1 * 12 * HOUR_MS;
+
+ let fxaClient = new MockFxAccountsClient();
+ fxaClient.hawk = hawkClient;
+ let fxa = new MockFxAccounts();
+ fxa.internal._now_is = now;
+ fxa.internal.fxAccountsClient = fxaClient;
+
+ configureFxAccountIdentity(browseridManager, identityConfig);
+
+ // Ensure the new FxAccounts mock has a signed-in user.
+ fxa.internal.currentAccountState.signedInUser = browseridManager._fxaService.internal.currentAccountState.signedInUser;
+
+ browseridManager._fxaService = fxa;
+
+ do_check_eq(browseridManager._fxaService.internal.now(), now);
+
+ let request = new SyncStorageRequest("https://example.net/i/like/pie/");
+ let authenticator = browseridManager.getResourceAuthenticator();
+ let output = authenticator(request, 'GET');
+ dump("output" + JSON.stringify(output));
+ let authHeader = output.headers.authorization;
+ do_check_true(authHeader.startsWith('Hawk'));
+
+ // Skew correction is applied in the header and we're within the two-minute
+ // window.
+ do_check_eq(getTimestamp(authHeader), now - 12 * HOUR_MS);
+ do_check_true(
+ (getTimestampDelta(authHeader, now) - 12 * HOUR_MS) < 2 * MINUTE_MS);
+
+ run_next_test();
+});
+
+add_task(function* test_ensureLoggedIn() {
+ configureFxAccountIdentity(browseridManager);
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield browseridManager.whenReadyToAuthenticate.promise;
+ Assert.equal(Status.login, LOGIN_SUCCEEDED, "original initialize worked");
+ yield browseridManager.ensureLoggedIn();
+ Assert.equal(Status.login, LOGIN_SUCCEEDED, "original ensureLoggedIn worked");
+ Assert.ok(browseridManager._shouldHaveSyncKeyBundle,
+ "_shouldHaveSyncKeyBundle should always be true after ensureLogin completes.");
+
+ // arrange for no logged in user.
+ let fxa = browseridManager._fxaService
+ let signedInUser = fxa.internal.currentAccountState.storageManager.accountData;
+ fxa.internal.currentAccountState.storageManager.accountData = null;
+ browseridManager.initializeWithCurrentIdentity();
+ Assert.ok(!browseridManager._shouldHaveSyncKeyBundle,
+ "_shouldHaveSyncKeyBundle should be false so we know we are testing what we think we are.");
+ Status.login = LOGIN_FAILED_NO_USERNAME;
+ yield Assert.rejects(browseridManager.ensureLoggedIn(), "expecting rejection due to no user");
+ Assert.ok(browseridManager._shouldHaveSyncKeyBundle,
+ "_shouldHaveSyncKeyBundle should always be true after ensureLogin completes.");
+ // Restore the logged in user to what it was.
+ fxa.internal.currentAccountState.storageManager.accountData = signedInUser;
+ Status.login = LOGIN_FAILED_LOGIN_REJECTED;
+ yield Assert.rejects(browseridManager.ensureLoggedIn(),
+ "LOGIN_FAILED_LOGIN_REJECTED should have caused immediate rejection");
+ Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED,
+ "status should remain LOGIN_FAILED_LOGIN_REJECTED");
+ Status.login = LOGIN_FAILED_NETWORK_ERROR;
+ yield browseridManager.ensureLoggedIn();
+ Assert.equal(Status.login, LOGIN_SUCCEEDED, "final ensureLoggedIn worked");
+});
+
+add_test(function test_tokenExpiration() {
+ _("BrowserIDManager notices token expiration:");
+ let bimExp = new BrowserIDManager();
+ configureFxAccountIdentity(bimExp, identityConfig);
+
+ let authenticator = bimExp.getResourceAuthenticator();
+ do_check_true(!!authenticator);
+ let req = {uri: CommonUtils.makeURI(
+ "https://example.net/somewhere/over/the/rainbow"),
+ method: 'GET'};
+ authenticator(req, 'GET');
+
+ // Mock the clock.
+ _("Forcing the token to expire ...");
+ Object.defineProperty(bimExp, "_now", {
+ value: function customNow() {
+ return (Date.now() + 3000001);
+ },
+ writable: true,
+ });
+ do_check_true(bimExp._token.expiration < bimExp._now());
+ _("... means BrowserIDManager knows to re-fetch it on the next call.");
+ do_check_false(bimExp.hasValidToken());
+ run_next_test();
+ }
+);
+
+add_test(function test_sha256() {
+ // Test vectors from http://www.bichlmeier.info/sha256test.html
+ let vectors = [
+ ["",
+ "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"],
+ ["abc",
+ "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"],
+ ["message digest",
+ "f7846f55cf23e14eebeab5b4e1550cad5b509e3348fbc4efa3a1413d393cb650"],
+ ["secure hash algorithm",
+ "f30ceb2bb2829e79e4ca9753d35a8ecc00262d164cc077080295381cbd643f0d"],
+ ["SHA256 is considered to be safe",
+ "6819d915c73f4d1e77e4e1b52d1fa0f9cf9beaead3939f15874bd988e2a23630"],
+ ["abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+ "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"],
+ ["For this sample, this 63-byte string will be used as input data",
+ "f08a78cbbaee082b052ae0708f32fa1e50c5c421aa772ba5dbb406a2ea6be342"],
+ ["This is exactly 64 bytes long, not counting the terminating byte",
+ "ab64eff7e88e2e46165e29f2bce41826bd4c7b3552f6b382a9e7d3af47c245f8"]
+ ];
+ let bidUser = new BrowserIDManager();
+ for (let [input,output] of vectors) {
+ do_check_eq(CommonUtils.bytesAsHex(bidUser._sha256(input)), output);
+ }
+ run_next_test();
+});
+
+add_test(function test_computeXClientStateHeader() {
+ let kBhex = "fd5c747806c07ce0b9d69dcfea144663e630b65ec4963596a22f24910d7dd15d";
+ let kB = CommonUtils.hexToBytes(kBhex);
+
+ let bidUser = new BrowserIDManager();
+ let header = bidUser._computeXClientState(kB);
+
+ do_check_eq(header, "6ae94683571c7a7c54dab4700aa3995f");
+ run_next_test();
+});
+
+add_task(function* test_getTokenErrors() {
+ _("BrowserIDManager correctly handles various failures to get a token.");
+
+ _("Arrange for a 401 - Sync should reflect an auth error.");
+ initializeIdentityWithTokenServerResponse({
+ status: 401,
+ headers: {"content-type": "application/json"},
+ body: JSON.stringify({}),
+ });
+ let browseridManager = Service.identity;
+
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "should reject due to 401");
+ Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED, "login was rejected");
+
+ // XXX - other interesting responses to return?
+
+ // And for good measure, some totally "unexpected" errors - we generally
+ // assume these problems are going to magically go away at some point.
+ _("Arrange for an empty body with a 200 response - should reflect a network error.");
+ initializeIdentityWithTokenServerResponse({
+ status: 200,
+ headers: [],
+ body: "",
+ });
+ browseridManager = Service.identity;
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "should reject due to non-JSON response");
+ Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login state is LOGIN_FAILED_NETWORK_ERROR");
+});
+
+add_task(function* test_refreshCertificateOn401() {
+ _("BrowserIDManager refreshes the FXA certificate after a 401.");
+ var identityConfig = makeIdentityConfig();
+ var browseridManager = new BrowserIDManager();
+ // Use the real `_getAssertion` method that calls
+ // `mockFxAClient.signCertificate`.
+ let fxaInternal = makeFxAccountsInternalMock(identityConfig);
+ delete fxaInternal._getAssertion;
+ configureFxAccountIdentity(browseridManager, identityConfig, fxaInternal);
+ browseridManager._fxaService.internal.initialize();
+
+ let getCertCount = 0;
+
+ let MockFxAccountsClient = function() {
+ FxAccountsClient.apply(this);
+ };
+ MockFxAccountsClient.prototype = {
+ __proto__: FxAccountsClient.prototype,
+ signCertificate() {
+ ++getCertCount;
+ }
+ };
+
+ let mockFxAClient = new MockFxAccountsClient();
+ browseridManager._fxaService.internal._fxAccountsClient = mockFxAClient;
+
+ let didReturn401 = false;
+ let didReturn200 = false;
+ let mockTSC = mockTokenServer(() => {
+ if (getCertCount <= 1) {
+ didReturn401 = true;
+ return {
+ status: 401,
+ headers: {"content-type": "application/json"},
+ body: JSON.stringify({}),
+ };
+ } else {
+ didReturn200 = true;
+ return {
+ status: 200,
+ headers: {"content-type": "application/json"},
+ body: JSON.stringify({
+ id: "id",
+ key: "key",
+ api_endpoint: "http://example.com/",
+ uid: "uid",
+ duration: 300,
+ })
+ };
+ }
+ });
+
+ browseridManager._tokenServerClient = mockTSC;
+
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield browseridManager.whenReadyToAuthenticate.promise;
+
+ do_check_eq(getCertCount, 2);
+ do_check_true(didReturn401);
+ do_check_true(didReturn200);
+ do_check_true(browseridManager.account);
+ do_check_true(browseridManager._token);
+ do_check_true(browseridManager.hasValidToken());
+ do_check_true(browseridManager.account);
+});
+
+
+
+add_task(function* test_getTokenErrorWithRetry() {
+ _("tokenserver sends an observer notification on various backoff headers.");
+
+ // Set Sync's backoffInterval to zero - after we simulated the backoff header
+ // it should reflect the value we sent.
+ Status.backoffInterval = 0;
+ _("Arrange for a 503 with a Retry-After header.");
+ initializeIdentityWithTokenServerResponse({
+ status: 503,
+ headers: {"content-type": "application/json",
+ "retry-after": "100"},
+ body: JSON.stringify({}),
+ });
+ let browseridManager = Service.identity;
+
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "should reject due to 503");
+
+ // The observer should have fired - check it got the value in the response.
+ Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login was rejected");
+ // Sync will have the value in ms with some slop - so check it is at least that.
+ Assert.ok(Status.backoffInterval >= 100000);
+
+ _("Arrange for a 200 with an X-Backoff header.");
+ Status.backoffInterval = 0;
+ initializeIdentityWithTokenServerResponse({
+ status: 503,
+ headers: {"content-type": "application/json",
+ "x-backoff": "200"},
+ body: JSON.stringify({}),
+ });
+ browseridManager = Service.identity;
+
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "should reject due to no token in response");
+
+ // The observer should have fired - check it got the value in the response.
+ Assert.ok(Status.backoffInterval >= 200000);
+});
+
+add_task(function* test_getKeysErrorWithBackoff() {
+ _("Auth server (via hawk) sends an observer notification on backoff headers.");
+
+ // Set Sync's backoffInterval to zero - after we simulated the backoff header
+ // it should reflect the value we sent.
+ Status.backoffInterval = 0;
+ _("Arrange for a 503 with a X-Backoff header.");
+
+ let config = makeIdentityConfig();
+ // We want no kA or kB so we attempt to fetch them.
+ delete config.fxaccount.user.kA;
+ delete config.fxaccount.user.kB;
+ config.fxaccount.user.keyFetchToken = "keyfetchtoken";
+ yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ Assert.equal(method, "get");
+ Assert.equal(uri, "http://mockedserver:9999/account/keys")
+ return {
+ status: 503,
+ headers: {"content-type": "application/json",
+ "x-backoff": "100"},
+ body: "{}",
+ }
+ });
+
+ let browseridManager = Service.identity;
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "should reject due to 503");
+
+ // The observer should have fired - check it got the value in the response.
+ Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login was rejected");
+ // Sync will have the value in ms with some slop - so check it is at least that.
+ Assert.ok(Status.backoffInterval >= 100000);
+});
+
+add_task(function* test_getKeysErrorWithRetry() {
+ _("Auth server (via hawk) sends an observer notification on retry headers.");
+
+ // Set Sync's backoffInterval to zero - after we simulated the backoff header
+ // it should reflect the value we sent.
+ Status.backoffInterval = 0;
+ _("Arrange for a 503 with a Retry-After header.");
+
+ let config = makeIdentityConfig();
+ // We want no kA or kB so we attempt to fetch them.
+ delete config.fxaccount.user.kA;
+ delete config.fxaccount.user.kB;
+ config.fxaccount.user.keyFetchToken = "keyfetchtoken";
+ yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ Assert.equal(method, "get");
+ Assert.equal(uri, "http://mockedserver:9999/account/keys")
+ return {
+ status: 503,
+ headers: {"content-type": "application/json",
+ "retry-after": "100"},
+ body: "{}",
+ }
+ });
+
+ let browseridManager = Service.identity;
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "should reject due to 503");
+
+ // The observer should have fired - check it got the value in the response.
+ Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login was rejected");
+ // Sync will have the value in ms with some slop - so check it is at least that.
+ Assert.ok(Status.backoffInterval >= 100000);
+});
+
+add_task(function* test_getHAWKErrors() {
+ _("BrowserIDManager correctly handles various HAWK failures.");
+
+ _("Arrange for a 401 - Sync should reflect an auth error.");
+ let config = makeIdentityConfig();
+ yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ Assert.equal(method, "post");
+ Assert.equal(uri, "http://mockedserver:9999/certificate/sign")
+ return {
+ status: 401,
+ headers: {"content-type": "application/json"},
+ body: JSON.stringify({}),
+ }
+ });
+ Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED, "login was rejected");
+
+ // XXX - other interesting responses to return?
+
+ // And for good measure, some totally "unexpected" errors - we generally
+ // assume these problems are going to magically go away at some point.
+ _("Arrange for an empty body with a 200 response - should reflect a network error.");
+ yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ Assert.equal(method, "post");
+ Assert.equal(uri, "http://mockedserver:9999/certificate/sign")
+ return {
+ status: 200,
+ headers: [],
+ body: "",
+ }
+ });
+ Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login state is LOGIN_FAILED_NETWORK_ERROR");
+});
+
+add_task(function* test_getGetKeysFailing401() {
+ _("BrowserIDManager correctly handles 401 responses fetching keys.");
+
+ _("Arrange for a 401 - Sync should reflect an auth error.");
+ let config = makeIdentityConfig();
+ // We want no kA or kB so we attempt to fetch them.
+ delete config.fxaccount.user.kA;
+ delete config.fxaccount.user.kB;
+ config.fxaccount.user.keyFetchToken = "keyfetchtoken";
+ yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ Assert.equal(method, "get");
+ Assert.equal(uri, "http://mockedserver:9999/account/keys")
+ return {
+ status: 401,
+ headers: {"content-type": "application/json"},
+ body: "{}",
+ }
+ });
+ Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED, "login was rejected");
+});
+
+add_task(function* test_getGetKeysFailing503() {
+ _("BrowserIDManager correctly handles 5XX responses fetching keys.");
+
+ _("Arrange for a 503 - Sync should reflect a network error.");
+ let config = makeIdentityConfig();
+ // We want no kA or kB so we attempt to fetch them.
+ delete config.fxaccount.user.kA;
+ delete config.fxaccount.user.kB;
+ config.fxaccount.user.keyFetchToken = "keyfetchtoken";
+ yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ Assert.equal(method, "get");
+ Assert.equal(uri, "http://mockedserver:9999/account/keys")
+ return {
+ status: 503,
+ headers: {"content-type": "application/json"},
+ body: "{}",
+ }
+ });
+ Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "state reflects network error");
+});
+
+add_task(function* test_getKeysMissing() {
+ _("BrowserIDManager correctly handles getKeys succeeding but not returning keys.");
+
+ let browseridManager = new BrowserIDManager();
+ let identityConfig = makeIdentityConfig();
+ // our mock identity config already has kA and kB - remove them or we never
+ // try and fetch them.
+ delete identityConfig.fxaccount.user.kA;
+ delete identityConfig.fxaccount.user.kB;
+ identityConfig.fxaccount.user.keyFetchToken = 'keyFetchToken';
+
+ configureFxAccountIdentity(browseridManager, identityConfig);
+
+ // Mock a fxAccounts object that returns no keys
+ let fxa = new FxAccounts({
+ fetchAndUnwrapKeys: function () {
+ return Promise.resolve({});
+ },
+ fxAccountsClient: new MockFxAccountsClient(),
+ newAccountState(credentials) {
+ // We only expect this to be called with null indicating the (mock)
+ // storage should be read.
+ if (credentials) {
+ throw new Error("Not expecting to have credentials passed");
+ }
+ let storageManager = new MockFxaStorageManager();
+ storageManager.initialize(identityConfig.fxaccount.user);
+ return new AccountState(storageManager);
+ },
+ });
+
+ // Add a mock to the currentAccountState object.
+ fxa.internal.currentAccountState.getCertificate = function(data, keyPair, mustBeValidUntil) {
+ this.cert = {
+ validUntil: fxa.internal.now() + CERT_LIFETIME,
+ cert: "certificate",
+ };
+ return Promise.resolve(this.cert.cert);
+ };
+
+ browseridManager._fxaService = fxa;
+
+ yield browseridManager.initializeWithCurrentIdentity();
+
+ let ex;
+ try {
+ yield browseridManager.whenReadyToAuthenticate.promise;
+ } catch (e) {
+ ex = e;
+ }
+
+ Assert.ok(ex.message.indexOf("missing kA or kB") >= 0);
+});
+
+add_task(function* test_signedInUserMissing() {
+ _("BrowserIDManager detects getSignedInUser returning incomplete account data");
+
+ let browseridManager = new BrowserIDManager();
+ let config = makeIdentityConfig();
+ // Delete stored keys and the key fetch token.
+ delete identityConfig.fxaccount.user.kA;
+ delete identityConfig.fxaccount.user.kB;
+ delete identityConfig.fxaccount.user.keyFetchToken;
+
+ configureFxAccountIdentity(browseridManager, identityConfig);
+
+ let fxa = new FxAccounts({
+ fetchAndUnwrapKeys: function () {
+ return Promise.resolve({});
+ },
+ fxAccountsClient: new MockFxAccountsClient(),
+ newAccountState(credentials) {
+ // We only expect this to be called with null indicating the (mock)
+ // storage should be read.
+ if (credentials) {
+ throw new Error("Not expecting to have credentials passed");
+ }
+ let storageManager = new MockFxaStorageManager();
+ storageManager.initialize(identityConfig.fxaccount.user);
+ return new AccountState(storageManager);
+ },
+ });
+
+ browseridManager._fxaService = fxa;
+
+ let status = yield browseridManager.unlockAndVerifyAuthState();
+ Assert.equal(status, LOGIN_FAILED_LOGIN_REJECTED);
+});
+
+// End of tests
+// Utility functions follow
+
+// Create a new browserid_identity object and initialize it with a
+// hawk mock that simulates HTTP responses.
+// The callback function will be called each time the mocked hawk server wants
+// to make a request. The result of the callback should be the mock response
+// object that will be returned to hawk.
+// A token server mock will be used that doesn't hit a server, so we move
+// directly to a hawk request.
+function* initializeIdentityWithHAWKResponseFactory(config, cbGetResponse) {
+ // A mock request object.
+ function MockRESTRequest(uri, credentials, extra) {
+ this._uri = uri;
+ this._credentials = credentials;
+ this._extra = extra;
+ };
+ MockRESTRequest.prototype = {
+ setHeader: function() {},
+ post: function(data, callback) {
+ this.response = cbGetResponse("post", data, this._uri, this._credentials, this._extra);
+ callback.call(this);
+ },
+ get: function(callback) {
+ // Skip /status requests (browserid_identity checks if the account still
+ // exists after an auth error)
+ if (this._uri.startsWith("http://mockedserver:9999/account/status")) {
+ this.response = {
+ status: 200,
+ headers: {"content-type": "application/json"},
+ body: JSON.stringify({exists: true}),
+ };
+ } else {
+ this.response = cbGetResponse("get", null, this._uri, this._credentials, this._extra);
+ }
+ callback.call(this);
+ }
+ }
+
+ // The hawk client.
+ function MockedHawkClient() {}
+ MockedHawkClient.prototype = new HawkClient("http://mockedserver:9999");
+ MockedHawkClient.prototype.constructor = MockedHawkClient;
+ MockedHawkClient.prototype.newHAWKAuthenticatedRESTRequest = function(uri, credentials, extra) {
+ return new MockRESTRequest(uri, credentials, extra);
+ }
+ // Arrange for the same observerPrefix as FxAccountsClient uses
+ MockedHawkClient.prototype.observerPrefix = "FxA:hawk";
+
+ // tie it all together - configureFxAccountIdentity isn't useful here :(
+ let fxaClient = new MockFxAccountsClient();
+ fxaClient.hawk = new MockedHawkClient();
+ let internal = {
+ fxAccountsClient: fxaClient,
+ newAccountState(credentials) {
+ // We only expect this to be called with null indicating the (mock)
+ // storage should be read.
+ if (credentials) {
+ throw new Error("Not expecting to have credentials passed");
+ }
+ let storageManager = new MockFxaStorageManager();
+ storageManager.initialize(config.fxaccount.user);
+ return new AccountState(storageManager);
+ },
+ }
+ let fxa = new FxAccounts(internal);
+
+ browseridManager._fxaService = fxa;
+ browseridManager._signedInUser = null;
+ yield browseridManager.initializeWithCurrentIdentity();
+ yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ "expecting rejection due to hawk error");
+}
+
+
+function getTimestamp(hawkAuthHeader) {
+ return parseInt(/ts="(\d+)"/.exec(hawkAuthHeader)[1], 10) * SECOND_MS;
+}
+
+function getTimestampDelta(hawkAuthHeader, now=Date.now()) {
+ return Math.abs(getTimestamp(hawkAuthHeader) - now);
+}
+
+function mockTokenServer(func) {
+ let requestLog = Log.repository.getLogger("testing.mock-rest");
+ if (!requestLog.appenders.length) { // might as well see what it says :)
+ requestLog.addAppender(new Log.DumpAppender());
+ requestLog.level = Log.Level.Trace;
+ }
+ function MockRESTRequest(url) {};
+ MockRESTRequest.prototype = {
+ _log: requestLog,
+ setHeader: function() {},
+ get: function(callback) {
+ this.response = func();
+ callback.call(this);
+ }
+ }
+ // The mocked TokenServer client which will get the response.
+ function MockTSC() { }
+ MockTSC.prototype = new TokenServerClient();
+ MockTSC.prototype.constructor = MockTSC;
+ MockTSC.prototype.newRESTRequest = function(url) {
+ return new MockRESTRequest(url);
+ }
+ // Arrange for the same observerPrefix as browserid_identity uses.
+ MockTSC.prototype.observerPrefix = "weave:service";
+ return new MockTSC();
+}
diff --git a/services/sync/tests/unit/test_clients_engine.js b/services/sync/tests/unit/test_clients_engine.js
new file mode 100644
index 000000000..d2123f80a
--- /dev/null
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -0,0 +1,1439 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+const MORE_THAN_CLIENTS_TTL_REFRESH = 691200; // 8 days
+const LESS_THAN_CLIENTS_TTL_REFRESH = 86400; // 1 day
+
+var engine = Service.clientsEngine;
+
+/**
+ * Unpack the record with this ID, and verify that it has the same version that
+ * we should be putting into records.
+ */
+function check_record_version(user, id) {
+ let payload = JSON.parse(user.collection("clients").wbo(id).payload);
+
+ let rec = new CryptoWrapper();
+ rec.id = id;
+ rec.collection = "clients";
+ rec.ciphertext = payload.ciphertext;
+ rec.hmac = payload.hmac;
+ rec.IV = payload.IV;
+
+ let cleartext = rec.decrypt(Service.collectionKeys.keyForCollection("clients"));
+
+ _("Payload is " + JSON.stringify(cleartext));
+ equal(Services.appinfo.version, cleartext.version);
+ equal(2, cleartext.protocols.length);
+ equal("1.1", cleartext.protocols[0]);
+ equal("1.5", cleartext.protocols[1]);
+}
+
+add_test(function test_bad_hmac() {
+ _("Ensure that Clients engine deletes corrupt records.");
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let deletedCollections = [];
+ let deletedItems = [];
+ let callback = {
+ __proto__: SyncServerCallback,
+ onItemDeleted: function (username, coll, wboID) {
+ deletedItems.push(coll + "/" + wboID);
+ },
+ onCollectionDeleted: function (username, coll) {
+ deletedCollections.push(coll);
+ }
+ }
+ let server = serverForUsers({"foo": "password"}, contents, callback);
+ let user = server.user("foo");
+
+ function check_clients_count(expectedCount) {
+ let stack = Components.stack.caller;
+ let coll = user.collection("clients");
+
+ // Treat a non-existent collection as empty.
+ equal(expectedCount, coll ? coll.count() : 0, stack);
+ }
+
+ function check_client_deleted(id) {
+ let coll = user.collection("clients");
+ let wbo = coll.wbo(id);
+ return !wbo || !wbo.payload;
+ }
+
+ function uploadNewKeys() {
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+ }
+
+ try {
+ ensureLegacyIdentityManager();
+ let passphrase = "abcdeabcdeabcdeabcdeabcdea";
+ Service.serverURL = server.baseURI;
+ Service.login("foo", "ilovejane", passphrase);
+
+ generateNewKeys(Service.collectionKeys);
+
+ _("First sync, client record is uploaded");
+ equal(engine.lastRecordUpload, 0);
+ check_clients_count(0);
+ engine._sync();
+ check_clients_count(1);
+ ok(engine.lastRecordUpload > 0);
+
+ // Our uploaded record has a version.
+ check_record_version(user, engine.localID);
+
+ // Initial setup can wipe the server, so clean up.
+ deletedCollections = [];
+ deletedItems = [];
+
+ _("Change our keys and our client ID, reupload keys.");
+ let oldLocalID = engine.localID; // Preserve to test for deletion!
+ engine.localID = Utils.makeGUID();
+ engine.resetClient();
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+
+ _("Sync.");
+ engine._sync();
+
+ _("Old record " + oldLocalID + " was deleted, new one uploaded.");
+ check_clients_count(1);
+ check_client_deleted(oldLocalID);
+
+ _("Now change our keys but don't upload them. " +
+ "That means we get an HMAC error but redownload keys.");
+ Service.lastHMACEvent = 0;
+ engine.localID = Utils.makeGUID();
+ engine.resetClient();
+ generateNewKeys(Service.collectionKeys);
+ deletedCollections = [];
+ deletedItems = [];
+ check_clients_count(1);
+ engine._sync();
+
+ _("Old record was not deleted, new one uploaded.");
+ equal(deletedCollections.length, 0);
+ equal(deletedItems.length, 0);
+ check_clients_count(2);
+
+ _("Now try the scenario where our keys are wrong *and* there's a bad record.");
+ // Clean up and start fresh.
+ user.collection("clients")._wbos = {};
+ Service.lastHMACEvent = 0;
+ engine.localID = Utils.makeGUID();
+ engine.resetClient();
+ deletedCollections = [];
+ deletedItems = [];
+ check_clients_count(0);
+
+ uploadNewKeys();
+
+ // Sync once to upload a record.
+ engine._sync();
+ check_clients_count(1);
+
+ // Generate and upload new keys, so the old client record is wrong.
+ uploadNewKeys();
+
+ // Create a new client record and new keys. Now our keys are wrong, as well
+ // as the object on the server. We'll download the new keys and also delete
+ // the bad client record.
+ oldLocalID = engine.localID; // Preserve to test for deletion!
+ engine.localID = Utils.makeGUID();
+ engine.resetClient();
+ generateNewKeys(Service.collectionKeys);
+ let oldKey = Service.collectionKeys.keyForCollection();
+
+ equal(deletedCollections.length, 0);
+ equal(deletedItems.length, 0);
+ engine._sync();
+ equal(deletedItems.length, 1);
+ check_client_deleted(oldLocalID);
+ check_clients_count(1);
+ let newKey = Service.collectionKeys.keyForCollection();
+ ok(!oldKey.equals(newKey));
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_properties() {
+ _("Test lastRecordUpload property");
+ try {
+ equal(Svc.Prefs.get("clients.lastRecordUpload"), undefined);
+ equal(engine.lastRecordUpload, 0);
+
+ let now = Date.now();
+ engine.lastRecordUpload = now / 1000;
+ equal(engine.lastRecordUpload, Math.floor(now / 1000));
+ } finally {
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ }
+});
+
+add_test(function test_full_sync() {
+ _("Ensure that Clients engine fetches all records for each sync.");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ let activeID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({
+ id: activeID,
+ name: "Active client",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ let deletedID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(deletedID, encryptPayload({
+ id: deletedID,
+ name: "Client to delete",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ try {
+ let store = engine._store;
+
+ _("First sync. 2 records downloaded; our record uploaded.");
+ strictEqual(engine.lastRecordUpload, 0);
+ engine._sync();
+ ok(engine.lastRecordUpload > 0);
+ deepEqual(user.collection("clients").keys().sort(),
+ [activeID, deletedID, engine.localID].sort(),
+ "Our record should be uploaded on first sync");
+ deepEqual(Object.keys(store.getAllIDs()).sort(),
+ [activeID, deletedID, engine.localID].sort(),
+ "Other clients should be downloaded on first sync");
+
+ _("Delete a record, then sync again");
+ let collection = server.getCollection("foo", "clients");
+ collection.remove(deletedID);
+ // Simulate a timestamp update in info/collections.
+ engine.lastModified = now;
+ engine._sync();
+
+ _("Record should be updated");
+ deepEqual(Object.keys(store.getAllIDs()).sort(),
+ [activeID, engine.localID].sort(),
+ "Deleted client should be removed on next sync");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_sync() {
+ _("Ensure that Clients engine uploads a new client record once a week.");
+
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ function clientWBO() {
+ return user.collection("clients").wbo(engine.localID);
+ }
+
+ try {
+
+ _("First sync. Client record is uploaded.");
+ equal(clientWBO(), undefined);
+ equal(engine.lastRecordUpload, 0);
+ engine._sync();
+ ok(!!clientWBO().payload);
+ ok(engine.lastRecordUpload > 0);
+
+ _("Let's time travel more than a week back, new record should've been uploaded.");
+ engine.lastRecordUpload -= MORE_THAN_CLIENTS_TTL_REFRESH;
+ let lastweek = engine.lastRecordUpload;
+ clientWBO().payload = undefined;
+ engine._sync();
+ ok(!!clientWBO().payload);
+ ok(engine.lastRecordUpload > lastweek);
+
+ _("Remove client record.");
+ engine.removeClientData();
+ equal(clientWBO().payload, undefined);
+
+ _("Time travel one day back, no record uploaded.");
+ engine.lastRecordUpload -= LESS_THAN_CLIENTS_TTL_REFRESH;
+ let yesterday = engine.lastRecordUpload;
+ engine._sync();
+ equal(clientWBO().payload, undefined);
+ equal(engine.lastRecordUpload, yesterday);
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_client_name_change() {
+ _("Ensure client name change incurs a client record update.");
+
+ let tracker = engine._tracker;
+
+ let localID = engine.localID;
+ let initialName = engine.localName;
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+ _("initial name: " + initialName);
+
+ // Tracker already has data, so clear it.
+ tracker.clearChangedIDs();
+
+ let initialScore = tracker.score;
+
+ equal(Object.keys(tracker.changedIDs).length, 0);
+
+ Svc.Prefs.set("client.name", "new name");
+
+ _("new name: " + engine.localName);
+ notEqual(initialName, engine.localName);
+ equal(Object.keys(tracker.changedIDs).length, 1);
+ ok(engine.localID in tracker.changedIDs);
+ ok(tracker.score > initialScore);
+ ok(tracker.score >= SCORE_INCREMENT_XLARGE);
+
+ Svc.Obs.notify("weave:engine:stop-tracking");
+
+ run_next_test();
+});
+
+add_test(function test_send_command() {
+ _("Verifies _sendCommandToClient puts commands in the outbound queue.");
+
+ let store = engine._store;
+ let tracker = engine._tracker;
+ let remoteId = Utils.makeGUID();
+ let rec = new ClientsRec("clients", remoteId);
+
+ store.create(rec);
+ let remoteRecord = store.createRecord(remoteId, "clients");
+
+ let action = "testCommand";
+ let args = ["foo", "bar"];
+
+ engine._sendCommandToClient(action, args, remoteId);
+
+ let newRecord = store._remoteClients[remoteId];
+ let clientCommands = engine._readCommands()[remoteId];
+ notEqual(newRecord, undefined);
+ equal(clientCommands.length, 1);
+
+ let command = clientCommands[0];
+ equal(command.command, action);
+ equal(command.args.length, 2);
+ deepEqual(command.args, args);
+
+ notEqual(tracker.changedIDs[remoteId], undefined);
+
+ run_next_test();
+});
+
+add_test(function test_command_validation() {
+ _("Verifies that command validation works properly.");
+
+ let store = engine._store;
+
+ let testCommands = [
+ ["resetAll", [], true ],
+ ["resetAll", ["foo"], false],
+ ["resetEngine", ["tabs"], true ],
+ ["resetEngine", [], false],
+ ["wipeAll", [], true ],
+ ["wipeAll", ["foo"], false],
+ ["wipeEngine", ["tabs"], true ],
+ ["wipeEngine", [], false],
+ ["logout", [], true ],
+ ["logout", ["foo"], false],
+ ["__UNKNOWN__", [], false]
+ ];
+
+ for (let [action, args, expectedResult] of testCommands) {
+ let remoteId = Utils.makeGUID();
+ let rec = new ClientsRec("clients", remoteId);
+
+ store.create(rec);
+ store.createRecord(remoteId, "clients");
+
+ engine.sendCommand(action, args, remoteId);
+
+ let newRecord = store._remoteClients[remoteId];
+ notEqual(newRecord, undefined);
+
+ let clientCommands = engine._readCommands()[remoteId];
+
+ if (expectedResult) {
+ _("Ensuring command is sent: " + action);
+ equal(clientCommands.length, 1);
+
+ let command = clientCommands[0];
+ equal(command.command, action);
+ deepEqual(command.args, args);
+
+ notEqual(engine._tracker, undefined);
+ notEqual(engine._tracker.changedIDs[remoteId], undefined);
+ } else {
+ _("Ensuring command is scrubbed: " + action);
+ equal(clientCommands, undefined);
+
+ if (store._tracker) {
+ equal(engine._tracker[remoteId], undefined);
+ }
+ }
+
+ }
+ run_next_test();
+});
+
+add_test(function test_command_duplication() {
+ _("Ensures duplicate commands are detected and not added");
+
+ let store = engine._store;
+ let remoteId = Utils.makeGUID();
+ let rec = new ClientsRec("clients", remoteId);
+ store.create(rec);
+ store.createRecord(remoteId, "clients");
+
+ let action = "resetAll";
+ let args = [];
+
+ engine.sendCommand(action, args, remoteId);
+ engine.sendCommand(action, args, remoteId);
+
+ let newRecord = store._remoteClients[remoteId];
+ let clientCommands = engine._readCommands()[remoteId];
+ equal(clientCommands.length, 1);
+
+ _("Check variant args length");
+ engine._saveCommands({});
+
+ action = "resetEngine";
+ engine.sendCommand(action, [{ x: "foo" }], remoteId);
+ engine.sendCommand(action, [{ x: "bar" }], remoteId);
+
+ _("Make sure we spot a real dupe argument.");
+ engine.sendCommand(action, [{ x: "bar" }], remoteId);
+
+ clientCommands = engine._readCommands()[remoteId];
+ equal(clientCommands.length, 2);
+
+ run_next_test();
+});
+
+add_test(function test_command_invalid_client() {
+ _("Ensures invalid client IDs are caught");
+
+ let id = Utils.makeGUID();
+ let error;
+
+ try {
+ engine.sendCommand("wipeAll", [], id);
+ } catch (ex) {
+ error = ex;
+ }
+
+ equal(error.message.indexOf("Unknown remote client ID: "), 0);
+
+ run_next_test();
+});
+
+add_test(function test_process_incoming_commands() {
+ _("Ensures local commands are executed");
+
+ engine.localCommands = [{ command: "logout", args: [] }];
+
+ let ev = "weave:service:logout:finish";
+
+ var handler = function() {
+ Svc.Obs.remove(ev, handler);
+
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+
+ run_next_test();
+ };
+
+ Svc.Obs.add(ev, handler);
+
+ // logout command causes processIncomingCommands to return explicit false.
+ ok(!engine.processIncomingCommands());
+});
+
+add_test(function test_filter_duplicate_names() {
+ _("Ensure that we exclude clients with identical names that haven't synced in a week.");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ // Synced recently.
+ let recentID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(recentID, encryptPayload({
+ id: recentID,
+ name: "My Phone",
+ type: "mobile",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ // Dupe of our client, synced more than 1 week ago.
+ let dupeID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(dupeID, encryptPayload({
+ id: dupeID,
+ name: engine.localName,
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 604810));
+
+ // Synced more than 1 week ago, but not a dupe.
+ let oldID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(oldID, encryptPayload({
+ id: oldID,
+ name: "My old desktop",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 604820));
+
+ try {
+ let store = engine._store;
+
+ _("First sync");
+ strictEqual(engine.lastRecordUpload, 0);
+ engine._sync();
+ ok(engine.lastRecordUpload > 0);
+ deepEqual(user.collection("clients").keys().sort(),
+ [recentID, dupeID, oldID, engine.localID].sort(),
+ "Our record should be uploaded on first sync");
+
+ deepEqual(Object.keys(store.getAllIDs()).sort(),
+ [recentID, dupeID, oldID, engine.localID].sort(),
+ "Duplicate ID should remain in getAllIDs");
+ ok(engine._store.itemExists(dupeID), "Dupe ID should be considered as existing for Sync methods.");
+ ok(!engine.remoteClientExists(dupeID), "Dupe ID should not be considered as existing for external methods.");
+
+ // dupe desktop should not appear in .deviceTypes.
+ equal(engine.deviceTypes.get("desktop"), 2);
+ equal(engine.deviceTypes.get("mobile"), 1);
+
+ // dupe desktop should not appear in stats
+ deepEqual(engine.stats, {
+ hasMobile: 1,
+ names: [engine.localName, "My Phone", "My old desktop"],
+ numClients: 3,
+ });
+
+ ok(engine.remoteClientExists(oldID), "non-dupe ID should exist.");
+ ok(!engine.remoteClientExists(dupeID), "dupe ID should not exist");
+ equal(engine.remoteClients.length, 2, "dupe should not be in remoteClients");
+
+ // Check that a subsequent Sync doesn't report anything as being processed.
+ let counts;
+ Svc.Obs.add("weave:engine:sync:applied", function observe(subject, data) {
+ Svc.Obs.remove("weave:engine:sync:applied", observe);
+ counts = subject;
+ });
+
+ engine._sync();
+ equal(counts.applied, 0); // We didn't report applying any records.
+ equal(counts.reconciled, 4); // We reported reconcilliation for all records
+ equal(counts.succeeded, 0);
+ equal(counts.failed, 0);
+ equal(counts.newFailed, 0);
+
+ _("Broadcast logout to all clients");
+ engine.sendCommand("logout", []);
+ engine._sync();
+
+ let collection = server.getCollection("foo", "clients");
+ let recentPayload = JSON.parse(JSON.parse(collection.payload(recentID)).ciphertext);
+ deepEqual(recentPayload.commands, [{ command: "logout", args: [] }],
+ "Should send commands to the recent client");
+
+ let oldPayload = JSON.parse(JSON.parse(collection.payload(oldID)).ciphertext);
+ deepEqual(oldPayload.commands, [{ command: "logout", args: [] }],
+ "Should send commands to the week-old client");
+
+ let dupePayload = JSON.parse(JSON.parse(collection.payload(dupeID)).ciphertext);
+ deepEqual(dupePayload.commands, [],
+ "Should not send commands to the dupe client");
+
+ _("Update the dupe client's modified time");
+ server.insertWBO("foo", "clients", new ServerWBO(dupeID, encryptPayload({
+ id: dupeID,
+ name: engine.localName,
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ _("Second sync.");
+ engine._sync();
+
+ deepEqual(Object.keys(store.getAllIDs()).sort(),
+ [recentID, oldID, dupeID, engine.localID].sort(),
+ "Stale client synced, so it should no longer be marked as a dupe");
+
+ ok(engine.remoteClientExists(dupeID), "Dupe ID should appear as it synced.");
+
+ // Recently synced dupe desktop should appear in .deviceTypes.
+ equal(engine.deviceTypes.get("desktop"), 3);
+
+ // Recently synced dupe desktop should now appear in stats
+ deepEqual(engine.stats, {
+ hasMobile: 1,
+ names: [engine.localName, "My Phone", engine.localName, "My old desktop"],
+ numClients: 4,
+ });
+
+ ok(engine.remoteClientExists(dupeID), "recently synced dupe ID should now exist");
+ equal(engine.remoteClients.length, 3, "recently synced dupe should now be in remoteClients");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_command_sync() {
+ _("Ensure that commands are synced across clients.");
+
+ engine._store.wipe();
+ generateNewKeys(Service.collectionKeys);
+
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ new SyncTestingInfrastructure(server.server);
+
+ let user = server.user("foo");
+ let remoteId = Utils.makeGUID();
+
+ function clientWBO(id) {
+ return user.collection("clients").wbo(id);
+ }
+
+ _("Create remote client record");
+ server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({
+ id: remoteId,
+ name: "Remote client",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), Date.now() / 1000));
+
+ try {
+ _("Syncing.");
+ engine._sync();
+
+ _("Checking remote record was downloaded.");
+ let clientRecord = engine._store._remoteClients[remoteId];
+ notEqual(clientRecord, undefined);
+ equal(clientRecord.commands.length, 0);
+
+ _("Send a command to the remote client.");
+ engine.sendCommand("wipeAll", []);
+ let clientCommands = engine._readCommands()[remoteId];
+ equal(clientCommands.length, 1);
+ engine._sync();
+
+ _("Checking record was uploaded.");
+ notEqual(clientWBO(engine.localID).payload, undefined);
+ ok(engine.lastRecordUpload > 0);
+
+ notEqual(clientWBO(remoteId).payload, undefined);
+
+ Svc.Prefs.set("client.GUID", remoteId);
+ engine._resetClient();
+ equal(engine.localID, remoteId);
+ _("Performing sync on resetted client.");
+ engine._sync();
+ notEqual(engine.localCommands, undefined);
+ equal(engine.localCommands.length, 1);
+
+ let command = engine.localCommands[0];
+ equal(command.command, "wipeAll");
+ equal(command.args.length, 0);
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+
+ try {
+ let collection = server.getCollection("foo", "clients");
+ collection.remove(remoteId);
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_send_uri_to_client_for_display() {
+ _("Ensure sendURIToClientForDisplay() sends command properly.");
+
+ let tracker = engine._tracker;
+ let store = engine._store;
+
+ let remoteId = Utils.makeGUID();
+ let rec = new ClientsRec("clients", remoteId);
+ rec.name = "remote";
+ store.create(rec);
+ let remoteRecord = store.createRecord(remoteId, "clients");
+
+ tracker.clearChangedIDs();
+ let initialScore = tracker.score;
+
+ let uri = "http://www.mozilla.org/";
+ let title = "Title of the Page";
+ engine.sendURIToClientForDisplay(uri, remoteId, title);
+
+ let newRecord = store._remoteClients[remoteId];
+
+ notEqual(newRecord, undefined);
+ let clientCommands = engine._readCommands()[remoteId];
+ equal(clientCommands.length, 1);
+
+ let command = clientCommands[0];
+ equal(command.command, "displayURI");
+ equal(command.args.length, 3);
+ equal(command.args[0], uri);
+ equal(command.args[1], engine.localID);
+ equal(command.args[2], title);
+
+ ok(tracker.score > initialScore);
+ ok(tracker.score - initialScore >= SCORE_INCREMENT_XLARGE);
+
+ _("Ensure unknown client IDs result in exception.");
+ let unknownId = Utils.makeGUID();
+ let error;
+
+ try {
+ engine.sendURIToClientForDisplay(uri, unknownId);
+ } catch (ex) {
+ error = ex;
+ }
+
+ equal(error.message.indexOf("Unknown remote client ID: "), 0);
+
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+
+ run_next_test();
+});
+
+add_test(function test_receive_display_uri() {
+ _("Ensure processing of received 'displayURI' commands works.");
+
+ // We don't set up WBOs and perform syncing because other tests verify
+ // the command API works as advertised. This saves us a little work.
+
+ let uri = "http://www.mozilla.org/";
+ let remoteId = Utils.makeGUID();
+ let title = "Page Title!";
+
+ let command = {
+ command: "displayURI",
+ args: [uri, remoteId, title],
+ };
+
+ engine.localCommands = [command];
+
+ // Received 'displayURI' command should result in the topic defined below
+ // being called.
+ let ev = "weave:engine:clients:display-uris";
+
+ let handler = function(subject, data) {
+ Svc.Obs.remove(ev, handler);
+
+ equal(subject[0].uri, uri);
+ equal(subject[0].clientId, remoteId);
+ equal(subject[0].title, title);
+ equal(data, null);
+
+ run_next_test();
+ };
+
+ Svc.Obs.add(ev, handler);
+
+ ok(engine.processIncomingCommands());
+
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+});
+
+add_test(function test_optional_client_fields() {
+ _("Ensure that we produce records with the fields added in Bug 1097222.");
+
+ const SUPPORTED_PROTOCOL_VERSIONS = ["1.1", "1.5"];
+ let local = engine._store.createRecord(engine.localID, "clients");
+ equal(local.name, engine.localName);
+ equal(local.type, engine.localType);
+ equal(local.version, Services.appinfo.version);
+ deepEqual(local.protocols, SUPPORTED_PROTOCOL_VERSIONS);
+
+ // Optional fields.
+ // Make sure they're what they ought to be...
+ equal(local.os, Services.appinfo.OS);
+ equal(local.appPackage, Services.appinfo.ID);
+
+ // ... and also that they're non-empty.
+ ok(!!local.os);
+ ok(!!local.appPackage);
+ ok(!!local.application);
+
+ // We don't currently populate device or formfactor.
+ // See Bug 1100722, Bug 1100723.
+
+ engine._resetClient();
+ run_next_test();
+});
+
+add_test(function test_merge_commands() {
+ _("Verifies local commands for remote clients are merged with the server's");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ let desktopID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
+ id: desktopID,
+ name: "Desktop client",
+ type: "desktop",
+ commands: [{
+ command: "displayURI",
+ args: ["https://example.com", engine.localID, "Yak Herders Anonymous"],
+ }],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ let mobileID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(mobileID, encryptPayload({
+ id: mobileID,
+ name: "Mobile client",
+ type: "mobile",
+ commands: [{
+ command: "logout",
+ args: [],
+ }],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ try {
+ let store = engine._store;
+
+ _("First sync. 2 records downloaded.");
+ strictEqual(engine.lastRecordUpload, 0);
+ engine._sync();
+
+ _("Broadcast logout to all clients");
+ engine.sendCommand("logout", []);
+ engine._sync();
+
+ let collection = server.getCollection("foo", "clients");
+ let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext);
+ deepEqual(desktopPayload.commands, [{
+ command: "displayURI",
+ args: ["https://example.com", engine.localID, "Yak Herders Anonymous"],
+ }, {
+ command: "logout",
+ args: [],
+ }], "Should send the logout command to the desktop client");
+
+ let mobilePayload = JSON.parse(JSON.parse(collection.payload(mobileID)).ciphertext);
+ deepEqual(mobilePayload.commands, [{ command: "logout", args: [] }],
+ "Should not send a duplicate logout to the mobile client");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_duplicate_remote_commands() {
+ _("Verifies local commands for remote clients are sent only once (bug 1289287)");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ let desktopID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
+ id: desktopID,
+ name: "Desktop client",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ try {
+ let store = engine._store;
+
+ _("First sync. 1 record downloaded.");
+ strictEqual(engine.lastRecordUpload, 0);
+ engine._sync();
+
+ _("Send tab to client");
+ engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"]);
+ engine._sync();
+
+ _("Simulate the desktop client consuming the command and syncing to the server");
+ server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
+ id: desktopID,
+ name: "Desktop client",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ _("Send another tab to the desktop client");
+ engine.sendCommand("displayURI", ["https://foobar.com", engine.localID, "Foo bar!"], desktopID);
+ engine._sync();
+
+ let collection = server.getCollection("foo", "clients");
+ let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext);
+ deepEqual(desktopPayload.commands, [{
+ command: "displayURI",
+ args: ["https://foobar.com", engine.localID, "Foo bar!"],
+ }], "Should only send the second command to the desktop client");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_upload_after_reboot() {
+ _("Multiple downloads, reboot, then upload (bug 1289287)");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ let deviceBID = Utils.makeGUID();
+ let deviceCID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({
+ id: deviceBID,
+ name: "Device B",
+ type: "desktop",
+ commands: [{
+ command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
+ }],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+ server.insertWBO("foo", "clients", new ServerWBO(deviceCID, encryptPayload({
+ id: deviceCID,
+ name: "Device C",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ try {
+ let store = engine._store;
+
+ _("First sync. 2 records downloaded.");
+ strictEqual(engine.lastRecordUpload, 0);
+ engine._sync();
+
+ _("Send tab to client");
+ engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"], deviceBID);
+
+ const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
+ SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten.call(engine, [], [deviceBID]);
+ engine._sync();
+
+ let collection = server.getCollection("foo", "clients");
+ let deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
+ deepEqual(deviceBPayload.commands, [{
+ command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
+ }], "Should be the same because the upload failed");
+
+ _("Simulate the client B consuming the command and syncing to the server");
+ server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({
+ id: deviceBID,
+ name: "Device B",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ // Simulate reboot
+ SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing;
+ engine = Service.clientsEngine = new ClientEngine(Service);
+
+ engine._sync();
+
+ deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
+ deepEqual(deviceBPayload.commands, [{
+ command: "displayURI",
+ args: ["https://example.com", engine.localID, "Yak Herders Anonymous"],
+ }], "Should only had written our outgoing command");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_keep_cleared_commands_after_reboot() {
+ _("Download commands, fail upload, reboot, then apply new commands (bug 1289287)");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ let deviceBID = Utils.makeGUID();
+ let deviceCID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload({
+ id: engine.localID,
+ name: "Device A",
+ type: "desktop",
+ commands: [{
+ command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"]
+ },
+ {
+ command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
+ }],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+ server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({
+ id: deviceBID,
+ name: "Device B",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+ server.insertWBO("foo", "clients", new ServerWBO(deviceCID, encryptPayload({
+ id: deviceCID,
+ name: "Device C",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ try {
+ let store = engine._store;
+
+ _("First sync. Download remote and our record.");
+ strictEqual(engine.lastRecordUpload, 0);
+
+ let collection = server.getCollection("foo", "clients");
+ const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
+ SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten.call(engine, [], [deviceBID]);
+ let commandsProcessed = 0;
+ engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length };
+
+ engine._sync();
+ engine.processIncomingCommands(); // Not called by the engine.sync(), gotta call it ourselves
+ equal(commandsProcessed, 2, "We processed 2 commands");
+
+ let localRemoteRecord = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
+ deepEqual(localRemoteRecord.commands, [{
+ command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"]
+ },
+ {
+ command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
+ }], "Should be the same because the upload failed");
+
+ // Another client sends another link
+ server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload({
+ id: engine.localID,
+ name: "Device A",
+ type: "desktop",
+ commands: [{
+ command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"]
+ },
+ {
+ command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
+ },
+ {
+ command: "displayURI", args: ["https://deviceclink2.com", deviceCID, "Device C link 2"]
+ }],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ // Simulate reboot
+ SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing;
+ engine = Service.clientsEngine = new ClientEngine(Service);
+
+ commandsProcessed = 0;
+ engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length };
+ engine._sync();
+ engine.processIncomingCommands();
+ equal(commandsProcessed, 1, "We processed one command (the other were cleared)");
+
+ localRemoteRecord = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
+ deepEqual(localRemoteRecord.commands, [], "Should be empty");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+
+ // Reset service (remove mocks)
+ engine = Service.clientsEngine = new ClientEngine(Service);
+ engine._resetClient();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_deleted_commands() {
+ _("Verifies commands for a deleted client are discarded");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ let activeID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({
+ id: activeID,
+ name: "Active client",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ let deletedID = Utils.makeGUID();
+ server.insertWBO("foo", "clients", new ServerWBO(deletedID, encryptPayload({
+ id: deletedID,
+ name: "Client to delete",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"],
+ }), now - 10));
+
+ try {
+ let store = engine._store;
+
+ _("First sync. 2 records downloaded.");
+ engine._sync();
+
+ _("Delete a record on the server.");
+ let collection = server.getCollection("foo", "clients");
+ collection.remove(deletedID);
+
+ _("Broadcast a command to all clients");
+ engine.sendCommand("logout", []);
+ engine._sync();
+
+ deepEqual(collection.keys().sort(), [activeID, engine.localID].sort(),
+ "Should not reupload deleted clients");
+
+ let activePayload = JSON.parse(JSON.parse(collection.payload(activeID)).ciphertext);
+ deepEqual(activePayload.commands, [{ command: "logout", args: [] }],
+ "Should send the command to the active client");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_send_uri_ack() {
+ _("Ensure a sent URI is deleted when the client syncs");
+
+ let now = Date.now() / 1000;
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ let user = server.user("foo");
+
+ new SyncTestingInfrastructure(server.server);
+ generateNewKeys(Service.collectionKeys);
+
+ try {
+ let fakeSenderID = Utils.makeGUID();
+
+ _("Initial sync for empty clients collection");
+ engine._sync();
+ let collection = server.getCollection("foo", "clients");
+ let ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
+ ok(ourPayload, "Should upload our client record");
+
+ _("Send a URL to the device on the server");
+ ourPayload.commands = [{
+ command: "displayURI",
+ args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
+ }];
+ server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload(ourPayload), now));
+
+ _("Sync again");
+ engine._sync();
+ deepEqual(engine.localCommands, [{
+ command: "displayURI",
+ args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
+ }], "Should receive incoming URI");
+ ok(engine.processIncomingCommands(), "Should process incoming commands");
+ const clearedCommands = engine._readCommands()[engine.localID];
+ deepEqual(clearedCommands, [{
+ command: "displayURI",
+ args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
+ }], "Should mark the commands as cleared after processing");
+
+ _("Check that the command was removed on the server");
+ engine._sync();
+ ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
+ ok(ourPayload, "Should upload the synced client record");
+ deepEqual(ourPayload.commands, [], "Should not reupload cleared commands");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ engine._resetClient();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_command_sync() {
+ _("Notify other clients when writing their record.");
+
+ engine._store.wipe();
+ generateNewKeys(Service.collectionKeys);
+
+ let contents = {
+ meta: {global: {engines: {clients: {version: engine.version,
+ syncID: engine.syncID}}}},
+ clients: {},
+ crypto: {}
+ };
+ let server = serverForUsers({"foo": "password"}, contents);
+ new SyncTestingInfrastructure(server.server);
+
+ let user = server.user("foo");
+ let collection = server.getCollection("foo", "clients");
+ let remoteId = Utils.makeGUID();
+ let remoteId2 = Utils.makeGUID();
+
+ function clientWBO(id) {
+ return user.collection("clients").wbo(id);
+ }
+
+ _("Create remote client record 1");
+ server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({
+ id: remoteId,
+ name: "Remote client",
+ type: "desktop",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"]
+ }), Date.now() / 1000));
+
+ _("Create remote client record 2");
+ server.insertWBO("foo", "clients", new ServerWBO(remoteId2, encryptPayload({
+ id: remoteId2,
+ name: "Remote client 2",
+ type: "mobile",
+ commands: [],
+ version: "48",
+ protocols: ["1.5"]
+ }), Date.now() / 1000));
+
+ try {
+ equal(collection.count(), 2, "2 remote records written");
+ engine._sync();
+ equal(collection.count(), 3, "3 remote records written (+1 for the synced local record)");
+
+ let notifiedIds;
+ engine.sendCommand("wipeAll", []);
+ engine._tracker.addChangedID(engine.localID);
+ engine.getClientFxaDeviceId = (id) => "fxa-" + id;
+ engine._notifyCollectionChanged = (ids) => (notifiedIds = ids);
+ _("Syncing.");
+ engine._sync();
+ deepEqual(notifiedIds, ["fxa-fake-guid-00","fxa-fake-guid-01"]);
+ ok(!notifiedIds.includes(engine.getClientFxaDeviceId(engine.localID)),
+ "We never notify the local device");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+
+ try {
+ server.deleteCollections("foo");
+ } finally {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Clients").level = Log.Level.Trace;
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_clients_escape.js b/services/sync/tests/unit/test_clients_escape.js
new file mode 100644
index 000000000..8c8cd63e3
--- /dev/null
+++ b/services/sync/tests/unit/test_clients_escape.js
@@ -0,0 +1,64 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ _("Set up test fixtures.");
+
+ ensureLegacyIdentityManager();
+ Service.identity.username = "john@example.com";
+ Service.clusterURL = "http://fakebase/";
+ let baseUri = "http://fakebase/1.1/foo/storage/";
+ let pubUri = baseUri + "keys/pubkey";
+ let privUri = baseUri + "keys/privkey";
+
+ Service.identity.syncKey = "abcdeabcdeabcdeabcdeabcdea";
+ let keyBundle = Service.identity.syncKeyBundle;
+
+ let engine = Service.clientsEngine;
+
+ try {
+ _("Test that serializing client records results in uploadable ascii");
+ engine.localID = "ascii";
+ engine.localName = "wéävê";
+
+ _("Make sure we have the expected record");
+ let record = engine._createRecord("ascii");
+ do_check_eq(record.id, "ascii");
+ do_check_eq(record.name, "wéävê");
+
+ _("Encrypting record...");
+ record.encrypt(keyBundle);
+ _("Encrypted.");
+
+ let serialized = JSON.stringify(record);
+ let checkCount = 0;
+ _("Checking for all ASCII:", serialized);
+ Array.forEach(serialized, function(ch) {
+ let code = ch.charCodeAt(0);
+ _("Checking asciiness of '", ch, "'=", code);
+ do_check_true(code < 128);
+ checkCount++;
+ });
+
+ _("Processed", checkCount, "characters out of", serialized.length);
+ do_check_eq(checkCount, serialized.length);
+
+ _("Making sure the record still looks like it did before");
+ record.decrypt(keyBundle);
+ do_check_eq(record.id, "ascii");
+ do_check_eq(record.name, "wéävê");
+
+ _("Sanity check that creating the record also gives the same");
+ record = engine._createRecord("ascii");
+ do_check_eq(record.id, "ascii");
+ do_check_eq(record.name, "wéävê");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
diff --git a/services/sync/tests/unit/test_collection_getBatched.js b/services/sync/tests/unit/test_collection_getBatched.js
new file mode 100644
index 000000000..c6523d497
--- /dev/null
+++ b/services/sync/tests/unit/test_collection_getBatched.js
@@ -0,0 +1,195 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Collection").level = Log.Level.Trace;
+ run_next_test();
+}
+
+function recordRange(lim, offset, total) {
+ let res = [];
+ for (let i = offset; i < Math.min(lim + offset, total); ++i) {
+ res.push(JSON.stringify({ id: String(i), payload: "test:" + i }));
+ }
+ return res.join("\n") + "\n";
+}
+
+function get_test_collection_info({ totalRecords, batchSize, lastModified,
+ throwAfter = Infinity,
+ interruptedAfter = Infinity }) {
+ let coll = new Collection("http://example.com/test/", WBORecord, Service);
+ coll.full = true;
+ let requests = [];
+ let responses = [];
+ let sawRecord = false;
+ coll.get = function() {
+ ok(!sawRecord); // make sure we call record handler after all requests.
+ let limit = +this.limit;
+ let offset = 0;
+ if (this.offset) {
+ equal(this.offset.slice(0, 6), "foobar");
+ offset = +this.offset.slice(6);
+ }
+ requests.push({
+ limit,
+ offset,
+ spec: this.spec,
+ headers: Object.assign({}, this.headers)
+ });
+ if (--throwAfter === 0) {
+ throw "Some Network Error";
+ }
+ let body = recordRange(limit, offset, totalRecords);
+ this._onProgress.call({ _data: body });
+ let response = {
+ body,
+ success: true,
+ status: 200,
+ headers: {}
+ };
+ if (--interruptedAfter === 0) {
+ response.success = false;
+ response.status = 412;
+ response.body = "";
+ } else if (offset + limit < totalRecords) {
+ // Ensure we're treating this as an opaque string, since the docs say
+ // it might not be numeric.
+ response.headers["x-weave-next-offset"] = "foobar" + (offset + batchSize);
+ }
+ response.headers["x-last-modified"] = lastModified;
+ responses.push(response);
+ return response;
+ };
+
+ let records = [];
+ coll.recordHandler = function(record) {
+ sawRecord = true;
+ // ensure records are coming in in the right order
+ equal(record.id, String(records.length));
+ equal(record.payload, "test:" + records.length);
+ records.push(record);
+ };
+ return { records, responses, requests, coll };
+}
+
+add_test(function test_success() {
+ const totalRecords = 11;
+ const batchSize = 2;
+ const lastModified = "111111";
+ let { records, responses, requests, coll } = get_test_collection_info({
+ totalRecords,
+ batchSize,
+ lastModified,
+ });
+ let response = coll.getBatched(batchSize);
+
+ equal(requests.length, Math.ceil(totalRecords / batchSize));
+
+ // records are mostly checked in recordHandler, we just care about the length
+ equal(records.length, totalRecords);
+
+ // ensure we're returning the last response
+ equal(responses[responses.length - 1], response);
+
+ // check first separately since its a bit of a special case
+ ok(!requests[0].headers["x-if-unmodified-since"]);
+ ok(!requests[0].offset);
+ equal(requests[0].limit, batchSize);
+ let expectedOffset = 2;
+ for (let i = 1; i < requests.length; ++i) {
+ let req = requests[i];
+ equal(req.headers["x-if-unmodified-since"], lastModified);
+ equal(req.limit, batchSize);
+ if (i !== requests.length - 1) {
+ equal(req.offset, expectedOffset);
+ }
+
+ expectedOffset += batchSize;
+ }
+
+ // ensure we cleaned up anything that would break further
+ // use of this collection.
+ ok(!coll._headers["x-if-unmodified-since"]);
+ ok(!coll.offset);
+ ok(!coll.limit || (coll.limit == Infinity));
+
+ run_next_test();
+});
+
+add_test(function test_total_limit() {
+ _("getBatched respects the (initial) value of the limit property");
+ const totalRecords = 100;
+ const recordLimit = 11;
+ const batchSize = 2;
+ const lastModified = "111111";
+ let { records, responses, requests, coll } = get_test_collection_info({
+ totalRecords,
+ batchSize,
+ lastModified,
+ });
+ coll.limit = recordLimit;
+ let response = coll.getBatched(batchSize);
+
+ equal(requests.length, Math.ceil(recordLimit / batchSize));
+ equal(records.length, recordLimit);
+
+ for (let i = 0; i < requests.length; ++i) {
+ let req = requests[i];
+ if (i !== requests.length - 1) {
+ equal(req.limit, batchSize);
+ } else {
+ equal(req.limit, recordLimit % batchSize);
+ }
+ }
+
+ equal(coll._limit, recordLimit);
+
+ run_next_test();
+});
+
+add_test(function test_412() {
+ _("We shouldn't record records if we get a 412 in the middle of a batch");
+ const totalRecords = 11;
+ const batchSize = 2;
+ const lastModified = "111111";
+ let { records, responses, requests, coll } = get_test_collection_info({
+ totalRecords,
+ batchSize,
+ lastModified,
+ interruptedAfter: 3
+ });
+ let response = coll.getBatched(batchSize);
+
+ equal(requests.length, 3);
+ equal(records.length, 0); // record handler shouldn't be called for anything
+
+ // ensure we're returning the last response
+ equal(responses[responses.length - 1], response);
+
+ ok(!response.success);
+ equal(response.status, 412);
+ run_next_test();
+});
+
+add_test(function test_get_throws() {
+ _("We shouldn't record records if get() throws for some reason");
+ const totalRecords = 11;
+ const batchSize = 2;
+ const lastModified = "111111";
+ let { records, responses, requests, coll } = get_test_collection_info({
+ totalRecords,
+ batchSize,
+ lastModified,
+ throwAfter: 3
+ });
+
+ throws(() => coll.getBatched(batchSize), "Some Network Error");
+
+ equal(requests.length, 3);
+ equal(records.length, 0);
+ run_next_test();
+});
diff --git a/services/sync/tests/unit/test_collection_inc_get.js b/services/sync/tests/unit/test_collection_inc_get.js
new file mode 100644
index 000000000..7747c0ef3
--- /dev/null
+++ b/services/sync/tests/unit/test_collection_inc_get.js
@@ -0,0 +1,188 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Make sure Collection can correctly incrementally parse GET requests");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ let base = "http://fake/";
+ let coll = new Collection("http://fake/uri/", WBORecord, Service);
+ let stream = { _data: "" };
+ let called, recCount, sum;
+
+ _("Not-JSON, string payloads are strings");
+ called = false;
+ stream._data = '{"id":"hello","payload":"world"}\n';
+ coll.recordHandler = function(rec) {
+ called = true;
+ _("Got record:", JSON.stringify(rec));
+ rec.collection = "uri"; // This would be done by an engine, so do it here.
+ do_check_eq(rec.collection, "uri");
+ do_check_eq(rec.id, "hello");
+ do_check_eq(rec.uri(base).spec, "http://fake/uri/hello");
+ do_check_eq(rec.payload, "world");
+ };
+ coll._onProgress.call(stream);
+ do_check_eq(stream._data, '');
+ do_check_true(called);
+ _("\n");
+
+
+ _("Parse record with payload");
+ called = false;
+ stream._data = '{"payload":"{\\"value\\":123}"}\n';
+ coll.recordHandler = function(rec) {
+ called = true;
+ _("Got record:", JSON.stringify(rec));
+ do_check_eq(rec.payload.value, 123);
+ };
+ coll._onProgress.call(stream);
+ do_check_eq(stream._data, '');
+ do_check_true(called);
+ _("\n");
+
+
+ _("Parse multiple records in one go");
+ called = false;
+ recCount = 0;
+ sum = 0;
+ stream._data = '{"id":"hundred","payload":"{\\"value\\":100}"}\n{"id":"ten","payload":"{\\"value\\":10}"}\n{"id":"one","payload":"{\\"value\\":1}"}\n';
+ coll.recordHandler = function(rec) {
+ called = true;
+ _("Got record:", JSON.stringify(rec));
+ recCount++;
+ sum += rec.payload.value;
+ _("Incremental status: count", recCount, "sum", sum);
+ rec.collection = "uri";
+ switch (recCount) {
+ case 1:
+ do_check_eq(rec.id, "hundred");
+ do_check_eq(rec.uri(base).spec, "http://fake/uri/hundred");
+ do_check_eq(rec.payload.value, 100);
+ do_check_eq(sum, 100);
+ break;
+ case 2:
+ do_check_eq(rec.id, "ten");
+ do_check_eq(rec.uri(base).spec, "http://fake/uri/ten");
+ do_check_eq(rec.payload.value, 10);
+ do_check_eq(sum, 110);
+ break;
+ case 3:
+ do_check_eq(rec.id, "one");
+ do_check_eq(rec.uri(base).spec, "http://fake/uri/one");
+ do_check_eq(rec.payload.value, 1);
+ do_check_eq(sum, 111);
+ break;
+ default:
+ do_throw("unexpected number of record counts", recCount);
+ break;
+ }
+ };
+ coll._onProgress.call(stream);
+ do_check_eq(recCount, 3);
+ do_check_eq(sum, 111);
+ do_check_eq(stream._data, '');
+ do_check_true(called);
+ _("\n");
+
+
+ _("Handle incremental data incoming");
+ called = false;
+ recCount = 0;
+ sum = 0;
+ stream._data = '{"payl';
+ coll.recordHandler = function(rec) {
+ called = true;
+ do_throw("shouldn't have gotten a record..");
+ };
+ coll._onProgress.call(stream);
+ _("shouldn't have gotten anything yet");
+ do_check_eq(recCount, 0);
+ do_check_eq(sum, 0);
+ _("leading array bracket should have been trimmed");
+ do_check_eq(stream._data, '{"payl');
+ do_check_false(called);
+ _();
+
+ _("adding more data enough for one record..");
+ called = false;
+ stream._data += 'oad":"{\\"value\\":100}"}\n';
+ coll.recordHandler = function(rec) {
+ called = true;
+ _("Got record:", JSON.stringify(rec));
+ recCount++;
+ sum += rec.payload.value;
+ };
+ coll._onProgress.call(stream);
+ _("should have 1 record with sum 100");
+ do_check_eq(recCount, 1);
+ do_check_eq(sum, 100);
+ _("all data should have been consumed including trailing comma");
+ do_check_eq(stream._data, '');
+ do_check_true(called);
+ _();
+
+ _("adding more data..");
+ called = false;
+ stream._data += '{"payload":"{\\"value\\":10}"';
+ coll.recordHandler = function(rec) {
+ called = true;
+ do_throw("shouldn't have gotten a record..");
+ };
+ coll._onProgress.call(stream);
+ _("should still have 1 record with sum 100");
+ do_check_eq(recCount, 1);
+ do_check_eq(sum, 100);
+ _("should almost have a record");
+ do_check_eq(stream._data, '{"payload":"{\\"value\\":10}"');
+ do_check_false(called);
+ _();
+
+ _("add data for two records..");
+ called = false;
+ stream._data += '}\n{"payload":"{\\"value\\":1}"}\n';
+ coll.recordHandler = function(rec) {
+ called = true;
+ _("Got record:", JSON.stringify(rec));
+ recCount++;
+ sum += rec.payload.value;
+ switch (recCount) {
+ case 2:
+ do_check_eq(rec.payload.value, 10);
+ do_check_eq(sum, 110);
+ break;
+ case 3:
+ do_check_eq(rec.payload.value, 1);
+ do_check_eq(sum, 111);
+ break;
+ default:
+ do_throw("unexpected number of record counts", recCount);
+ break;
+ }
+ };
+ coll._onProgress.call(stream);
+ _("should have gotten all 3 records with sum 111");
+ do_check_eq(recCount, 3);
+ do_check_eq(sum, 111);
+ _("should have consumed all data");
+ do_check_eq(stream._data, '');
+ do_check_true(called);
+ _();
+
+ _("add no extra data");
+ called = false;
+ stream._data += '';
+ coll.recordHandler = function(rec) {
+ called = true;
+ do_throw("shouldn't have gotten a record..");
+ };
+ coll._onProgress.call(stream);
+ _("should still have 3 records with sum 111");
+ do_check_eq(recCount, 3);
+ do_check_eq(sum, 111);
+ _("should have consumed nothing but still have nothing");
+ do_check_eq(stream._data, "");
+ do_check_false(called);
+ _("\n");
+}
diff --git a/services/sync/tests/unit/test_collections_recovery.js b/services/sync/tests/unit/test_collections_recovery.js
new file mode 100644
index 000000000..0e7f54676
--- /dev/null
+++ b/services/sync/tests/unit/test_collections_recovery.js
@@ -0,0 +1,85 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// Verify that we wipe the server if we have to regenerate keys.
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+add_identity_test(this, function* test_missing_crypto_collection() {
+ let johnHelper = track_collections_helper();
+ let johnU = johnHelper.with_updated_collection;
+ let johnColls = johnHelper.collections;
+
+ let empty = false;
+ function maybe_empty(handler) {
+ return function (request, response) {
+ if (empty) {
+ let body = "{}";
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ } else {
+ handler(request, response);
+ }
+ };
+ }
+
+ yield configureIdentity({username: "johndoe"});
+
+ let handlers = {
+ "/1.1/johndoe/info/collections": maybe_empty(johnHelper.handler),
+ "/1.1/johndoe/storage/crypto/keys": johnU("crypto", new ServerWBO("keys").handler()),
+ "/1.1/johndoe/storage/meta/global": johnU("meta", new ServerWBO("global").handler())
+ };
+ let collections = ["clients", "bookmarks", "forms", "history",
+ "passwords", "prefs", "tabs"];
+ // Disable addon sync because AddonManager won't be initialized here.
+ Service.engineManager.unregister("addons");
+
+ for (let coll of collections) {
+ handlers["/1.1/johndoe/storage/" + coll] =
+ johnU(coll, new ServerCollection({}, true).handler());
+ }
+ let server = httpd_setup(handlers);
+ Service.serverURL = server.baseURI;
+
+ try {
+ let fresh = 0;
+ let orig = Service._freshStart;
+ Service._freshStart = function() {
+ _("Called _freshStart.");
+ orig.call(Service);
+ fresh++;
+ };
+
+ _("Startup, no meta/global: freshStart called once.");
+ yield sync_and_validate_telem();
+ do_check_eq(fresh, 1);
+ fresh = 0;
+
+ _("Regular sync: no need to freshStart.");
+ Service.sync();
+ do_check_eq(fresh, 0);
+
+ _("Simulate a bad info/collections.");
+ delete johnColls.crypto;
+ yield sync_and_validate_telem();
+ do_check_eq(fresh, 1);
+ fresh = 0;
+
+ _("Regular sync: no need to freshStart.");
+ yield sync_and_validate_telem();
+ do_check_eq(fresh, 0);
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ yield deferred.promise;
+ }
+});
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_corrupt_keys.js b/services/sync/tests/unit/test_corrupt_keys.js
new file mode 100644
index 000000000..009461c2a
--- /dev/null
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -0,0 +1,233 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/tabs.js");
+Cu.import("resource://services-sync/engines/history.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/Promise.jsm");
+
+add_task(function* test_locally_changed_keys() {
+ let passphrase = "abcdeabcdeabcdeabcdeabcdea";
+
+ let hmacErrorCount = 0;
+ function counting(f) {
+ return function() {
+ hmacErrorCount++;
+ return f.call(this);
+ };
+ }
+
+ Service.handleHMACEvent = counting(Service.handleHMACEvent);
+
+ let server = new SyncServer();
+ let johndoe = server.registerUser("johndoe", "password");
+ johndoe.createContents({
+ meta: {},
+ crypto: {},
+ clients: {}
+ });
+ server.start();
+
+ try {
+ Svc.Prefs.set("registerEngines", "Tab");
+ _("Set up some tabs.");
+ let myTabs =
+ {windows: [{tabs: [{index: 1,
+ entries: [{
+ url: "http://foo.com/",
+ title: "Title"
+ }],
+ attributes: {
+ image: "image"
+ }
+ }]}]};
+ delete Svc.Session;
+ Svc.Session = {
+ getBrowserState: () => JSON.stringify(myTabs)
+ };
+
+ setBasicCredentials("johndoe", "password", passphrase);
+ Service.serverURL = server.baseURI;
+ Service.clusterURL = server.baseURI;
+
+ Service.engineManager.register(HistoryEngine);
+ Service.engineManager.unregister("addons");
+
+ function corrupt_local_keys() {
+ Service.collectionKeys._default.keyPair = [Svc.Crypto.generateRandomKey(),
+ Svc.Crypto.generateRandomKey()];
+ }
+
+ _("Setting meta.");
+
+ // Bump version on the server.
+ let m = new WBORecord("meta", "global");
+ m.payload = {"syncID": "foooooooooooooooooooooooooo",
+ "storageVersion": STORAGE_VERSION};
+ m.upload(Service.resource(Service.metaURL));
+
+ _("New meta/global: " + JSON.stringify(johndoe.collection("meta").wbo("global")));
+
+ // Upload keys.
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+
+ // Check that login works.
+ do_check_true(Service.login("johndoe", "ilovejane", passphrase));
+ do_check_true(Service.isLoggedIn);
+
+ // Sync should upload records.
+ yield sync_and_validate_telem();
+
+ // Tabs exist.
+ _("Tabs modified: " + johndoe.modified("tabs"));
+ do_check_true(johndoe.modified("tabs") > 0);
+
+ let coll_modified = Service.collectionKeys.lastModified;
+
+ // Let's create some server side history records.
+ let liveKeys = Service.collectionKeys.keyForCollection("history");
+ _("Keys now: " + liveKeys.keyPair);
+ let visitType = Ci.nsINavHistoryService.TRANSITION_LINK;
+ let history = johndoe.createCollection("history");
+ for (let i = 0; i < 5; i++) {
+ let id = 'record-no--' + i;
+ let modified = Date.now()/1000 - 60*(i+10);
+
+ let w = new CryptoWrapper("history", "id");
+ w.cleartext = {
+ id: id,
+ histUri: "http://foo/bar?" + id,
+ title: id,
+ sortindex: i,
+ visits: [{date: (modified - 5) * 1000000, type: visitType}],
+ deleted: false};
+ w.encrypt(liveKeys);
+
+ let payload = {ciphertext: w.ciphertext,
+ IV: w.IV,
+ hmac: w.hmac};
+ history.insert(id, payload, modified);
+ }
+
+ history.timestamp = Date.now() / 1000;
+ let old_key_time = johndoe.modified("crypto");
+ _("Old key time: " + old_key_time);
+
+ // Check that we can decrypt one.
+ let rec = new CryptoWrapper("history", "record-no--0");
+ rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
+ _(JSON.stringify(rec));
+ do_check_true(!!rec.decrypt(liveKeys));
+
+ do_check_eq(hmacErrorCount, 0);
+
+ // Fill local key cache with bad data.
+ corrupt_local_keys();
+ _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
+
+ do_check_eq(hmacErrorCount, 0);
+
+ _("HMAC error count: " + hmacErrorCount);
+ // Now syncing should succeed, after one HMAC error.
+ let ping = yield wait_for_ping(() => Service.sync(), true);
+ equal(ping.engines.find(e => e.name == "history").incoming.applied, 5);
+
+ do_check_eq(hmacErrorCount, 1);
+ _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
+
+ // And look! We downloaded history!
+ let store = Service.engineManager.get("history")._store;
+ do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--0"));
+ do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--1"));
+ do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--2"));
+ do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--3"));
+ do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--4"));
+ do_check_eq(hmacErrorCount, 1);
+
+ _("Busting some new server values.");
+ // Now what happens if we corrupt the HMAC on the server?
+ for (let i = 5; i < 10; i++) {
+ let id = 'record-no--' + i;
+ let modified = 1 + (Date.now() / 1000);
+
+ let w = new CryptoWrapper("history", "id");
+ w.cleartext = {
+ id: id,
+ histUri: "http://foo/bar?" + id,
+ title: id,
+ sortindex: i,
+ visits: [{date: (modified - 5 ) * 1000000, type: visitType}],
+ deleted: false};
+ w.encrypt(Service.collectionKeys.keyForCollection("history"));
+ w.hmac = w.hmac.toUpperCase();
+
+ let payload = {ciphertext: w.ciphertext,
+ IV: w.IV,
+ hmac: w.hmac};
+ history.insert(id, payload, modified);
+ }
+ history.timestamp = Date.now() / 1000;
+
+ _("Server key time hasn't changed.");
+ do_check_eq(johndoe.modified("crypto"), old_key_time);
+
+ _("Resetting HMAC error timer.");
+ Service.lastHMACEvent = 0;
+
+ _("Syncing...");
+ ping = yield sync_and_validate_telem(true);
+
+ do_check_eq(ping.engines.find(e => e.name == "history").incoming.failed, 5);
+ _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
+ _("Server keys have been updated, and we skipped over 5 more HMAC errors without adjusting history.");
+ do_check_true(johndoe.modified("crypto") > old_key_time);
+ do_check_eq(hmacErrorCount, 6);
+ do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--5"));
+ do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--6"));
+ do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--7"));
+ do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--8"));
+ do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--9"));
+ } finally {
+ Svc.Prefs.resetBranch("");
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ yield deferred.promise;
+ }
+});
+
+function run_test() {
+ let logger = Log.repository.rootLogger;
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+ validate_all_future_pings();
+
+ ensureLegacyIdentityManager();
+
+ run_next_test();
+}
+
+/**
+ * Asynchronously check a url is visited.
+ * @param url the url
+ * @return {Promise}
+ * @resolves When the check has been added successfully.
+ * @rejects JavaScript exception.
+ */
+function promiseIsURIVisited(url) {
+ let deferred = Promise.defer();
+ PlacesUtils.asyncHistory.isURIVisited(Utils.makeURI(url), function(aURI, aIsVisited) {
+ deferred.resolve(aIsVisited);
+ });
+
+ return deferred.promise;
+}
diff --git a/services/sync/tests/unit/test_declined.js b/services/sync/tests/unit/test_declined.js
new file mode 100644
index 000000000..e9e9b002a
--- /dev/null
+++ b/services/sync/tests/unit/test_declined.js
@@ -0,0 +1,153 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/stages/declined.js");
+Cu.import("resource://services-sync/stages/enginesync.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-common/observers.js");
+
+function run_test() {
+ run_next_test();
+}
+
+function PetrolEngine() {}
+PetrolEngine.prototype.name = "petrol";
+
+function DieselEngine() {}
+DieselEngine.prototype.name = "diesel";
+
+function DummyEngine() {}
+DummyEngine.prototype.name = "dummy";
+
+function ActualEngine() {}
+ActualEngine.prototype = {__proto__: Engine.prototype,
+ name: 'actual'};
+
+function getEngineManager() {
+ let manager = new EngineManager(Service);
+ Service.engineManager = manager;
+ manager._engines = {
+ "petrol": new PetrolEngine(),
+ "diesel": new DieselEngine(),
+ "dummy": new DummyEngine(),
+ "actual": new ActualEngine(),
+ };
+ return manager;
+}
+
+/**
+ * 'Fetch' a meta/global record that doesn't mention declined.
+ *
+ * Push it into the EngineSynchronizer to set enabled; verify that those are
+ * correct.
+ *
+ * Then push it into DeclinedEngines to set declined; verify that none are
+ * declined, and a notification is sent for our locally disabled-but-not-
+ * declined engines.
+ */
+add_test(function testOldMeta() {
+ let meta = {
+ payload: {
+ engines: {
+ "petrol": 1,
+ "diesel": 2,
+ "nonlocal": 3, // Enabled but not supported.
+ },
+ },
+ };
+
+ _("Record: " + JSON.stringify(meta));
+
+ let manager = getEngineManager();
+
+ // Update enabled from meta/global.
+ let engineSync = new EngineSynchronizer(Service);
+ engineSync._updateEnabledFromMeta(meta, 3, manager);
+
+ Assert.ok(manager._engines["petrol"].enabled, "'petrol' locally enabled.");
+ Assert.ok(manager._engines["diesel"].enabled, "'diesel' locally enabled.");
+ Assert.ok(!("nonlocal" in manager._engines), "We don't know anything about the 'nonlocal' engine.");
+ Assert.ok(!manager._engines["actual"].enabled, "'actual' not locally enabled.");
+ Assert.ok(!manager.isDeclined("actual"), "'actual' not declined, though.");
+
+ let declinedEngines = new DeclinedEngines(Service);
+
+ function onNotDeclined(subject, topic, data) {
+ Observers.remove("weave:engines:notdeclined", onNotDeclined);
+ Assert.ok(subject.undecided.has("actual"), "EngineManager observed that 'actual' was undecided.");
+
+ let declined = manager.getDeclined();
+ _("Declined: " + JSON.stringify(declined));
+
+ Assert.ok(!meta.changed, "No need to upload a new meta/global.");
+ run_next_test();
+ }
+
+ Observers.add("weave:engines:notdeclined", onNotDeclined);
+
+ declinedEngines.updateDeclined(meta, manager);
+});
+
+/**
+ * 'Fetch' a meta/global that declines an engine we don't
+ * recognize. Ensure that we track that declined engine along
+ * with any we locally declined, and that the meta/global
+ * record is marked as changed and includes all declined
+ * engines.
+ */
+add_test(function testDeclinedMeta() {
+ let meta = {
+ payload: {
+ engines: {
+ "petrol": 1,
+ "diesel": 2,
+ "nonlocal": 3, // Enabled but not supported.
+ },
+ declined: ["nonexistent"], // Declined and not supported.
+ },
+ };
+
+ _("Record: " + JSON.stringify(meta));
+
+ let manager = getEngineManager();
+ manager._engines["petrol"].enabled = true;
+ manager._engines["diesel"].enabled = true;
+ manager._engines["dummy"].enabled = true;
+ manager._engines["actual"].enabled = false; // Disabled but not declined.
+
+ manager.decline(["localdecline"]); // Declined and not supported.
+
+ let declinedEngines = new DeclinedEngines(Service);
+
+ function onNotDeclined(subject, topic, data) {
+ Observers.remove("weave:engines:notdeclined", onNotDeclined);
+ Assert.ok(subject.undecided.has("actual"), "EngineManager observed that 'actual' was undecided.");
+
+ let declined = manager.getDeclined();
+ _("Declined: " + JSON.stringify(declined));
+
+ Assert.equal(declined.indexOf("actual"), -1, "'actual' is locally disabled, but not marked as declined.");
+
+ Assert.equal(declined.indexOf("clients"), -1, "'clients' is enabled and not remotely declined.");
+ Assert.equal(declined.indexOf("petrol"), -1, "'petrol' is enabled and not remotely declined.");
+ Assert.equal(declined.indexOf("diesel"), -1, "'diesel' is enabled and not remotely declined.");
+ Assert.equal(declined.indexOf("dummy"), -1, "'dummy' is enabled and not remotely declined.");
+
+ Assert.ok(0 <= declined.indexOf("nonexistent"), "'nonexistent' was declined on the server.");
+
+ Assert.ok(0 <= declined.indexOf("localdecline"), "'localdecline' was declined locally.");
+
+ // The meta/global is modified, too.
+ Assert.ok(0 <= meta.payload.declined.indexOf("nonexistent"), "meta/global's declined contains 'nonexistent'.");
+ Assert.ok(0 <= meta.payload.declined.indexOf("localdecline"), "meta/global's declined contains 'localdecline'.");
+ Assert.strictEqual(true, meta.changed, "meta/global was changed.");
+
+ run_next_test();
+ }
+
+ Observers.add("weave:engines:notdeclined", onNotDeclined);
+
+ declinedEngines.updateDeclined(meta, manager);
+});
+
diff --git a/services/sync/tests/unit/test_engine.js b/services/sync/tests/unit/test_engine.js
new file mode 100644
index 000000000..be637efc8
--- /dev/null
+++ b/services/sync/tests/unit/test_engine.js
@@ -0,0 +1,219 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+function SteamStore(engine) {
+ Store.call(this, "Steam", engine);
+ this.wasWiped = false;
+}
+SteamStore.prototype = {
+ __proto__: Store.prototype,
+
+ wipe: function() {
+ this.wasWiped = true;
+ }
+};
+
+function SteamTracker(name, engine) {
+ Tracker.call(this, name || "Steam", engine);
+}
+SteamTracker.prototype = {
+ __proto__: Tracker.prototype
+};
+
+function SteamEngine(name, service) {
+ Engine.call(this, name, service);
+ this.wasReset = false;
+ this.wasSynced = false;
+}
+SteamEngine.prototype = {
+ __proto__: Engine.prototype,
+ _storeObj: SteamStore,
+ _trackerObj: SteamTracker,
+
+ _resetClient: function () {
+ this.wasReset = true;
+ },
+
+ _sync: function () {
+ this.wasSynced = true;
+ }
+};
+
+var engineObserver = {
+ topics: [],
+
+ observe: function(subject, topic, data) {
+ do_check_eq(data, "steam");
+ this.topics.push(topic);
+ },
+
+ reset: function() {
+ this.topics = [];
+ }
+};
+Observers.add("weave:engine:reset-client:start", engineObserver);
+Observers.add("weave:engine:reset-client:finish", engineObserver);
+Observers.add("weave:engine:wipe-client:start", engineObserver);
+Observers.add("weave:engine:wipe-client:finish", engineObserver);
+Observers.add("weave:engine:sync:start", engineObserver);
+Observers.add("weave:engine:sync:finish", engineObserver);
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_members() {
+ _("Engine object members");
+ let engine = new SteamEngine("Steam", Service);
+ do_check_eq(engine.Name, "Steam");
+ do_check_eq(engine.prefName, "steam");
+ do_check_true(engine._store instanceof SteamStore);
+ do_check_true(engine._tracker instanceof SteamTracker);
+ run_next_test();
+});
+
+add_test(function test_score() {
+ _("Engine.score corresponds to tracker.score and is readonly");
+ let engine = new SteamEngine("Steam", Service);
+ do_check_eq(engine.score, 0);
+ engine._tracker.score += 5;
+ do_check_eq(engine.score, 5);
+
+ try {
+ engine.score = 10;
+ } catch(ex) {
+ // Setting an attribute that has a getter produces an error in
+ // Firefox <= 3.6 and is ignored in later versions. Either way,
+ // the attribute's value won't change.
+ }
+ do_check_eq(engine.score, 5);
+ run_next_test();
+});
+
+add_test(function test_resetClient() {
+ _("Engine.resetClient calls _resetClient");
+ let engine = new SteamEngine("Steam", Service);
+ do_check_false(engine.wasReset);
+
+ engine.resetClient();
+ do_check_true(engine.wasReset);
+ do_check_eq(engineObserver.topics[0], "weave:engine:reset-client:start");
+ do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:finish");
+
+ engine.wasReset = false;
+ engineObserver.reset();
+ run_next_test();
+});
+
+add_test(function test_invalidChangedIDs() {
+ _("Test that invalid changed IDs on disk don't end up live.");
+ let engine = new SteamEngine("Steam", Service);
+ let tracker = engine._tracker;
+ tracker.changedIDs = 5;
+ tracker.saveChangedIDs(function onSaved() {
+ tracker.changedIDs = {placeholder: true};
+ tracker.loadChangedIDs(function onLoaded(json) {
+ do_check_null(json);
+ do_check_true(tracker.changedIDs.placeholder);
+ run_next_test();
+ });
+ });
+});
+
+add_test(function test_wipeClient() {
+ _("Engine.wipeClient calls resetClient, wipes store, clears changed IDs");
+ let engine = new SteamEngine("Steam", Service);
+ do_check_false(engine.wasReset);
+ do_check_false(engine._store.wasWiped);
+ do_check_true(engine._tracker.addChangedID("a-changed-id"));
+ do_check_true("a-changed-id" in engine._tracker.changedIDs);
+
+ engine.wipeClient();
+ do_check_true(engine.wasReset);
+ do_check_true(engine._store.wasWiped);
+ do_check_eq(JSON.stringify(engine._tracker.changedIDs), "{}");
+ do_check_eq(engineObserver.topics[0], "weave:engine:wipe-client:start");
+ do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:start");
+ do_check_eq(engineObserver.topics[2], "weave:engine:reset-client:finish");
+ do_check_eq(engineObserver.topics[3], "weave:engine:wipe-client:finish");
+
+ engine.wasReset = false;
+ engine._store.wasWiped = false;
+ engineObserver.reset();
+ run_next_test();
+});
+
+add_test(function test_enabled() {
+ _("Engine.enabled corresponds to preference");
+ let engine = new SteamEngine("Steam", Service);
+ try {
+ do_check_false(engine.enabled);
+ Svc.Prefs.set("engine.steam", true);
+ do_check_true(engine.enabled);
+
+ engine.enabled = false;
+ do_check_false(Svc.Prefs.get("engine.steam"));
+ run_next_test();
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+});
+
+add_test(function test_sync() {
+ let engine = new SteamEngine("Steam", Service);
+ try {
+ _("Engine.sync doesn't call _sync if it's not enabled");
+ do_check_false(engine.enabled);
+ do_check_false(engine.wasSynced);
+ engine.sync();
+
+ do_check_false(engine.wasSynced);
+
+ _("Engine.sync calls _sync if it's enabled");
+ engine.enabled = true;
+
+ engine.sync();
+ do_check_true(engine.wasSynced);
+ do_check_eq(engineObserver.topics[0], "weave:engine:sync:start");
+ do_check_eq(engineObserver.topics[1], "weave:engine:sync:finish");
+ run_next_test();
+ } finally {
+ Svc.Prefs.resetBranch("");
+ engine.wasSynced = false;
+ engineObserver.reset();
+ }
+});
+
+add_test(function test_disabled_no_track() {
+ _("When an engine is disabled, its tracker is not tracking.");
+ let engine = new SteamEngine("Steam", Service);
+ let tracker = engine._tracker;
+ do_check_eq(engine, tracker.engine);
+
+ do_check_false(engine.enabled);
+ do_check_false(tracker._isTracking);
+ do_check_empty(tracker.changedIDs);
+
+ do_check_false(tracker.engineIsEnabled());
+ tracker.observe(null, "weave:engine:start-tracking", null);
+ do_check_false(tracker._isTracking);
+ do_check_empty(tracker.changedIDs);
+
+ engine.enabled = true;
+ tracker.observe(null, "weave:engine:start-tracking", null);
+ do_check_true(tracker._isTracking);
+ do_check_empty(tracker.changedIDs);
+
+ tracker.addChangedID("abcdefghijkl");
+ do_check_true(0 < tracker.changedIDs["abcdefghijkl"]);
+ Svc.Prefs.set("engine." + engine.prefName, false);
+ do_check_false(tracker._isTracking);
+ do_check_empty(tracker.changedIDs);
+
+ run_next_test();
+});
diff --git a/services/sync/tests/unit/test_engine_abort.js b/services/sync/tests/unit/test_engine_abort.js
new file mode 100644
index 000000000..8ec866443
--- /dev/null
+++ b/services/sync/tests/unit/test_engine_abort.js
@@ -0,0 +1,69 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+add_test(function test_processIncoming_abort() {
+ _("An abort exception, raised in applyIncoming, will abort _processIncoming.");
+ let engine = new RotaryEngine(Service);
+
+ let collection = new ServerCollection();
+ let id = Utils.makeGUID();
+ let payload = encryptPayload({id: id, denomination: "Record No. " + id});
+ collection.insert(id, payload);
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ new SyncTestingInfrastructure(server);
+ generateNewKeys(Service.collectionKeys);
+
+ _("Create some server data.");
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ _("Fake applyIncoming to abort.");
+ engine._store.applyIncoming = function (record) {
+ let ex = {code: Engine.prototype.eEngineAbortApplyIncoming,
+ cause: "Nooo"};
+ _("Throwing: " + JSON.stringify(ex));
+ throw ex;
+ };
+
+ _("Trying _processIncoming. It will throw after aborting.");
+ let err;
+ try {
+ engine._syncStartup();
+ engine._processIncoming();
+ } catch (ex) {
+ err = ex;
+ }
+
+ do_check_eq(err, "Nooo");
+ err = undefined;
+
+ _("Trying engine.sync(). It will abort without error.");
+ try {
+ // This will quietly fail.
+ engine.sync();
+ } catch (ex) {
+ err = ex;
+ }
+
+ do_check_eq(err, undefined);
+
+ server.stop(run_next_test);
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+});
+
+function run_test() {
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_enginemanager.js b/services/sync/tests/unit/test_enginemanager.js
new file mode 100644
index 000000000..8917cc5bc
--- /dev/null
+++ b/services/sync/tests/unit/test_enginemanager.js
@@ -0,0 +1,114 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ run_next_test();
+}
+
+function PetrolEngine() {}
+PetrolEngine.prototype.name = "petrol";
+
+function DieselEngine() {}
+DieselEngine.prototype.name = "diesel";
+
+function DummyEngine() {}
+DummyEngine.prototype.name = "dummy";
+
+function ActualEngine() {}
+ActualEngine.prototype = {__proto__: Engine.prototype,
+ name: 'actual'};
+
+add_test(function test_basics() {
+ _("We start out with a clean slate");
+
+ let manager = new EngineManager(Service);
+
+ let engines = manager.getAll();
+ do_check_eq(engines.length, 0);
+ do_check_eq(manager.get('dummy'), undefined);
+
+ _("Register an engine");
+ manager.register(DummyEngine);
+ let dummy = manager.get('dummy');
+ do_check_true(dummy instanceof DummyEngine);
+
+ engines = manager.getAll();
+ do_check_eq(engines.length, 1);
+ do_check_eq(engines[0], dummy);
+
+ _("Register an already registered engine is ignored");
+ manager.register(DummyEngine);
+ do_check_eq(manager.get('dummy'), dummy);
+
+ _("Register multiple engines in one go");
+ manager.register([PetrolEngine, DieselEngine]);
+ let petrol = manager.get('petrol');
+ let diesel = manager.get('diesel');
+ do_check_true(petrol instanceof PetrolEngine);
+ do_check_true(diesel instanceof DieselEngine);
+
+ engines = manager.getAll();
+ do_check_eq(engines.length, 3);
+ do_check_neq(engines.indexOf(petrol), -1);
+ do_check_neq(engines.indexOf(diesel), -1);
+
+ _("Retrieve multiple engines in one go");
+ engines = manager.get(["dummy", "diesel"]);
+ do_check_eq(engines.length, 2);
+ do_check_neq(engines.indexOf(dummy), -1);
+ do_check_neq(engines.indexOf(diesel), -1);
+
+ _("getEnabled() only returns enabled engines");
+ engines = manager.getEnabled();
+ do_check_eq(engines.length, 0);
+
+ petrol.enabled = true;
+ engines = manager.getEnabled();
+ do_check_eq(engines.length, 1);
+ do_check_eq(engines[0], petrol);
+
+ dummy.enabled = true;
+ diesel.enabled = true;
+ engines = manager.getEnabled();
+ do_check_eq(engines.length, 3);
+
+ _("getEnabled() returns enabled engines in sorted order");
+ petrol.syncPriority = 1;
+ dummy.syncPriority = 2;
+ diesel.syncPriority = 3;
+
+ engines = manager.getEnabled();
+
+ do_check_array_eq(engines, [petrol, dummy, diesel]);
+
+ _("Changing the priorities should change the order in getEnabled()");
+
+ dummy.syncPriority = 4;
+
+ engines = manager.getEnabled();
+
+ do_check_array_eq(engines, [petrol, diesel, dummy]);
+
+ _("Unregister an engine by name");
+ manager.unregister('dummy');
+ do_check_eq(manager.get('dummy'), undefined);
+ engines = manager.getAll();
+ do_check_eq(engines.length, 2);
+ do_check_eq(engines.indexOf(dummy), -1);
+
+ _("Unregister an engine by value");
+ // manager.unregister() checks for instanceof Engine, so let's make one:
+ manager.register(ActualEngine);
+ let actual = manager.get('actual');
+ do_check_true(actual instanceof ActualEngine);
+ do_check_true(actual instanceof Engine);
+
+ manager.unregister(actual);
+ do_check_eq(manager.get('actual'), undefined);
+
+ run_next_test();
+});
+
diff --git a/services/sync/tests/unit/test_errorhandler_1.js b/services/sync/tests/unit/test_errorhandler_1.js
new file mode 100644
index 000000000..ea2070b48
--- /dev/null
+++ b/services/sync/tests/unit/test_errorhandler_1.js
@@ -0,0 +1,913 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/FileUtils.jsm");
+
+var fakeServer = new SyncServer();
+fakeServer.start();
+
+do_register_cleanup(function() {
+ return new Promise(resolve => {
+ fakeServer.stop(resolve);
+ });
+});
+
+var fakeServerUrl = "http://localhost:" + fakeServer.port;
+
+const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
+
+const PROLONGED_ERROR_DURATION =
+ (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000;
+
+const NON_PROLONGED_ERROR_DURATION =
+ (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') / 2) * 1000;
+
+Service.engineManager.clear();
+
+function setLastSync(lastSyncValue) {
+ Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
+}
+
+var engineManager = Service.engineManager;
+engineManager.register(EHTestsCommon.CatapultEngine);
+
+// This relies on Service/ErrorHandler being a singleton. Fixing this will take
+// a lot of work.
+var errorHandler = Service.errorHandler;
+
+function run_test() {
+ initTestLogging("Trace");
+
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+
+ ensureLegacyIdentityManager();
+
+ run_next_test();
+}
+
+
+function clean() {
+ Service.startOver();
+ Status.resetSync();
+ Status.resetBackoff();
+ errorHandler.didReportProlongedError = false;
+}
+
+add_identity_test(this, function* test_401_logout() {
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ // By calling sync, we ensure we're logged in.
+ yield sync_and_validate_telem();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:sync:error", onSyncError);
+ function onSyncError() {
+ _("Got weave:service:sync:error in first sync.");
+ Svc.Obs.remove("weave:service:sync:error", onSyncError);
+
+ // Wait for the automatic next sync.
+ function onLoginError() {
+ _("Got weave:service:login:error in second sync.");
+ Svc.Obs.remove("weave:service:login:error", onLoginError);
+
+ let expected = isConfiguredWithLegacyIdentity() ?
+ LOGIN_FAILED_LOGIN_REJECTED : LOGIN_FAILED_NETWORK_ERROR;
+
+ do_check_eq(Status.login, expected);
+ do_check_false(Service.isLoggedIn);
+
+ // Clean up.
+ Utils.nextTick(function () {
+ Service.startOver();
+ server.stop(deferred.resolve);
+ });
+ }
+ Svc.Obs.add("weave:service:login:error", onLoginError);
+ }
+
+ // Make sync fail due to login rejected.
+ yield configureIdentity({username: "janedoe"});
+ Service._updateCachedURLs();
+
+ _("Starting first sync.");
+ let ping = yield sync_and_validate_telem(true);
+ deepEqual(ping.failureReason, { name: "httperror", code: 401 });
+ _("First sync done.");
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_credentials_changed_logout() {
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ // By calling sync, we ensure we're logged in.
+ yield sync_and_validate_telem();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ EHTestsCommon.generateCredentialsChangedFailure();
+
+ let ping = yield sync_and_validate_telem(true);
+ equal(ping.status.sync, CREDENTIALS_CHANGED);
+ deepEqual(ping.failureReason, {
+ name: "unexpectederror",
+ error: "Error: Aborting sync, remote setup failed"
+ });
+
+ do_check_eq(Status.sync, CREDENTIALS_CHANGED);
+ do_check_false(Service.isLoggedIn);
+
+ // Clean up.
+ Service.startOver();
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function test_no_lastSync_pref() {
+ // Test reported error.
+ Status.resetSync();
+ errorHandler.dontIgnoreErrors = true;
+ Status.sync = CREDENTIALS_CHANGED;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test unreported error.
+ Status.resetSync();
+ errorHandler.dontIgnoreErrors = true;
+ Status.login = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_true(errorHandler.shouldReportError());
+
+});
+
+add_identity_test(this, function test_shouldReportError() {
+ Status.login = MASTER_PASSWORD_LOCKED;
+ do_check_false(errorHandler.shouldReportError());
+
+ // Give ourselves a clusterURL so that the temporary 401 no-error situation
+ // doesn't come into play.
+ Service.serverURL = fakeServerUrl;
+ Service.clusterURL = fakeServerUrl;
+
+ // Test dontIgnoreErrors, non-network, non-prolonged, login error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.login = LOGIN_FAILED_NO_PASSWORD;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test dontIgnoreErrors, non-network, non-prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.sync = CREDENTIALS_CHANGED;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test dontIgnoreErrors, non-network, prolonged, login error reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.login = LOGIN_FAILED_NO_PASSWORD;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test dontIgnoreErrors, non-network, prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.sync = CREDENTIALS_CHANGED;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test dontIgnoreErrors, network, non-prolonged, login error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.login = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test dontIgnoreErrors, network, non-prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.sync = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test dontIgnoreErrors, network, prolonged, login error reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.login = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test dontIgnoreErrors, network, prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.sync = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_true(errorHandler.shouldReportError());
+
+ // Test non-network, prolonged, login error reported
+ do_check_false(errorHandler.didReportProlongedError);
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.login = LOGIN_FAILED_NO_PASSWORD;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_true(errorHandler.didReportProlongedError);
+
+ // Second time with prolonged error and without resetting
+ // didReportProlongedError, sync error should not be reported.
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.login = LOGIN_FAILED_NO_PASSWORD;
+ do_check_false(errorHandler.shouldReportError());
+ do_check_true(errorHandler.didReportProlongedError);
+
+ // Test non-network, prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ errorHandler.didReportProlongedError = false;
+ Status.sync = CREDENTIALS_CHANGED;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_true(errorHandler.didReportProlongedError);
+ errorHandler.didReportProlongedError = false;
+
+ // Test network, prolonged, login error reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.login = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_true(errorHandler.didReportProlongedError);
+ errorHandler.didReportProlongedError = false;
+
+ // Test network, prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.sync = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_true(errorHandler.didReportProlongedError);
+ errorHandler.didReportProlongedError = false;
+
+ // Test non-network, non-prolonged, login error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.login = LOGIN_FAILED_NO_PASSWORD;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test non-network, non-prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.sync = CREDENTIALS_CHANGED;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test network, non-prolonged, login error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.login = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_false(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test network, non-prolonged, sync error reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.sync = LOGIN_FAILED_NETWORK_ERROR;
+ do_check_false(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test server maintenance, sync errors are not reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.sync = SERVER_MAINTENANCE;
+ do_check_false(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test server maintenance, login errors are not reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.login = SERVER_MAINTENANCE;
+ do_check_false(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test prolonged, server maintenance, sync errors are reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.sync = SERVER_MAINTENANCE;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_true(errorHandler.didReportProlongedError);
+ errorHandler.didReportProlongedError = false;
+
+ // Test prolonged, server maintenance, login errors are reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = false;
+ Status.login = SERVER_MAINTENANCE;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_true(errorHandler.didReportProlongedError);
+ errorHandler.didReportProlongedError = false;
+
+ // Test dontIgnoreErrors, server maintenance, sync errors are reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.sync = SERVER_MAINTENANCE;
+ do_check_true(errorHandler.shouldReportError());
+ // dontIgnoreErrors means we don't set didReportProlongedError
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test dontIgnoreErrors, server maintenance, login errors are reported
+ Status.resetSync();
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.login = SERVER_MAINTENANCE;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test dontIgnoreErrors, prolonged, server maintenance,
+ // sync errors are reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.sync = SERVER_MAINTENANCE;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+
+ // Test dontIgnoreErrors, prolonged, server maintenance,
+ // login errors are reported
+ Status.resetSync();
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.dontIgnoreErrors = true;
+ Status.login = SERVER_MAINTENANCE;
+ do_check_true(errorHandler.shouldReportError());
+ do_check_false(errorHandler.didReportProlongedError);
+});
+
+add_identity_test(this, function* test_shouldReportError_master_password() {
+ _("Test error ignored due to locked master password");
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ // Monkey patch Service.verifyLogin to imitate
+ // master password being locked.
+ Service._verifyLogin = Service.verifyLogin;
+ Service.verifyLogin = function () {
+ Status.login = MASTER_PASSWORD_LOCKED;
+ return false;
+ };
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+ do_check_false(errorHandler.shouldReportError());
+
+ // Clean up.
+ Service.verifyLogin = Service._verifyLogin;
+ clean();
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ yield deferred.promise;
+});
+
+// Test that even if we don't have a cluster URL, a login failure due to
+// authentication errors is always reported.
+add_identity_test(this, function test_shouldReportLoginFailureWithNoCluster() {
+ // Ensure no clusterURL - any error not specific to login should not be reported.
+ Service.serverURL = "";
+ Service.clusterURL = "";
+
+ // Test explicit "login rejected" state.
+ Status.resetSync();
+ // If we have a LOGIN_REJECTED state, we always report the error.
+ Status.login = LOGIN_FAILED_LOGIN_REJECTED;
+ do_check_true(errorHandler.shouldReportError());
+ // But any other status with a missing clusterURL is treated as a mid-sync
+ // 401 (ie, should be treated as a node reassignment)
+ Status.login = LOGIN_SUCCEEDED;
+ do_check_false(errorHandler.shouldReportError());
+});
+
+// XXX - how to arrange for 'Service.identity.basicPassword = null;' in
+// an fxaccounts environment?
+add_task(function* test_login_syncAndReportErrors_non_network_error() {
+ // Test non-network errors are reported
+ // when calling syncAndReportErrors
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+ Service.identity.basicPassword = null;
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:login:error", onSyncError);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_sync_syncAndReportErrors_non_network_error() {
+ // Test non-network errors are reported
+ // when calling syncAndReportErrors
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ // By calling sync, we ensure we're logged in.
+ Service.sync();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ EHTestsCommon.generateCredentialsChangedFailure();
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ do_check_eq(Status.sync, CREDENTIALS_CHANGED);
+ // If we clean this tick, telemetry won't get the right error
+ server.stop(() => {
+ clean();
+ deferred.resolve();
+ });
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ let ping = yield wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
+ equal(ping.status.sync, CREDENTIALS_CHANGED);
+ deepEqual(ping.failureReason, {
+ name: "unexpectederror",
+ error: "Error: Aborting sync, remote setup failed"
+ });
+ yield deferred.promise;
+});
+
+// XXX - how to arrange for 'Service.identity.basicPassword = null;' in
+// an fxaccounts environment?
+add_task(function* test_login_syncAndReportErrors_prolonged_non_network_error() {
+ // Test prolonged, non-network errors are
+ // reported when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+ Service.identity.basicPassword = null;
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:login:error", onSyncError);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_sync_syncAndReportErrors_prolonged_non_network_error() {
+ // Test prolonged, non-network errors are
+ // reported when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ // By calling sync, we ensure we're logged in.
+ Service.sync();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ EHTestsCommon.generateCredentialsChangedFailure();
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ do_check_eq(Status.sync, CREDENTIALS_CHANGED);
+ // If we clean this tick, telemetry won't get the right error
+ server.stop(() => {
+ clean();
+ deferred.resolve();
+ });
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ let ping = yield wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
+ equal(ping.status.sync, CREDENTIALS_CHANGED);
+ deepEqual(ping.failureReason, {
+ name: "unexpectederror",
+ error: "Error: Aborting sync, remote setup failed"
+ });
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_login_syncAndReportErrors_network_error() {
+ // Test network errors are reported when calling syncAndReportErrors.
+ yield configureIdentity({username: "broken.wipe"});
+ Service.serverURL = fakeServerUrl;
+ Service.clusterURL = fakeServerUrl;
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:login:error", onSyncError);
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ clean();
+ deferred.resolve();
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+
+add_test(function test_sync_syncAndReportErrors_network_error() {
+ // Test network errors are reported when calling syncAndReportErrors.
+ Services.io.offline = true;
+
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+
+ Services.io.offline = false;
+ clean();
+ run_next_test();
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+});
+
+add_identity_test(this, function* test_login_syncAndReportErrors_prolonged_network_error() {
+ // Test prolonged, network errors are reported
+ // when calling syncAndReportErrors.
+ yield configureIdentity({username: "johndoe"});
+
+ Service.serverURL = fakeServerUrl;
+ Service.clusterURL = fakeServerUrl;
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:login:error", onSyncError);
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ clean();
+ deferred.resolve();
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_test(function test_sync_syncAndReportErrors_prolonged_network_error() {
+ // Test prolonged, network errors are reported
+ // when calling syncAndReportErrors.
+ Services.io.offline = true;
+
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+
+ Services.io.offline = false;
+ clean();
+ run_next_test();
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+});
+
+add_task(function* test_login_prolonged_non_network_error() {
+ // Test prolonged, non-network errors are reported
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+ Service.identity.basicPassword = null;
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:login:error", onSyncError);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_task(function* test_sync_prolonged_non_network_error() {
+ // Test prolonged, non-network errors are reported
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ // By calling sync, we ensure we're logged in.
+ Service.sync();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ EHTestsCommon.generateCredentialsChangedFailure();
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+ server.stop(() => {
+ clean();
+ deferred.resolve();
+ });
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+
+ let ping = yield sync_and_validate_telem(true);
+ equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
+ deepEqual(ping.failureReason, {
+ name: "unexpectederror",
+ error: "Error: Aborting sync, remote setup failed"
+ });
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_login_prolonged_network_error() {
+ // Test prolonged, network errors are reported
+ yield configureIdentity({username: "johndoe"});
+ Service.serverURL = fakeServerUrl;
+ Service.clusterURL = fakeServerUrl;
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:login:error", onSyncError);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ deferred.resolve();
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_test(function test_sync_prolonged_network_error() {
+ // Test prolonged, network errors are reported
+ Services.io.offline = true;
+
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ Services.io.offline = false;
+ clean();
+ run_next_test();
+ });
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+});
+
+add_task(function* test_login_non_network_error() {
+ // Test non-network errors are reported
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+ Service.identity.basicPassword = null;
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:login:error", onSyncError);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_task(function* test_sync_non_network_error() {
+ // Test non-network errors are reported
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ // By calling sync, we ensure we're logged in.
+ Service.sync();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ EHTestsCommon.generateCredentialsChangedFailure();
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ do_check_eq(Status.sync, CREDENTIALS_CHANGED);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_login_network_error() {
+ yield configureIdentity({username: "johndoe"});
+ Service.serverURL = fakeServerUrl;
+ Service.clusterURL = fakeServerUrl;
+
+ let deferred = Promise.defer();
+ // Test network errors are not reported.
+ Svc.Obs.add("weave:ui:clear-error", function onClearError() {
+ Svc.Obs.remove("weave:ui:clear-error", onClearError);
+
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Services.io.offline = false;
+ clean();
+ deferred.resolve()
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_test(function test_sync_network_error() {
+ // Test network errors are not reported.
+ Services.io.offline = true;
+
+ Svc.Obs.add("weave:ui:sync:finish", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:sync:finish", onUIUpdate);
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Services.io.offline = false;
+ clean();
+ run_next_test();
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+});
+
+add_identity_test(this, function* test_sync_server_maintenance_error() {
+ // Test server maintenance errors are not reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ const BACKOFF = 42;
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 503,
+ headers: {"retry-after": BACKOFF}};
+
+ function onSyncError() {
+ do_throw("Shouldn't get here!");
+ }
+ Svc.Obs.add("weave:ui:sync:error", onSyncError);
+
+ do_check_eq(Status.service, STATUS_OK);
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:finish", function onSyncFinish() {
+ Svc.Obs.remove("weave:ui:sync:finish", onSyncFinish);
+
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+ server.stop(() => {
+ clean();
+ deferred.resolve();
+ })
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ let ping = yield sync_and_validate_telem(true);
+ equal(ping.status.sync, SERVER_MAINTENANCE);
+ deepEqual(ping.engines.find(e => e.failureReason).failureReason, { name: "httperror", code: 503 })
+
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_info_collections_login_server_maintenance_error() {
+ // Test info/collections server maintenance errors are not reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ Service.username = "broken.info";
+ yield configureIdentity({username: "broken.info"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ function onUIUpdate() {
+ do_throw("Shouldn't experience UI update!");
+ }
+ Svc.Obs.add("weave:ui:login:error", onUIUpdate);
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() {
+ Svc.Obs.remove("weave:ui:clear-error", onLoginFinish);
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_meta_global_login_server_maintenance_error() {
+ // Test meta/global server maintenance errors are not reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.meta"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ function onUIUpdate() {
+ do_throw("Shouldn't get here!");
+ }
+ Svc.Obs.add("weave:ui:login:error", onUIUpdate);
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() {
+ Svc.Obs.remove("weave:ui:clear-error", onLoginFinish);
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
diff --git a/services/sync/tests/unit/test_errorhandler_2.js b/services/sync/tests/unit/test_errorhandler_2.js
new file mode 100644
index 000000000..41f8ee727
--- /dev/null
+++ b/services/sync/tests/unit/test_errorhandler_2.js
@@ -0,0 +1,1012 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/FileUtils.jsm");
+
+var fakeServer = new SyncServer();
+fakeServer.start();
+
+do_register_cleanup(function() {
+ return new Promise(resolve => {
+ fakeServer.stop(resolve);
+ });
+});
+
+var fakeServerUrl = "http://localhost:" + fakeServer.port;
+
+const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
+
+const PROLONGED_ERROR_DURATION =
+ (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000;
+
+const NON_PROLONGED_ERROR_DURATION =
+ (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') / 2) * 1000;
+
+Service.engineManager.clear();
+
+function setLastSync(lastSyncValue) {
+ Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
+}
+
+var engineManager = Service.engineManager;
+engineManager.register(EHTestsCommon.CatapultEngine);
+
+// This relies on Service/ErrorHandler being a singleton. Fixing this will take
+// a lot of work.
+var errorHandler = Service.errorHandler;
+
+function run_test() {
+ initTestLogging("Trace");
+
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+
+ ensureLegacyIdentityManager();
+
+ run_next_test();
+}
+
+
+function clean() {
+ Service.startOver();
+ Status.resetSync();
+ Status.resetBackoff();
+ errorHandler.didReportProlongedError = false;
+}
+
+add_identity_test(this, function* test_crypto_keys_login_server_maintenance_error() {
+ Status.resetSync();
+ // Test crypto/keys server maintenance errors are not reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.keys"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ // Force re-download of keys
+ Service.collectionKeys.clear();
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ function onUIUpdate() {
+ do_throw("Shouldn't get here!");
+ }
+ Svc.Obs.add("weave:ui:login:error", onUIUpdate);
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() {
+ Svc.Obs.remove("weave:ui:clear-error", onLoginFinish);
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_task(function* test_sync_prolonged_server_maintenance_error() {
+ // Test prolonged server maintenance errors are reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ const BACKOFF = 42;
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 503,
+ headers: {"retry-after": BACKOFF}};
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ server.stop(() => {
+ clean();
+ deferred.resolve();
+ });
+ });
+
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ let ping = yield sync_and_validate_telem(true);
+ deepEqual(ping.status.sync, PROLONGED_SYNC_FAILURE);
+ deepEqual(ping.engines.find(e => e.failureReason).failureReason,
+ { name: "httperror", code: 503 });
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_info_collections_login_prolonged_server_maintenance_error(){
+ // Test info/collections prolonged server maintenance errors are reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.info"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_meta_global_login_prolonged_server_maintenance_error(){
+ // Test meta/global prolonged server maintenance errors are reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.meta"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_download_crypto_keys_login_prolonged_server_maintenance_error(){
+ // Test crypto/keys prolonged server maintenance errors are reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.keys"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+ // Force re-download of keys
+ Service.collectionKeys.clear();
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_upload_crypto_keys_login_prolonged_server_maintenance_error(){
+ // Test crypto/keys prolonged server maintenance errors are reported.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ // Start off with an empty account, do not upload a key.
+ yield configureIdentity({username: "broken.keys"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_wipeServer_login_prolonged_server_maintenance_error(){
+ // Test that we report prolonged server maintenance errors that occur whilst
+ // wiping the server.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ // Start off with an empty account, do not upload a key.
+ yield configureIdentity({username: "broken.wipe"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_wipeRemote_prolonged_server_maintenance_error(){
+ // Test that we report prolonged server maintenance errors that occur whilst
+ // wiping all remote devices.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable);
+ yield configureIdentity({username: "broken.wipe"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+ EHTestsCommon.generateAndUploadKeys();
+
+ let engine = engineManager.get("catapult");
+ engine.exception = null;
+ engine.enabled = true;
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
+ do_check_true(errorHandler.didReportProlongedError);
+ server.stop(() => {
+ clean();
+ deferred.resolve();
+ });
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ Svc.Prefs.set("firstSync", "wipeRemote");
+ setLastSync(PROLONGED_ERROR_DURATION);
+ let ping = yield sync_and_validate_telem(true);
+ deepEqual(ping.failureReason, { name: "httperror", code: 503 });
+ yield deferred.promise;
+});
+
+add_task(function* test_sync_syncAndReportErrors_server_maintenance_error() {
+ // Test server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ const BACKOFF = 42;
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 503,
+ headers: {"retry-after": BACKOFF}};
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_info_collections_login_syncAndReportErrors_server_maintenance_error() {
+ // Test info/collections server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.info"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_meta_global_login_syncAndReportErrors_server_maintenance_error() {
+ // Test meta/global server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.meta"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_download_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
+ // Test crypto/keys server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.keys"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+ // Force re-download of keys
+ Service.collectionKeys.clear();
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_upload_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
+ // Test crypto/keys server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ // Start off with an empty account, do not upload a key.
+ yield configureIdentity({username: "broken.keys"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_wipeServer_login_syncAndReportErrors_server_maintenance_error() {
+ // Test crypto/keys server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ // Start off with an empty account, do not upload a key.
+ yield configureIdentity({username: "broken.wipe"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_wipeRemote_syncAndReportErrors_server_maintenance_error(){
+ // Test that we report prolonged server maintenance errors that occur whilst
+ // wiping all remote devices.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ yield configureIdentity({username: "broken.wipe"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+ EHTestsCommon.generateAndUploadKeys();
+
+ let engine = engineManager.get("catapult");
+ engine.exception = null;
+ engine.enabled = true;
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ Svc.Prefs.set("firstSync", "wipeRemote");
+ setLastSync(NON_PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_task(function* test_sync_syncAndReportErrors_prolonged_server_maintenance_error() {
+ // Test prolonged server maintenance errors are
+ // reported when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ const BACKOFF = 42;
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 503,
+ headers: {"retry-after": BACKOFF}};
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+ // Test info/collections server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.info"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+ // Test meta/global server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.meta"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+ // Test crypto/keys server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+ yield EHTestsCommon.setUp(server);
+
+ yield configureIdentity({username: "broken.keys"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+ // Force re-download of keys
+ Service.collectionKeys.clear();
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+ // Test crypto/keys server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ // Start off with an empty account, do not upload a key.
+ yield configureIdentity({username: "broken.keys"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+ // Test crypto/keys server maintenance errors are reported
+ // when calling syncAndReportErrors.
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ // Start off with an empty account, do not upload a key.
+ yield configureIdentity({username: "broken.wipe"});
+ Service.serverURL = server.baseURI + "/maintenance/";
+ Service.clusterURL = server.baseURI + "/maintenance/";
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.service, STATUS_OK);
+
+ setLastSync(PROLONGED_ERROR_DURATION);
+ errorHandler.syncAndReportErrors();
+ yield deferred.promise;
+});
+
+add_task(function* test_sync_engine_generic_fail() {
+ let server = EHTestsCommon.sync_httpd_setup();
+
+let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.sync = function sync() {
+ Svc.Obs.notify("weave:engine:sync:error", ENGINE_UNKNOWN_FAIL, "catapult");
+ };
+
+ let log = Log.repository.getLogger("Sync.ErrorHandler");
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+
+ do_check_eq(Status.engines["catapult"], undefined);
+
+ let deferred = Promise.defer();
+ // Don't wait for reset-file-log until the sync is underway.
+ // This avoids us catching a delayed notification from an earlier test.
+ Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() {
+ Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish);
+
+ log.info("Adding reset-file-log observer.");
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ // Put these checks here, not after sync(), so that we aren't racing the
+ // log handler... which resets everything just a few lines below!
+ _("Status.engines: " + JSON.stringify(Status.engines));
+ do_check_eq(Status.engines["catapult"], ENGINE_UNKNOWN_FAIL);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+ // Test Error log was written on SYNC_FAILED_PARTIAL.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+
+ clean();
+
+ let syncErrors = sumHistogram("WEAVE_ENGINE_SYNC_ERRORS", { key: "catapult" });
+ do_check_true(syncErrors, 1);
+
+ server.stop(() => {
+ clean();
+ deferred.resolve();
+ });
+ });
+ });
+
+ do_check_true(yield EHTestsCommon.setUp(server));
+ let ping = yield sync_and_validate_telem(true);
+ deepEqual(ping.status.service, SYNC_FAILED_PARTIAL);
+ deepEqual(ping.engines.find(e => e.status).status, ENGINE_UNKNOWN_FAIL);
+
+ yield deferred.promise;
+});
+
+add_test(function test_logs_on_sync_error_despite_shouldReportError() {
+ _("Ensure that an error is still logged when weave:service:sync:error " +
+ "is notified, despite shouldReportError returning false.");
+
+ let log = Log.repository.getLogger("Sync.ErrorHandler");
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+ log.info("TESTING");
+
+ // Ensure that we report no error.
+ Status.login = MASTER_PASSWORD_LOCKED;
+ do_check_false(errorHandler.shouldReportError());
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ // Test that error log was written.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+
+ clean();
+ run_next_test();
+ });
+ Svc.Obs.notify("weave:service:sync:error", {});
+});
+
+add_test(function test_logs_on_login_error_despite_shouldReportError() {
+ _("Ensure that an error is still logged when weave:service:login:error " +
+ "is notified, despite shouldReportError returning false.");
+
+ let log = Log.repository.getLogger("Sync.ErrorHandler");
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+ log.info("TESTING");
+
+ // Ensure that we report no error.
+ Status.login = MASTER_PASSWORD_LOCKED;
+ do_check_false(errorHandler.shouldReportError());
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ // Test that error log was written.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+
+ clean();
+ run_next_test();
+ });
+ Svc.Obs.notify("weave:service:login:error", {});
+});
+
+// This test should be the last one since it monkeypatches the engine object
+// and we should only have one engine object throughout the file (bug 629664).
+add_task(function* test_engine_applyFailed() {
+ let server = EHTestsCommon.sync_httpd_setup();
+
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ delete engine.exception;
+ engine.sync = function sync() {
+ Svc.Obs.notify("weave:engine:sync:applied", {newFailed:1}, "catapult");
+ };
+
+ let log = Log.repository.getLogger("Sync.ErrorHandler");
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ do_check_eq(Status.engines["catapult"], ENGINE_APPLY_FAIL);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+ // Test Error log was written on SYNC_FAILED_PARTIAL.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+
+ clean();
+ server.stop(deferred.resolve);
+ });
+
+ do_check_eq(Status.engines["catapult"], undefined);
+ do_check_true(yield EHTestsCommon.setUp(server));
+ Service.sync();
+ yield deferred.promise;
+});
diff --git a/services/sync/tests/unit/test_errorhandler_eol.js b/services/sync/tests/unit/test_errorhandler_eol.js
new file mode 100644
index 000000000..c8d2ff4be
--- /dev/null
+++ b/services/sync/tests/unit/test_errorhandler_eol.js
@@ -0,0 +1,137 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+
+Cu.import("resource://testing-common/services/sync/fakeservices.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function baseHandler(eolCode, request, response, statusCode, status, body) {
+ let alertBody = {
+ code: eolCode,
+ message: "Service is EOLed.",
+ url: "http://getfirefox.com",
+ };
+ response.setHeader("X-Weave-Timestamp", "" + new_timestamp(), false);
+ response.setHeader("X-Weave-Alert", "" + JSON.stringify(alertBody), false);
+ response.setStatusLine(request.httpVersion, statusCode, status);
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function handler513(request, response) {
+ let statusCode = 513;
+ let status = "Upgrade Required";
+ let body = "{}";
+ baseHandler("hard-eol", request, response, statusCode, status, body);
+}
+
+function handler200(eolCode) {
+ return function (request, response) {
+ let statusCode = 200;
+ let status = "OK";
+ let body = "{\"meta\": 123456789010}";
+ baseHandler(eolCode, request, response, statusCode, status, body);
+ };
+}
+
+function sync_httpd_setup(infoHandler) {
+ let handlers = {
+ "/1.1/johndoe/info/collections": infoHandler,
+ };
+ return httpd_setup(handlers);
+}
+
+function* setUp(server) {
+ yield configureIdentity({username: "johndoe"});
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+ new FakeCryptoService();
+}
+
+function run_test() {
+ run_next_test();
+}
+
+function do_check_soft_eol(eh, start) {
+ // We subtract 1000 because the stored value is in second precision.
+ do_check_true(eh.earliestNextAlert >= (start + eh.MINIMUM_ALERT_INTERVAL_MSEC - 1000));
+ do_check_eq("soft-eol", eh.currentAlertMode);
+}
+function do_check_hard_eol(eh, start) {
+ // We subtract 1000 because the stored value is in second precision.
+ do_check_true(eh.earliestNextAlert >= (start + eh.MINIMUM_ALERT_INTERVAL_MSEC - 1000));
+ do_check_eq("hard-eol", eh.currentAlertMode);
+ do_check_true(Status.eol);
+}
+
+add_identity_test(this, function* test_200_hard() {
+ let eh = Service.errorHandler;
+ let start = Date.now();
+ let server = sync_httpd_setup(handler200("hard-eol"));
+ yield setUp(server);
+
+ let deferred = Promise.defer();
+ let obs = function (subject, topic, data) {
+ Svc.Obs.remove("weave:eol", obs);
+ do_check_eq("hard-eol", subject.code);
+ do_check_hard_eol(eh, start);
+ do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
+ eh.clearServerAlerts();
+ server.stop(deferred.resolve);
+ };
+
+ Svc.Obs.add("weave:eol", obs);
+ Service._fetchInfo();
+ Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_513_hard() {
+ let eh = Service.errorHandler;
+ let start = Date.now();
+ let server = sync_httpd_setup(handler513);
+ yield setUp(server);
+
+ let deferred = Promise.defer();
+ let obs = function (subject, topic, data) {
+ Svc.Obs.remove("weave:eol", obs);
+ do_check_eq("hard-eol", subject.code);
+ do_check_hard_eol(eh, start);
+ do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
+ eh.clearServerAlerts();
+ server.stop(deferred.resolve);
+ };
+
+ Svc.Obs.add("weave:eol", obs);
+ try {
+ Service._fetchInfo();
+ Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
+ } catch (ex) {
+ // Because fetchInfo will fail on a 513.
+ }
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_200_soft() {
+ let eh = Service.errorHandler;
+ let start = Date.now();
+ let server = sync_httpd_setup(handler200("soft-eol"));
+ yield setUp(server);
+
+ let deferred = Promise.defer();
+ let obs = function (subject, topic, data) {
+ Svc.Obs.remove("weave:eol", obs);
+ do_check_eq("soft-eol", subject.code);
+ do_check_soft_eol(eh, start);
+ do_check_eq(Service.scheduler.singleDeviceInterval, Service.scheduler.syncInterval);
+ eh.clearServerAlerts();
+ server.stop(deferred.resolve);
+ };
+
+ Svc.Obs.add("weave:eol", obs);
+ Service._fetchInfo();
+ Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
+ yield deferred.promise;
+});
diff --git a/services/sync/tests/unit/test_errorhandler_filelog.js b/services/sync/tests/unit/test_errorhandler_filelog.js
new file mode 100644
index 000000000..993a478fd
--- /dev/null
+++ b/services/sync/tests/unit/test_errorhandler_filelog.js
@@ -0,0 +1,370 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://gre/modules/FileUtils.jsm");
+Cu.import("resource://gre/modules/NetUtil.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+
+const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
+
+// Delay to wait before cleanup, to allow files to age.
+// This is so large because the file timestamp granularity is per-second, and
+// so otherwise we can end up with all of our files -- the ones we want to
+// keep, and the ones we want to clean up -- having the same modified time.
+const CLEANUP_DELAY = 2000;
+const DELAY_BUFFER = 500; // Buffer for timers on different OS platforms.
+
+const PROLONGED_ERROR_DURATION =
+ (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000;
+
+var errorHandler = Service.errorHandler;
+
+function setLastSync(lastSyncValue) {
+ Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
+}
+
+function run_test() {
+ initTestLogging("Trace");
+
+ Log.repository.getLogger("Sync.LogManager").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+
+ validate_all_future_pings();
+
+ run_next_test();
+}
+
+add_test(function test_noOutput() {
+ // Ensure that the log appender won't print anything.
+ errorHandler._logManager._fileAppender.level = Log.Level.Fatal + 1;
+
+ // Clear log output from startup.
+ Svc.Prefs.set("log.appender.file.logOnSuccess", false);
+ Svc.Obs.notify("weave:service:sync:finish");
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLogOuter() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLogOuter);
+ // Clear again without having issued any output.
+ Svc.Prefs.set("log.appender.file.logOnSuccess", true);
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLogInner() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLogInner);
+
+ errorHandler._logManager._fileAppender.level = Log.Level.Trace;
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ });
+
+ // Fake a successful sync.
+ Svc.Obs.notify("weave:service:sync:finish");
+ });
+});
+
+add_test(function test_logOnSuccess_false() {
+ Svc.Prefs.set("log.appender.file.logOnSuccess", false);
+
+ let log = Log.repository.getLogger("Sync.Test.FileLog");
+ log.info("this won't show up");
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+ // No log file was written.
+ do_check_false(logsdir.directoryEntries.hasMoreElements());
+
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ });
+
+ // Fake a successful sync.
+ Svc.Obs.notify("weave:service:sync:finish");
+});
+
+function readFile(file, callback) {
+ NetUtil.asyncFetch({
+ uri: NetUtil.newURI(file),
+ loadUsingSystemPrincipal: true
+ }, function (inputStream, statusCode, request) {
+ let data = NetUtil.readInputStreamToString(inputStream,
+ inputStream.available());
+ callback(statusCode, data);
+ });
+}
+
+add_test(function test_logOnSuccess_true() {
+ Svc.Prefs.set("log.appender.file.logOnSuccess", true);
+
+ let log = Log.repository.getLogger("Sync.Test.FileLog");
+ const MESSAGE = "this WILL show up";
+ log.info(MESSAGE);
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ // Exactly one log file was written.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_eq(logfile.leafName.slice(-4), ".txt");
+ do_check_true(logfile.leafName.startsWith("success-sync-"), logfile.leafName);
+ do_check_false(entries.hasMoreElements());
+
+ // Ensure the log message was actually written to file.
+ readFile(logfile, function (error, data) {
+ do_check_true(Components.isSuccessCode(error));
+ do_check_neq(data.indexOf(MESSAGE), -1);
+
+ // Clean up.
+ try {
+ logfile.remove(false);
+ } catch(ex) {
+ dump("Couldn't delete file: " + ex + "\n");
+ // Stupid Windows box.
+ }
+
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ });
+ });
+
+ // Fake a successful sync.
+ Svc.Obs.notify("weave:service:sync:finish");
+});
+
+add_test(function test_sync_error_logOnError_false() {
+ Svc.Prefs.set("log.appender.file.logOnError", false);
+
+ let log = Log.repository.getLogger("Sync.Test.FileLog");
+ log.info("this won't show up");
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+ // No log file was written.
+ do_check_false(logsdir.directoryEntries.hasMoreElements());
+
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ });
+
+ // Fake an unsuccessful sync due to prolonged failure.
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Svc.Obs.notify("weave:service:sync:error");
+});
+
+add_test(function test_sync_error_logOnError_true() {
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+
+ let log = Log.repository.getLogger("Sync.Test.FileLog");
+ const MESSAGE = "this WILL show up";
+ log.info(MESSAGE);
+
+ // We need to wait until the log cleanup started by this test is complete
+ // or the next test will fail as it is ongoing.
+ Svc.Obs.add("services-tests:common:log-manager:cleanup-logs", function onCleanupLogs() {
+ Svc.Obs.remove("services-tests:common:log-manager:cleanup-logs", onCleanupLogs);
+ run_next_test();
+ });
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ // Exactly one log file was written.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_eq(logfile.leafName.slice(-4), ".txt");
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+ do_check_false(entries.hasMoreElements());
+
+ // Ensure the log message was actually written to file.
+ readFile(logfile, function (error, data) {
+ do_check_true(Components.isSuccessCode(error));
+ do_check_neq(data.indexOf(MESSAGE), -1);
+
+ // Clean up.
+ try {
+ logfile.remove(false);
+ } catch(ex) {
+ dump("Couldn't delete file: " + ex + "\n");
+ // Stupid Windows box.
+ }
+
+ Svc.Prefs.resetBranch("");
+ });
+ });
+
+ // Fake an unsuccessful sync due to prolonged failure.
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Svc.Obs.notify("weave:service:sync:error");
+});
+
+add_test(function test_login_error_logOnError_false() {
+ Svc.Prefs.set("log.appender.file.logOnError", false);
+
+ let log = Log.repository.getLogger("Sync.Test.FileLog");
+ log.info("this won't show up");
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+ // No log file was written.
+ do_check_false(logsdir.directoryEntries.hasMoreElements());
+
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ });
+
+ // Fake an unsuccessful login due to prolonged failure.
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Svc.Obs.notify("weave:service:login:error");
+});
+
+add_test(function test_login_error_logOnError_true() {
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+
+ let log = Log.repository.getLogger("Sync.Test.FileLog");
+ const MESSAGE = "this WILL show up";
+ log.info(MESSAGE);
+
+ // We need to wait until the log cleanup started by this test is complete
+ // or the next test will fail as it is ongoing.
+ Svc.Obs.add("services-tests:common:log-manager:cleanup-logs", function onCleanupLogs() {
+ Svc.Obs.remove("services-tests:common:log-manager:cleanup-logs", onCleanupLogs);
+ run_next_test();
+ });
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ // Exactly one log file was written.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_eq(logfile.leafName.slice(-4), ".txt");
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+ do_check_false(entries.hasMoreElements());
+
+ // Ensure the log message was actually written to file.
+ readFile(logfile, function (error, data) {
+ do_check_true(Components.isSuccessCode(error));
+ do_check_neq(data.indexOf(MESSAGE), -1);
+
+ // Clean up.
+ try {
+ logfile.remove(false);
+ } catch(ex) {
+ dump("Couldn't delete file: " + ex + "\n");
+ // Stupid Windows box.
+ }
+
+ Svc.Prefs.resetBranch("");
+ });
+ });
+
+ // Fake an unsuccessful login due to prolonged failure.
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Svc.Obs.notify("weave:service:login:error");
+});
+
+
+add_test(function test_errorLog_dumpAddons() {
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+
+ let log = Log.repository.getLogger("Sync.Test.FileLog");
+
+ // We need to wait until the log cleanup started by this test is complete
+ // or the next test will fail as it is ongoing.
+ Svc.Obs.add("services-tests:common:log-manager:cleanup-logs", function onCleanupLogs() {
+ Svc.Obs.remove("services-tests:common:log-manager:cleanup-logs", onCleanupLogs);
+ run_next_test();
+ });
+
+ Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+ Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_eq(logfile.leafName.slice(-4), ".txt");
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+ do_check_false(entries.hasMoreElements());
+
+ // Ensure we logged some addon list (which is probably empty)
+ readFile(logfile, function (error, data) {
+ do_check_true(Components.isSuccessCode(error));
+ do_check_neq(data.indexOf("Addons installed"), -1);
+
+ // Clean up.
+ try {
+ logfile.remove(false);
+ } catch(ex) {
+ dump("Couldn't delete file: " + ex + "\n");
+ // Stupid Windows box.
+ }
+
+ Svc.Prefs.resetBranch("");
+ });
+ });
+
+ // Fake an unsuccessful sync due to prolonged failure.
+ setLastSync(PROLONGED_ERROR_DURATION);
+ Svc.Obs.notify("weave:service:sync:error");
+});
+
+// Check that error log files are deleted above an age threshold.
+add_test(function test_logErrorCleanup_age() {
+ _("Beginning test_logErrorCleanup_age.");
+ let maxAge = CLEANUP_DELAY / 1000;
+ let oldLogs = [];
+ let numLogs = 10;
+ let errString = "some error log\n";
+
+ Svc.Prefs.set("log.appender.file.logOnError", true);
+ Svc.Prefs.set("log.appender.file.maxErrorAge", maxAge);
+
+ _("Making some files.");
+ for (let i = 0; i < numLogs; i++) {
+ let now = Date.now();
+ let filename = "error-sync-" + now + "" + i + ".txt";
+ let newLog = FileUtils.getFile("ProfD", ["weave", "logs", filename]);
+ let foStream = FileUtils.openFileOutputStream(newLog);
+ foStream.write(errString, errString.length);
+ foStream.close();
+ _(" > Created " + filename);
+ oldLogs.push(newLog.leafName);
+ }
+
+ Svc.Obs.add("services-tests:common:log-manager:cleanup-logs", function onCleanupLogs() {
+ Svc.Obs.remove("services-tests:common:log-manager:cleanup-logs", onCleanupLogs);
+
+ // Only the newest created log file remains.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_true(oldLogs.every(function (e) {
+ return e != logfile.leafName;
+ }));
+ do_check_false(entries.hasMoreElements());
+
+ // Clean up.
+ try {
+ logfile.remove(false);
+ } catch(ex) {
+ dump("Couldn't delete file: " + ex + "\n");
+ // Stupid Windows box.
+ }
+
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ });
+
+ let delay = CLEANUP_DELAY + DELAY_BUFFER;
+
+ _("Cleaning up logs after " + delay + "msec.");
+ CommonUtils.namedTimer(function onTimer() {
+ Svc.Obs.notify("weave:service:sync:error");
+ }, delay, this, "cleanup-timer");
+});
diff --git a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
new file mode 100644
index 000000000..953f59fcb
--- /dev/null
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -0,0 +1,282 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/fakeservices.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+initTestLogging("Trace");
+
+var engineManager = Service.engineManager;
+engineManager.clear();
+
+function promiseStopServer(server) {
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ return deferred.promise;
+}
+
+function CatapultEngine() {
+ SyncEngine.call(this, "Catapult", Service);
+}
+CatapultEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ exception: null, // tests fill this in
+ _sync: function _sync() {
+ throw this.exception;
+ }
+};
+
+function sync_httpd_setup() {
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+ let collections = collectionsHelper.collections;
+
+ let catapultEngine = engineManager.get("catapult");
+ let engines = {catapult: {version: catapultEngine.version,
+ syncID: catapultEngine.syncID}};
+
+ // Track these using the collections helper, which keeps modified times
+ // up-to-date.
+ let clientsColl = new ServerCollection({}, true);
+ let keysWBO = new ServerWBO("keys");
+ let globalWBO = new ServerWBO("global", {storageVersion: STORAGE_VERSION,
+ syncID: Utils.makeGUID(),
+ engines: engines});
+
+ let handlers = {
+ "/1.1/johndoe/info/collections": collectionsHelper.handler,
+ "/1.1/johndoe/storage/meta/global": upd("meta", globalWBO.handler()),
+ "/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler()),
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", keysWBO.handler())
+ };
+ return httpd_setup(handlers);
+}
+
+function* setUp(server) {
+ yield configureIdentity({username: "johndoe"});
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+ new FakeCryptoService();
+}
+
+function generateAndUploadKeys(server) {
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
+ return serverKeys.upload(res).success;
+}
+
+
+add_identity_test(this, function* test_backoff500() {
+ _("Test: HTTP 500 sets backoff status.");
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 500};
+
+ try {
+ do_check_false(Status.enforceBackoff);
+
+ // Forcibly create and upload keys here -- otherwise we don't get to the 500!
+ do_check_true(generateAndUploadKeys(server));
+
+ Service.login();
+ Service.sync();
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ } finally {
+ Status.resetBackoff();
+ Service.startOver();
+ }
+ yield promiseStopServer(server);
+});
+
+add_identity_test(this, function* test_backoff503() {
+ _("Test: HTTP 503 with Retry-After header leads to backoff notification and sets backoff status.");
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ const BACKOFF = 42;
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 503,
+ headers: {"retry-after": BACKOFF}};
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function (subject) {
+ backoffInterval = subject;
+ });
+
+ try {
+ do_check_false(Status.enforceBackoff);
+
+ do_check_true(generateAndUploadKeys(server));
+
+ Service.login();
+ Service.sync();
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, BACKOFF);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ } finally {
+ Status.resetBackoff();
+ Status.resetSync();
+ Service.startOver();
+ }
+ yield promiseStopServer(server);
+});
+
+add_identity_test(this, function* test_overQuota() {
+ _("Test: HTTP 400 with body error code 14 means over quota.");
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 400,
+ toString() {
+ return "14";
+ }};
+
+ try {
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+
+ do_check_true(generateAndUploadKeys(server));
+
+ Service.login();
+ Service.sync();
+
+ do_check_eq(Status.sync, OVER_QUOTA);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ } finally {
+ Status.resetSync();
+ Service.startOver();
+ }
+ yield promiseStopServer(server);
+});
+
+add_identity_test(this, function* test_service_networkError() {
+ _("Test: Connection refused error from Service.sync() leads to the right status code.");
+ let server = sync_httpd_setup();
+ yield setUp(server);
+ let deferred = Promise.defer();
+ server.stop(() => {
+ // Provoke connection refused.
+ Service.clusterURL = "http://localhost:12345/";
+
+ try {
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+
+ Service._loggedIn = true;
+ Service.sync();
+
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_eq(Status.service, SYNC_FAILED);
+ } finally {
+ Status.resetSync();
+ Service.startOver();
+ }
+ deferred.resolve();
+ });
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_service_offline() {
+ _("Test: Wanting to sync in offline mode leads to the right status code but does not increment the ignorable error count.");
+ let server = sync_httpd_setup();
+ yield setUp(server);
+ let deferred = Promise.defer();
+ server.stop(() => {
+ Services.io.offline = true;
+ Services.prefs.setBoolPref("network.dns.offline-localhost", false);
+
+ try {
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+
+ Service._loggedIn = true;
+ Service.sync();
+
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_eq(Status.service, SYNC_FAILED);
+ } finally {
+ Status.resetSync();
+ Service.startOver();
+ }
+ Services.io.offline = false;
+ Services.prefs.clearUserPref("network.dns.offline-localhost");
+ deferred.resolve();
+ });
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_engine_networkError() {
+ _("Test: Network related exceptions from engine.sync() lead to the right status code.");
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = Components.Exception("NS_ERROR_UNKNOWN_HOST",
+ Cr.NS_ERROR_UNKNOWN_HOST);
+
+ try {
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+
+ do_check_true(generateAndUploadKeys(server));
+
+ Service.login();
+ Service.sync();
+
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ } finally {
+ Status.resetSync();
+ Service.startOver();
+ }
+ yield promiseStopServer(server);
+});
+
+add_identity_test(this, function* test_resource_timeout() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ let engine = engineManager.get("catapult");
+ engine.enabled = true;
+ // Resource throws this when it encounters a timeout.
+ engine.exception = Components.Exception("Aborting due to channel inactivity.",
+ Cr.NS_ERROR_NET_TIMEOUT);
+
+ try {
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+
+ do_check_true(generateAndUploadKeys(server));
+
+ Service.login();
+ Service.sync();
+
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ } finally {
+ Status.resetSync();
+ Service.startOver();
+ }
+ yield promiseStopServer(server);
+});
+
+function run_test() {
+ validate_all_future_pings();
+ engineManager.register(CatapultEngine);
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_extension_storage_crypto.js b/services/sync/tests/unit/test_extension_storage_crypto.js
new file mode 100644
index 000000000..f93e4970d
--- /dev/null
+++ b/services/sync/tests/unit/test_extension_storage_crypto.js
@@ -0,0 +1,93 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://services-sync/engines/extension-storage.js");
+Cu.import("resource://services-sync/util.js");
+
+/**
+ * Like Assert.throws, but for generators.
+ *
+ * @param {string | Object | function} constraint
+ * What to use to check the exception.
+ * @param {function} f
+ * The function to call.
+ */
+function* throwsGen(constraint, f) {
+ let threw = false;
+ let exception;
+ try {
+ yield* f();
+ }
+ catch (e) {
+ threw = true;
+ exception = e;
+ }
+
+ ok(threw, "did not throw an exception");
+
+ const debuggingMessage = `got ${exception}, expected ${constraint}`;
+ let message = exception;
+ if (typeof exception === "object") {
+ message = exception.message;
+ }
+
+ if (typeof constraint === "function") {
+ ok(constraint(message), debuggingMessage);
+ } else {
+ ok(constraint === message, debuggingMessage);
+ }
+
+}
+
+/**
+ * An EncryptionRemoteTransformer that uses a fixed key bundle,
+ * suitable for testing.
+ */
+class StaticKeyEncryptionRemoteTransformer extends EncryptionRemoteTransformer {
+ constructor(keyBundle) {
+ super();
+ this.keyBundle = keyBundle;
+ }
+
+ getKeys() {
+ return Promise.resolve(this.keyBundle);
+ }
+}
+const BORING_KB = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
+const STRETCHED_KEY = CryptoUtils.hkdf(BORING_KB, undefined, `testing storage.sync encryption`, 2*32);
+const KEY_BUNDLE = {
+ sha256HMACHasher: Utils.makeHMACHasher(Ci.nsICryptoHMAC.SHA256, Utils.makeHMACKey(STRETCHED_KEY.slice(0, 32))),
+ encryptionKeyB64: btoa(STRETCHED_KEY.slice(32, 64)),
+};
+const transformer = new StaticKeyEncryptionRemoteTransformer(KEY_BUNDLE);
+
+add_task(function* test_encryption_transformer_roundtrip() {
+ const POSSIBLE_DATAS = [
+ "string",
+ 2, // number
+ [1, 2, 3], // array
+ {key: "value"}, // object
+ ];
+
+ for (let data of POSSIBLE_DATAS) {
+ const record = {data: data, id: "key-some_2D_key", key: "some-key"};
+
+ deepEqual(record, yield transformer.decode(yield transformer.encode(record)));
+ }
+});
+
+add_task(function* test_refuses_to_decrypt_tampered() {
+ const encryptedRecord = yield transformer.encode({data: [1, 2, 3], id: "key-some_2D_key", key: "some-key"});
+ const tamperedHMAC = Object.assign({}, encryptedRecord, {hmac: "0000000000000000000000000000000000000000000000000000000000000001"});
+ yield* throwsGen(Utils.isHMACMismatch, function*() {
+ yield transformer.decode(tamperedHMAC);
+ });
+
+ const tamperedIV = Object.assign({}, encryptedRecord, {IV: "aaaaaaaaaaaaaaaaaaaaaa=="});
+ yield* throwsGen(Utils.isHMACMismatch, function*() {
+ yield transformer.decode(tamperedIV);
+ });
+});
diff --git a/services/sync/tests/unit/test_extension_storage_engine.js b/services/sync/tests/unit/test_extension_storage_engine.js
new file mode 100644
index 000000000..1b2792703
--- /dev/null
+++ b/services/sync/tests/unit/test_extension_storage_engine.js
@@ -0,0 +1,62 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/extension-storage.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
+
+Service.engineManager.register(ExtensionStorageEngine);
+const engine = Service.engineManager.get("extension-storage");
+do_get_profile(); // so we can use FxAccounts
+loadWebExtensionTestFunctions();
+
+function mock(options) {
+ let calls = [];
+ let ret = function() {
+ calls.push(arguments);
+ return options.returns;
+ }
+ Object.setPrototypeOf(ret, {
+ __proto__: Function.prototype,
+ get calls() {
+ return calls;
+ }
+ });
+ return ret;
+}
+
+add_task(function* test_calling_sync_calls__sync() {
+ let oldSync = ExtensionStorageEngine.prototype._sync;
+ let syncMock = ExtensionStorageEngine.prototype._sync = mock({returns: true});
+ try {
+ // I wanted to call the main sync entry point for the entire
+ // package, but that fails because it tries to sync ClientEngine
+ // first, which fails.
+ yield engine.sync();
+ } finally {
+ ExtensionStorageEngine.prototype._sync = oldSync;
+ }
+ equal(syncMock.calls.length, 1);
+});
+
+add_task(function* test_calling_sync_calls_ext_storage_sync() {
+ const extension = {id: "my-extension"};
+ let oldSync = ExtensionStorageSync.syncAll;
+ let syncMock = ExtensionStorageSync.syncAll = mock({returns: Promise.resolve()});
+ try {
+ yield* withSyncContext(function* (context) {
+ // Set something so that everyone knows that we're using storage.sync
+ yield ExtensionStorageSync.set(extension, {"a": "b"}, context);
+
+ yield engine._sync();
+ });
+ } finally {
+ ExtensionStorageSync.syncAll = oldSync;
+ }
+ do_check_true(syncMock.calls.length >= 1);
+});
diff --git a/services/sync/tests/unit/test_extension_storage_tracker.js b/services/sync/tests/unit/test_extension_storage_tracker.js
new file mode 100644
index 000000000..fac51a897
--- /dev/null
+++ b/services/sync/tests/unit/test_extension_storage_tracker.js
@@ -0,0 +1,38 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/extension-storage.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
+
+Service.engineManager.register(ExtensionStorageEngine);
+const engine = Service.engineManager.get("extension-storage");
+do_get_profile(); // so we can use FxAccounts
+loadWebExtensionTestFunctions();
+
+add_task(function* test_changing_extension_storage_changes_score() {
+ const tracker = engine._tracker;
+ const extension = {id: "my-extension-id"};
+ Svc.Obs.notify("weave:engine:start-tracking");
+ yield* withSyncContext(function*(context) {
+ yield ExtensionStorageSync.set(extension, {"a": "b"}, context);
+ });
+ do_check_eq(tracker.score, SCORE_INCREMENT_MEDIUM);
+
+ tracker.resetScore();
+ yield* withSyncContext(function*(context) {
+ yield ExtensionStorageSync.remove(extension, "a", context);
+ });
+ do_check_eq(tracker.score, SCORE_INCREMENT_MEDIUM);
+
+ Svc.Obs.notify("weave:engine:stop-tracking");
+});
+
+function run_test() {
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_forms_store.js b/services/sync/tests/unit/test_forms_store.js
new file mode 100644
index 000000000..6963df1c0
--- /dev/null
+++ b/services/sync/tests/unit/test_forms_store.js
@@ -0,0 +1,151 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Make sure the form store follows the Store api and correctly accesses the backend form storage");
+Cu.import("resource://services-sync/engines/forms.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://gre/modules/Services.jsm");
+
+function run_test() {
+ let baseuri = "http://fake/uri/";
+ let engine = new FormEngine(Service);
+ let store = engine._store;
+
+ function applyEnsureNoFailures(records) {
+ do_check_eq(store.applyIncomingBatch(records).length, 0);
+ }
+
+ _("Remove any existing entries");
+ store.wipe();
+ for (let id in store.getAllIDs()) {
+ do_throw("Shouldn't get any ids!");
+ }
+
+ _("Add a form entry");
+ applyEnsureNoFailures([{
+ id: Utils.makeGUID(),
+ name: "name!!",
+ value: "value??"
+ }]);
+
+ _("Should have 1 entry now");
+ let id = "";
+ for (let _id in store.getAllIDs()) {
+ if (id == "")
+ id = _id;
+ else
+ do_throw("Should have only gotten one!");
+ }
+ do_check_true(store.itemExists(id));
+
+ _("Should be able to find this entry as a dupe");
+ do_check_eq(engine._findDupe({name: "name!!", value: "value??"}), id);
+
+ let rec = store.createRecord(id);
+ _("Got record for id", id, rec);
+ do_check_eq(rec.name, "name!!");
+ do_check_eq(rec.value, "value??");
+
+ _("Create a non-existent id for delete");
+ do_check_true(store.createRecord("deleted!!").deleted);
+
+ _("Try updating.. doesn't do anything yet");
+ store.update({});
+
+ _("Remove all entries");
+ store.wipe();
+ for (let id in store.getAllIDs()) {
+ do_throw("Shouldn't get any ids!");
+ }
+
+ _("Add another entry");
+ applyEnsureNoFailures([{
+ id: Utils.makeGUID(),
+ name: "another",
+ value: "entry"
+ }]);
+ id = "";
+ for (let _id in store.getAllIDs()) {
+ if (id == "")
+ id = _id;
+ else
+ do_throw("Should have only gotten one!");
+ }
+
+ _("Change the id of the new entry to something else");
+ store.changeItemID(id, "newid");
+
+ _("Make sure it's there");
+ do_check_true(store.itemExists("newid"));
+
+ _("Remove the entry");
+ store.remove({
+ id: "newid"
+ });
+ for (let id in store.getAllIDs()) {
+ do_throw("Shouldn't get any ids!");
+ }
+
+ _("Removing the entry again shouldn't matter");
+ store.remove({
+ id: "newid"
+ });
+ for (let id in store.getAllIDs()) {
+ do_throw("Shouldn't get any ids!");
+ }
+
+ _("Add another entry to delete using applyIncomingBatch");
+ let toDelete = {
+ id: Utils.makeGUID(),
+ name: "todelete",
+ value: "entry"
+ };
+ applyEnsureNoFailures([toDelete]);
+ id = "";
+ for (let _id in store.getAllIDs()) {
+ if (id == "")
+ id = _id;
+ else
+ do_throw("Should have only gotten one!");
+ }
+ do_check_true(store.itemExists(id));
+ // mark entry as deleted
+ toDelete.id = id;
+ toDelete.deleted = true;
+ applyEnsureNoFailures([toDelete]);
+ for (let id in store.getAllIDs()) {
+ do_throw("Shouldn't get any ids!");
+ }
+
+ _("Add an entry to wipe");
+ applyEnsureNoFailures([{
+ id: Utils.makeGUID(),
+ name: "towipe",
+ value: "entry"
+ }]);
+
+ store.wipe();
+
+ for (let id in store.getAllIDs()) {
+ do_throw("Shouldn't get any ids!");
+ }
+
+ _("Ensure we work if formfill is disabled.");
+ Services.prefs.setBoolPref("browser.formfill.enable", false);
+ try {
+ // a search
+ for (let id in store.getAllIDs()) {
+ do_throw("Shouldn't get any ids!");
+ }
+ // an update.
+ applyEnsureNoFailures([{
+ id: Utils.makeGUID(),
+ name: "some",
+ value: "entry"
+ }]);
+ } finally {
+ Services.prefs.clearUserPref("browser.formfill.enable");
+ store.wipe();
+ }
+}
diff --git a/services/sync/tests/unit/test_forms_tracker.js b/services/sync/tests/unit/test_forms_tracker.js
new file mode 100644
index 000000000..f14e208b3
--- /dev/null
+++ b/services/sync/tests/unit/test_forms_tracker.js
@@ -0,0 +1,72 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/engines/forms.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ _("Verify we've got an empty tracker to work with.");
+ let engine = new FormEngine(Service);
+ let tracker = engine._tracker;
+ // Don't do asynchronous writes.
+ tracker.persistChangedIDs = false;
+
+ do_check_empty(tracker.changedIDs);
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ function addEntry(name, value) {
+ engine._store.create({name: name, value: value});
+ }
+ function removeEntry(name, value) {
+ guid = engine._findDupe({name: name, value: value});
+ engine._store.remove({id: guid});
+ }
+
+ try {
+ _("Create an entry. Won't show because we haven't started tracking yet");
+ addEntry("name", "John Doe");
+ do_check_empty(tracker.changedIDs);
+
+ _("Tell the tracker to start tracking changes.");
+ Svc.Obs.notify("weave:engine:start-tracking");
+ removeEntry("name", "John Doe");
+ addEntry("email", "john@doe.com");
+ do_check_attribute_count(tracker.changedIDs, 2);
+
+ _("Notifying twice won't do any harm.");
+ Svc.Obs.notify("weave:engine:start-tracking");
+ addEntry("address", "Memory Lane");
+ do_check_attribute_count(tracker.changedIDs, 3);
+
+
+ _("Check that ignoreAll is respected");
+ tracker.clearChangedIDs();
+ tracker.score = 0;
+ tracker.ignoreAll = true;
+ addEntry("username", "johndoe123");
+ addEntry("favoritecolor", "green");
+ removeEntry("name", "John Doe");
+ tracker.ignoreAll = false;
+ do_check_empty(tracker.changedIDs);
+ equal(tracker.score, 0);
+
+ _("Let's stop tracking again.");
+ tracker.clearChangedIDs();
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ removeEntry("address", "Memory Lane");
+ do_check_empty(tracker.changedIDs);
+
+ _("Notifying twice won't do any harm.");
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ removeEntry("email", "john@doe.com");
+ do_check_empty(tracker.changedIDs);
+
+
+
+ } finally {
+ _("Clean up.");
+ engine._store.wipe();
+ }
+}
diff --git a/services/sync/tests/unit/test_fxa_migration.js b/services/sync/tests/unit/test_fxa_migration.js
new file mode 100644
index 000000000..0ca770e28
--- /dev/null
+++ b/services/sync/tests/unit/test_fxa_migration.js
@@ -0,0 +1,117 @@
+// We change this pref before anything else initializes
+Services.prefs.setCharPref("identity.fxaccounts.auth.uri", "http://localhost");
+
+// Test the FxAMigration module
+Cu.import("resource://services-sync/FxaMigrator.jsm");
+Cu.import("resource://gre/modules/Promise.jsm");
+
+// Set our username pref early so sync initializes with the legacy provider.
+Services.prefs.setCharPref("services.sync.username", "foo");
+// And ensure all debug messages end up being printed.
+Services.prefs.setCharPref("services.sync.log.appender.dump", "Debug");
+
+// Now import sync
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+
+// And reset the username.
+Services.prefs.clearUserPref("services.sync.username");
+
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://testing-common/services/common/logging.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+
+const FXA_USERNAME = "someone@somewhere";
+
+// Utilities
+function promiseOneObserver(topic) {
+ return new Promise((resolve, reject) => {
+ let observer = function(subject, topic, data) {
+ Services.obs.removeObserver(observer, topic);
+ resolve({ subject: subject, data: data });
+ }
+ Services.obs.addObserver(observer, topic, false);
+ });
+}
+
+function promiseStopServer(server) {
+ return new Promise((resolve, reject) => {
+ server.stop(resolve);
+ });
+}
+
+
+// Helpers
+function configureLegacySync() {
+ let engine = new RotaryEngine(Service);
+ engine.enabled = true;
+ Svc.Prefs.set("registerEngines", engine.name);
+ Svc.Prefs.set("log.logger.engine.rotary", "Trace");
+
+ let contents = {
+ meta: {global: {engines: {rotary: {version: engine.version,
+ syncID: engine.syncID}}}},
+ crypto: {},
+ rotary: {}
+ };
+
+ const USER = "foo";
+ const PASSPHRASE = "abcdeabcdeabcdeabcdeabcdea";
+
+ setBasicCredentials(USER, "password", PASSPHRASE);
+
+ let onRequest = function(request, response) {
+ // ideally we'd only do this while a legacy user is configured, but WTH.
+ response.setHeader("x-weave-alert", JSON.stringify({code: "soft-eol"}));
+ }
+ let server = new SyncServer({onRequest: onRequest});
+ server.registerUser(USER, "password");
+ server.createContents(USER, contents);
+ server.start();
+
+ Service.serverURL = server.baseURI;
+ Service.clusterURL = server.baseURI;
+ Service.identity.username = USER;
+ Service._updateCachedURLs();
+
+ Service.engineManager._engines[engine.name] = engine;
+
+ return [engine, server];
+}
+
+add_task(function *testMigrationUnlinks() {
+
+ // when we do a .startOver we want the new provider.
+ let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity");
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false);
+
+ do_register_cleanup(() => {
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", oldValue)
+ });
+
+ // Arrange for a legacy sync user.
+ let [engine, server] = configureLegacySync();
+
+ // Start a sync - this will cause an EOL notification which the migrator's
+ // observer will notice.
+ let promiseMigration = promiseOneObserver("fxa-migration:state-changed");
+ let promiseStartOver = promiseOneObserver("weave:service:start-over:finish");
+ _("Starting sync");
+ Service.sync();
+ _("Finished sync");
+
+ yield promiseStartOver;
+ yield promiseMigration;
+ // We should have seen the observer and Sync should no longer be configured.
+ Assert.ok(!Services.prefs.prefHasUserValue("services.sync.username"));
+});
+
+function run_test() {
+ initTestLogging();
+ do_register_cleanup(() => {
+ fxaMigrator.finalize();
+ Svc.Prefs.resetBranch("");
+ });
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_fxa_node_reassignment.js b/services/sync/tests/unit/test_fxa_node_reassignment.js
new file mode 100644
index 000000000..3e4cefd53
--- /dev/null
+++ b/services/sync/tests/unit/test_fxa_node_reassignment.js
@@ -0,0 +1,368 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Test that node reassignment happens correctly using the FxA identity mgr.");
+// The node-reassignment logic is quite different for FxA than for the legacy
+// provider. In particular, there's no special request necessary for
+// reassignment - it comes from the token server - so we need to ensure the
+// Fxa cluster manager grabs a new token.
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.clear();
+
+function run_test() {
+ Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Resource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+ initTestLogging();
+
+ Service.engineManager.register(RotaryEngine);
+
+ // Setup the FxA identity manager and cluster manager.
+ Status.__authManager = Service.identity = new BrowserIDManager();
+ Service._clusterManager = Service.identity.createClusterManager(Service);
+
+ // None of the failures in this file should result in a UI error.
+ function onUIError() {
+ do_throw("Errors should not be presented in the UI.");
+ }
+ Svc.Obs.add("weave:ui:login:error", onUIError);
+ Svc.Obs.add("weave:ui:sync:error", onUIError);
+
+ run_next_test();
+}
+
+
+// API-compatible with SyncServer handler. Bind `handler` to something to use
+// as a ServerCollection handler.
+function handleReassign(handler, req, resp) {
+ resp.setStatusLine(req.httpVersion, 401, "Node reassignment");
+ resp.setHeader("Content-Type", "application/json");
+ let reassignBody = JSON.stringify({error: "401inator in place"});
+ resp.bodyOutputStream.write(reassignBody, reassignBody.length);
+}
+
+var numTokenRequests = 0;
+
+function prepareServer(cbAfterTokenFetch) {
+ let config = makeIdentityConfig({username: "johndoe"});
+ // A server callback to ensure we don't accidentally hit the wrong endpoint
+ // after a node reassignment.
+ let callback = {
+ __proto__: SyncServerCallback,
+ onRequest(req, resp) {
+ let full = `${req.scheme}://${req.host}:${req.port}${req.path}`;
+ do_check_true(full.startsWith(config.fxaccount.token.endpoint),
+ `request made to ${full}`);
+ }
+ }
+ let server = new SyncServer(callback);
+ server.registerUser("johndoe");
+ server.start();
+
+ // Set the token endpoint for the initial token request that's done implicitly
+ // via configureIdentity.
+ config.fxaccount.token.endpoint = server.baseURI + "1.1/johndoe/";
+ // And future token fetches will do magic around numReassigns.
+ let numReassigns = 0;
+ return configureIdentity(config).then(() => {
+ Service.identity._tokenServerClient = {
+ getTokenFromBrowserIDAssertion: function(uri, assertion, cb) {
+ // Build a new URL with trailing zeros for the SYNC_VERSION part - this
+ // will still be seen as equivalent by the test server, but different
+ // by sync itself.
+ numReassigns += 1;
+ let trailingZeros = new Array(numReassigns + 1).join('0');
+ let token = config.fxaccount.token;
+ token.endpoint = server.baseURI + "1.1" + trailingZeros + "/johndoe";
+ token.uid = config.username;
+ numTokenRequests += 1;
+ cb(null, token);
+ if (cbAfterTokenFetch) {
+ cbAfterTokenFetch();
+ }
+ },
+ };
+ return server;
+ });
+}
+
+function getReassigned() {
+ try {
+ return Services.prefs.getBoolPref("services.sync.lastSyncReassigned");
+ } catch (ex) {
+ if (ex.result == Cr.NS_ERROR_UNEXPECTED) {
+ return false;
+ }
+ do_throw("Got exception retrieving lastSyncReassigned: " +
+ Log.exceptionStr(ex));
+ }
+}
+
+/**
+ * Make a test request to `url`, then watch the result of two syncs
+ * to ensure that a node request was made.
+ * Runs `between` between the two. This can be used to undo deliberate failure
+ * setup, detach observers, etc.
+ */
+function* syncAndExpectNodeReassignment(server, firstNotification, between,
+ secondNotification, url) {
+ _("Starting syncAndExpectNodeReassignment\n");
+ let deferred = Promise.defer();
+ function onwards() {
+ let numTokenRequestsBefore;
+ function onFirstSync() {
+ _("First sync completed.");
+ Svc.Obs.remove(firstNotification, onFirstSync);
+ Svc.Obs.add(secondNotification, onSecondSync);
+
+ do_check_eq(Service.clusterURL, "");
+
+ // Track whether we fetched a new token.
+ numTokenRequestsBefore = numTokenRequests;
+
+ // Allow for tests to clean up error conditions.
+ between();
+ }
+ function onSecondSync() {
+ _("Second sync completed.");
+ Svc.Obs.remove(secondNotification, onSecondSync);
+ Service.scheduler.clearSyncTriggers();
+
+ // Make absolutely sure that any event listeners are done with their work
+ // before we proceed.
+ waitForZeroTimer(function () {
+ _("Second sync nextTick.");
+ do_check_eq(numTokenRequests, numTokenRequestsBefore + 1, "fetched a new token");
+ Service.startOver();
+ server.stop(deferred.resolve);
+ });
+ }
+
+ Svc.Obs.add(firstNotification, onFirstSync);
+ Service.sync();
+ }
+
+ // Make sure that we really do get a 401 (but we can only do that if we are
+ // already logged in, as the login process is what sets up the URLs)
+ if (Service.isLoggedIn) {
+ _("Making request to " + url + " which should 401");
+ let request = new RESTRequest(url);
+ request.get(function () {
+ do_check_eq(request.response.status, 401);
+ Utils.nextTick(onwards);
+ });
+ } else {
+ _("Skipping preliminary validation check for a 401 as we aren't logged in");
+ Utils.nextTick(onwards);
+ }
+ yield deferred.promise;
+}
+
+// Check that when we sync we don't request a new token by default - our
+// test setup has configured the client with a valid token, and that token
+// should be used to form the cluster URL.
+add_task(function* test_single_token_fetch() {
+ _("Test a normal sync only fetches 1 token");
+
+ let numTokenFetches = 0;
+
+ function afterTokenFetch() {
+ numTokenFetches++;
+ }
+
+ // Set the cluster URL to an "old" version - this is to ensure we don't
+ // use that old cached version for the first sync but prefer the value
+ // we got from the token (and as above, we are also checking we don't grab
+ // a new token). If the test actually attempts to connect to this URL
+ // it will crash.
+ Service.clusterURL = "http://example.com/";
+
+ let server = yield prepareServer(afterTokenFetch);
+
+ do_check_false(Service.isLoggedIn, "not already logged in");
+ Service.sync();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
+ do_check_eq(numTokenFetches, 0, "didn't fetch a new token");
+ // A bit hacky, but given we know how prepareServer works we can deduce
+ // that clusterURL we expect.
+ let expectedClusterURL = server.baseURI + "1.1/johndoe/";
+ do_check_eq(Service.clusterURL, expectedClusterURL);
+ yield new Promise(resolve => server.stop(resolve));
+});
+
+add_task(function* test_momentary_401_engine() {
+ _("Test a failure for engine URLs that's resolved by reassignment.");
+ let server = yield prepareServer();
+ let john = server.user("johndoe");
+
+ _("Enabling the Rotary engine.");
+ let engine = Service.engineManager.get("rotary");
+ engine.enabled = true;
+
+ // We need the server to be correctly set up prior to experimenting. Do this
+ // through a sync.
+ let global = {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ rotary: {version: engine.version,
+ syncID: engine.syncID}}
+ john.createCollection("meta").insert("global", global);
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Setting up Rotary collection to 401.");
+ let rotary = john.createCollection("rotary");
+ let oldHandler = rotary.collectionHandler;
+ rotary.collectionHandler = handleReassign.bind(this, undefined);
+
+ // We want to verify that the clusterURL pref has been cleared after a 401
+ // inside a sync. Flag the Rotary engine to need syncing.
+ john.collection("rotary").timestamp += 1000;
+
+ function between() {
+ _("Undoing test changes.");
+ rotary.collectionHandler = oldHandler;
+
+ function onLoginStart() {
+ // lastSyncReassigned shouldn't be cleared until a sync has succeeded.
+ _("Ensuring that lastSyncReassigned is still set at next sync start.");
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ do_check_true(getReassigned());
+ }
+
+ _("Adding observer that lastSyncReassigned is still set on login.");
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+ }
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:finish",
+ between,
+ "weave:service:sync:finish",
+ Service.storageURL + "rotary");
+});
+
+// This test ends up being a failing info fetch *after we're already logged in*.
+add_task(function* test_momentary_401_info_collections_loggedin() {
+ _("Test a failure for info/collections after login that's resolved by reassignment.");
+ let server = yield prepareServer();
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Arrange for info/collections to return a 401.");
+ let oldHandler = server.toplevelHandlers.info;
+ server.toplevelHandlers.info = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.info = oldHandler;
+ }
+
+ do_check_true(Service.isLoggedIn, "already logged in");
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.infoURL);
+});
+
+// This test ends up being a failing info fetch *before we're logged in*.
+// In this case we expect to recover during the login phase - so the first
+// sync succeeds.
+add_task(function* test_momentary_401_info_collections_loggedout() {
+ _("Test a failure for info/collections before login that's resolved by reassignment.");
+
+ let oldHandler;
+ let sawTokenFetch = false;
+
+ function afterTokenFetch() {
+ // After a single token fetch, we undo our evil handleReassign hack, so
+ // the next /info request returns the collection instead of a 401
+ server.toplevelHandlers.info = oldHandler;
+ sawTokenFetch = true;
+ }
+
+ let server = yield prepareServer(afterTokenFetch);
+
+ // Return a 401 for the next /info request - it will be reset immediately
+ // after a new token is fetched.
+ oldHandler = server.toplevelHandlers.info
+ server.toplevelHandlers.info = handleReassign;
+
+ do_check_false(Service.isLoggedIn, "not already logged in");
+
+ Service.sync();
+ do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
+ // sync was successful - check we grabbed a new token.
+ do_check_true(sawTokenFetch, "a new token was fetched by this test.")
+ // and we are done.
+ Service.startOver();
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ yield deferred.promise;
+});
+
+// This test ends up being a failing meta/global fetch *after we're already logged in*.
+add_task(function* test_momentary_401_storage_loggedin() {
+ _("Test a failure for any storage URL after login that's resolved by" +
+ "reassignment.");
+ let server = yield prepareServer();
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Arrange for meta/global to return a 401.");
+ let oldHandler = server.toplevelHandlers.storage;
+ server.toplevelHandlers.storage = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.storage = oldHandler;
+ }
+
+ do_check_true(Service.isLoggedIn, "already logged in");
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.storageURL + "meta/global");
+});
+
+// This test ends up being a failing meta/global fetch *before we've logged in*.
+add_task(function* test_momentary_401_storage_loggedout() {
+ _("Test a failure for any storage URL before login, not just engine parts. " +
+ "Resolved by reassignment.");
+ let server = yield prepareServer();
+
+ // Return a 401 for all storage requests.
+ let oldHandler = server.toplevelHandlers.storage;
+ server.toplevelHandlers.storage = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.storage = oldHandler;
+ }
+
+ do_check_false(Service.isLoggedIn, "already logged in");
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:login:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.storageURL + "meta/global");
+});
diff --git a/services/sync/tests/unit/test_fxa_service_cluster.js b/services/sync/tests/unit/test_fxa_service_cluster.js
new file mode 100644
index 000000000..b4f83a7fe
--- /dev/null
+++ b/services/sync/tests/unit/test_fxa_service_cluster.js
@@ -0,0 +1,68 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/fxa_utils.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+add_task(function* test_findCluster() {
+ _("Test FxA _findCluster()");
+
+ _("_findCluster() throws on 500 errors.");
+ initializeIdentityWithTokenServerResponse({
+ status: 500,
+ headers: [],
+ body: "",
+ });
+
+ yield Service.identity.initializeWithCurrentIdentity();
+ yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
+ "should reject due to 500");
+
+ Assert.throws(function() {
+ Service._clusterManager._findCluster();
+ });
+
+ _("_findCluster() returns null on authentication errors.");
+ initializeIdentityWithTokenServerResponse({
+ status: 401,
+ headers: {"content-type": "application/json"},
+ body: "{}",
+ });
+
+ yield Service.identity.initializeWithCurrentIdentity();
+ yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
+ "should reject due to 401");
+
+ cluster = Service._clusterManager._findCluster();
+ Assert.strictEqual(cluster, null);
+
+ _("_findCluster() works with correct tokenserver response.");
+ let endpoint = "http://example.com/something";
+ initializeIdentityWithTokenServerResponse({
+ status: 200,
+ headers: {"content-type": "application/json"},
+ body:
+ JSON.stringify({
+ api_endpoint: endpoint,
+ duration: 300,
+ id: "id",
+ key: "key",
+ uid: "uid",
+ })
+ });
+
+ yield Service.identity.initializeWithCurrentIdentity();
+ yield Service.identity.whenReadyToAuthenticate.promise;
+ cluster = Service._clusterManager._findCluster();
+ // The cluster manager ensures a trailing "/"
+ Assert.strictEqual(cluster, endpoint + "/");
+
+ Svc.Prefs.resetBranch("");
+});
+
+function run_test() {
+ initTestLogging();
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_fxa_startOver.js b/services/sync/tests/unit/test_fxa_startOver.js
new file mode 100644
index 000000000..629379648
--- /dev/null
+++ b/services/sync/tests/unit/test_fxa_startOver.js
@@ -0,0 +1,63 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/browserid_identity.js");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+add_task(function* test_startover() {
+ let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity", true);
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false);
+
+ ensureLegacyIdentityManager();
+ yield configureIdentity({username: "johndoe"});
+
+ // The boolean flag on the xpcom service should reflect a legacy provider.
+ let xps = Cc["@mozilla.org/weave/service;1"]
+ .getService(Components.interfaces.nsISupports)
+ .wrappedJSObject;
+ do_check_false(xps.fxAccountsEnabled);
+
+ // we expect the "legacy" provider (but can't instanceof that, as BrowserIDManager
+ // extends it)
+ do_check_false(Service.identity instanceof BrowserIDManager);
+
+ Service.serverURL = "https://localhost/";
+ Service.clusterURL = Service.serverURL;
+
+ Service.login();
+ // We should have a cluster URL
+ do_check_true(Service.clusterURL.length > 0);
+
+ // remember some stuff so we can reset it after.
+ let oldIdentity = Service.identity;
+ let oldClusterManager = Service._clusterManager;
+ let deferred = Promise.defer();
+ Services.obs.addObserver(function observeStartOverFinished() {
+ Services.obs.removeObserver(observeStartOverFinished, "weave:service:start-over:finish");
+ deferred.resolve();
+ }, "weave:service:start-over:finish", false);
+
+ Service.startOver();
+ yield deferred.promise; // wait for the observer to fire.
+
+ // the xpcom service should indicate FxA is enabled.
+ do_check_true(xps.fxAccountsEnabled);
+ // should have swapped identities.
+ do_check_true(Service.identity instanceof BrowserIDManager);
+ // should have clobbered the cluster URL
+ do_check_eq(Service.clusterURL, "");
+
+ // we should have thrown away the old identity provider and cluster manager.
+ do_check_neq(oldIdentity, Service.identity);
+ do_check_neq(oldClusterManager, Service._clusterManager);
+
+ // reset the world.
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", oldValue);
+});
diff --git a/services/sync/tests/unit/test_history_engine.js b/services/sync/tests/unit/test_history_engine.js
new file mode 100644
index 000000000..fd5067ce9
--- /dev/null
+++ b/services/sync/tests/unit/test_history_engine.js
@@ -0,0 +1,147 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines/history.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.clear();
+
+add_test(function test_setup() {
+ PlacesTestUtils.clearHistory().then(run_next_test);
+});
+
+add_test(function test_processIncoming_mobile_history_batched() {
+ _("SyncEngine._processIncoming works on history engine.");
+
+ let FAKE_DOWNLOAD_LIMIT = 100;
+
+ Svc.Prefs.set("client.type", "mobile");
+ Service.engineManager.register(HistoryEngine);
+
+ // A collection that logs each GET
+ let collection = new ServerCollection();
+ collection.get_log = [];
+ collection._get = collection.get;
+ collection.get = function (options) {
+ this.get_log.push(options);
+ return this._get(options);
+ };
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/history": collection.handler()
+ });
+
+ new SyncTestingInfrastructure(server);
+
+ // Let's create some 234 server side history records. They're all at least
+ // 10 minutes old.
+ let visitType = Ci.nsINavHistoryService.TRANSITION_LINK;
+ for (var i = 0; i < 234; i++) {
+ let id = 'record-no' + ("00" + i).slice(-3);
+ let modified = Date.now()/1000 - 60*(i+10);
+ let payload = encryptPayload({
+ id: id,
+ histUri: "http://foo/bar?" + id,
+ title: id,
+ sortindex: i,
+ visits: [{date: (modified - 5) * 1000000, type: visitType}],
+ deleted: false});
+
+ let wbo = new ServerWBO(id, payload);
+ wbo.modified = modified;
+ collection.insertWBO(wbo);
+ }
+
+ let engine = Service.engineManager.get("history");
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {history: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ _("On a mobile client, we get new records from the server in batches of 50.");
+ engine._syncStartup();
+
+ // Fake a lower limit.
+ engine.downloadLimit = FAKE_DOWNLOAD_LIMIT;
+ _("Last modified: " + engine.lastModified);
+ _("Processing...");
+ engine._processIncoming();
+
+ _("Last modified: " + engine.lastModified);
+ engine._syncFinish();
+
+ // Back to the normal limit.
+ _("Running again. Should fetch none, because of lastModified");
+ engine.downloadLimit = MAX_HISTORY_DOWNLOAD;
+ _("Processing...");
+ engine._processIncoming();
+
+ _("Last modified: " + engine.lastModified);
+ _("Running again. Expecting to pull everything");
+
+ engine.lastModified = undefined;
+ engine.lastSync = 0;
+ _("Processing...");
+ engine._processIncoming();
+
+ _("Last modified: " + engine.lastModified);
+
+ // Verify that the right number of GET requests with the right
+ // kind of parameters were made.
+ do_check_eq(collection.get_log.length,
+ // First try:
+ 1 + // First 50...
+ 1 + // 1 GUID fetch...
+ // 1 fetch...
+ Math.ceil((FAKE_DOWNLOAD_LIMIT - 50) / MOBILE_BATCH_SIZE) +
+ // Second try: none
+ // Third try:
+ 1 + // First 50...
+ 1 + // 1 GUID fetch...
+ // 4 fetch...
+ Math.ceil((234 - 50) / MOBILE_BATCH_SIZE));
+
+ // Check the structure of each HTTP request.
+ do_check_eq(collection.get_log[0].full, 1);
+ do_check_eq(collection.get_log[0].limit, MOBILE_BATCH_SIZE);
+ do_check_eq(collection.get_log[1].full, undefined);
+ do_check_eq(collection.get_log[1].sort, "index");
+ do_check_eq(collection.get_log[1].limit, FAKE_DOWNLOAD_LIMIT);
+ do_check_eq(collection.get_log[2].full, 1);
+ do_check_eq(collection.get_log[3].full, 1);
+ do_check_eq(collection.get_log[3].limit, MOBILE_BATCH_SIZE);
+ do_check_eq(collection.get_log[4].full, undefined);
+ do_check_eq(collection.get_log[4].sort, "index");
+ do_check_eq(collection.get_log[4].limit, MAX_HISTORY_DOWNLOAD);
+ for (let i = 0; i <= Math.floor((234 - 50) / MOBILE_BATCH_SIZE); i++) {
+ let j = i + 5;
+ do_check_eq(collection.get_log[j].full, 1);
+ do_check_eq(collection.get_log[j].limit, undefined);
+ if (i < Math.floor((234 - 50) / MOBILE_BATCH_SIZE))
+ do_check_eq(collection.get_log[j].ids.length, MOBILE_BATCH_SIZE);
+ else
+ do_check_eq(collection.get_log[j].ids.length, 234 % MOBILE_BATCH_SIZE);
+ }
+
+ } finally {
+ PlacesTestUtils.clearHistory().then(() => {
+ server.stop(do_test_finished);
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ });
+ }
+});
+
+function run_test() {
+ generateNewKeys(Service.collectionKeys);
+
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_history_store.js b/services/sync/tests/unit/test_history_store.js
new file mode 100644
index 000000000..207b621e0
--- /dev/null
+++ b/services/sync/tests/unit/test_history_store.js
@@ -0,0 +1,297 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/engines/history.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+const TIMESTAMP1 = (Date.now() - 103406528) * 1000;
+const TIMESTAMP2 = (Date.now() - 6592903) * 1000;
+const TIMESTAMP3 = (Date.now() - 123894) * 1000;
+
+function queryPlaces(uri, options) {
+ let query = PlacesUtils.history.getNewQuery();
+ query.uri = uri;
+ let res = PlacesUtils.history.executeQuery(query, options);
+ res.root.containerOpen = true;
+
+ let results = [];
+ for (let i = 0; i < res.root.childCount; i++)
+ results.push(res.root.getChild(i));
+ res.root.containerOpen = false;
+ return results;
+}
+
+function queryHistoryVisits(uri) {
+ let options = PlacesUtils.history.getNewQueryOptions();
+ options.queryType = Ci.nsINavHistoryQueryOptions.QUERY_TYPE_HISTORY;
+ options.resultType = Ci.nsINavHistoryQueryOptions.RESULTS_AS_VISIT;
+ options.sortingMode = Ci.nsINavHistoryQueryOptions.SORT_BY_DATE_ASCENDING;
+ return queryPlaces(uri, options);
+}
+
+function onNextTitleChanged(callback) {
+ PlacesUtils.history.addObserver({
+ onBeginUpdateBatch: function onBeginUpdateBatch() {},
+ onEndUpdateBatch: function onEndUpdateBatch() {},
+ onPageChanged: function onPageChanged() {},
+ onTitleChanged: function onTitleChanged() {
+ PlacesUtils.history.removeObserver(this);
+ Utils.nextTick(callback);
+ },
+ onVisit: function onVisit() {},
+ onDeleteVisits: function onDeleteVisits() {},
+ onPageExpired: function onPageExpired() {},
+ onDeleteURI: function onDeleteURI() {},
+ onClearHistory: function onClearHistory() {},
+ QueryInterface: XPCOMUtils.generateQI([
+ Ci.nsINavHistoryObserver,
+ Ci.nsINavHistoryObserver_MOZILLA_1_9_1_ADDITIONS,
+ Ci.nsISupportsWeakReference
+ ])
+ }, true);
+}
+
+// Ensure exceptions from inside callbacks leads to test failures while
+// we still clean up properly.
+function ensureThrows(func) {
+ return function() {
+ try {
+ func.apply(this, arguments);
+ } catch (ex) {
+ PlacesTestUtils.clearHistory();
+ do_throw(ex);
+ }
+ };
+}
+
+var store = new HistoryEngine(Service)._store;
+function applyEnsureNoFailures(records) {
+ do_check_eq(store.applyIncomingBatch(records).length, 0);
+}
+
+var fxuri, fxguid, tburi, tbguid;
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+add_test(function test_store() {
+ _("Verify that we've got an empty store to work with.");
+ do_check_empty(store.getAllIDs());
+
+ _("Let's create an entry in the database.");
+ fxuri = Utils.makeURI("http://getfirefox.com/");
+
+ let place = {
+ uri: fxuri,
+ title: "Get Firefox!",
+ visits: [{
+ visitDate: TIMESTAMP1,
+ transitionType: Ci.nsINavHistoryService.TRANSITION_LINK
+ }]
+ };
+ PlacesUtils.asyncHistory.updatePlaces(place, {
+ handleError: function handleError() {
+ do_throw("Unexpected error in adding visit.");
+ },
+ handleResult: function handleResult() {},
+ handleCompletion: onVisitAdded
+ });
+
+ function onVisitAdded() {
+ _("Verify that the entry exists.");
+ let ids = Object.keys(store.getAllIDs());
+ do_check_eq(ids.length, 1);
+ fxguid = ids[0];
+ do_check_true(store.itemExists(fxguid));
+
+ _("If we query a non-existent record, it's marked as deleted.");
+ let record = store.createRecord("non-existent");
+ do_check_true(record.deleted);
+
+ _("Verify createRecord() returns a complete record.");
+ record = store.createRecord(fxguid);
+ do_check_eq(record.histUri, fxuri.spec);
+ do_check_eq(record.title, "Get Firefox!");
+ do_check_eq(record.visits.length, 1);
+ do_check_eq(record.visits[0].date, TIMESTAMP1);
+ do_check_eq(record.visits[0].type, Ci.nsINavHistoryService.TRANSITION_LINK);
+
+ _("Let's modify the record and have the store update the database.");
+ let secondvisit = {date: TIMESTAMP2,
+ type: Ci.nsINavHistoryService.TRANSITION_TYPED};
+ onNextTitleChanged(ensureThrows(function() {
+ let queryres = queryHistoryVisits(fxuri);
+ do_check_eq(queryres.length, 2);
+ do_check_eq(queryres[0].time, TIMESTAMP1);
+ do_check_eq(queryres[0].title, "Hol Dir Firefox!");
+ do_check_eq(queryres[1].time, TIMESTAMP2);
+ do_check_eq(queryres[1].title, "Hol Dir Firefox!");
+ run_next_test();
+ }));
+ applyEnsureNoFailures([
+ {id: fxguid,
+ histUri: record.histUri,
+ title: "Hol Dir Firefox!",
+ visits: [record.visits[0], secondvisit]}
+ ]);
+ }
+});
+
+add_test(function test_store_create() {
+ _("Create a brand new record through the store.");
+ tbguid = Utils.makeGUID();
+ tburi = Utils.makeURI("http://getthunderbird.com");
+ onNextTitleChanged(ensureThrows(function() {
+ do_check_attribute_count(store.getAllIDs(), 2);
+ let queryres = queryHistoryVisits(tburi);
+ do_check_eq(queryres.length, 1);
+ do_check_eq(queryres[0].time, TIMESTAMP3);
+ do_check_eq(queryres[0].title, "The bird is the word!");
+ run_next_test();
+ }));
+ applyEnsureNoFailures([
+ {id: tbguid,
+ histUri: tburi.spec,
+ title: "The bird is the word!",
+ visits: [{date: TIMESTAMP3,
+ type: Ci.nsINavHistoryService.TRANSITION_TYPED}]}
+ ]);
+});
+
+add_test(function test_null_title() {
+ _("Make sure we handle a null title gracefully (it can happen in some cases, e.g. for resource:// URLs)");
+ let resguid = Utils.makeGUID();
+ let resuri = Utils.makeURI("unknown://title");
+ applyEnsureNoFailures([
+ {id: resguid,
+ histUri: resuri.spec,
+ title: null,
+ visits: [{date: TIMESTAMP3,
+ type: Ci.nsINavHistoryService.TRANSITION_TYPED}]}
+ ]);
+ do_check_attribute_count(store.getAllIDs(), 3);
+ let queryres = queryHistoryVisits(resuri);
+ do_check_eq(queryres.length, 1);
+ do_check_eq(queryres[0].time, TIMESTAMP3);
+ run_next_test();
+});
+
+add_test(function test_invalid_records() {
+ _("Make sure we handle invalid URLs in places databases gracefully.");
+ let connection = PlacesUtils.history
+ .QueryInterface(Ci.nsPIPlacesDatabase)
+ .DBConnection;
+ let stmt = connection.createAsyncStatement(
+ "INSERT INTO moz_places "
+ + "(url, url_hash, title, rev_host, visit_count, last_visit_date) "
+ + "VALUES ('invalid-uri', hash('invalid-uri'), 'Invalid URI', '.', 1, " + TIMESTAMP3 + ")"
+ );
+ Async.querySpinningly(stmt);
+ stmt.finalize();
+ // Add the corresponding visit to retain database coherence.
+ stmt = connection.createAsyncStatement(
+ "INSERT INTO moz_historyvisits "
+ + "(place_id, visit_date, visit_type, session) "
+ + "VALUES ((SELECT id FROM moz_places WHERE url_hash = hash('invalid-uri') AND url = 'invalid-uri'), "
+ + TIMESTAMP3 + ", " + Ci.nsINavHistoryService.TRANSITION_TYPED + ", 1)"
+ );
+ Async.querySpinningly(stmt);
+ stmt.finalize();
+ do_check_attribute_count(store.getAllIDs(), 4);
+
+ _("Make sure we report records with invalid URIs.");
+ let invalid_uri_guid = Utils.makeGUID();
+ let failed = store.applyIncomingBatch([{
+ id: invalid_uri_guid,
+ histUri: ":::::::::::::::",
+ title: "Doesn't have a valid URI",
+ visits: [{date: TIMESTAMP3,
+ type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
+ ]);
+ do_check_eq(failed.length, 1);
+ do_check_eq(failed[0], invalid_uri_guid);
+
+ _("Make sure we handle records with invalid GUIDs gracefully (ignore).");
+ applyEnsureNoFailures([
+ {id: "invalid",
+ histUri: "http://invalid.guid/",
+ title: "Doesn't have a valid GUID",
+ visits: [{date: TIMESTAMP3,
+ type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
+ ]);
+
+ _("Make sure we handle records with invalid visit codes or visit dates, gracefully ignoring those visits.");
+ let no_date_visit_guid = Utils.makeGUID();
+ let no_type_visit_guid = Utils.makeGUID();
+ let invalid_type_visit_guid = Utils.makeGUID();
+ let non_integer_visit_guid = Utils.makeGUID();
+ failed = store.applyIncomingBatch([
+ {id: no_date_visit_guid,
+ histUri: "http://no.date.visit/",
+ title: "Visit has no date",
+ visits: [{type: Ci.nsINavHistoryService.TRANSITION_EMBED}]},
+ {id: no_type_visit_guid,
+ histUri: "http://no.type.visit/",
+ title: "Visit has no type",
+ visits: [{date: TIMESTAMP3}]},
+ {id: invalid_type_visit_guid,
+ histUri: "http://invalid.type.visit/",
+ title: "Visit has invalid type",
+ visits: [{date: TIMESTAMP3,
+ type: Ci.nsINavHistoryService.TRANSITION_LINK - 1}]},
+ {id: non_integer_visit_guid,
+ histUri: "http://non.integer.visit/",
+ title: "Visit has non-integer date",
+ visits: [{date: 1234.567,
+ type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
+ ]);
+ do_check_eq(failed.length, 0);
+
+ _("Make sure we handle records with javascript: URLs gracefully.");
+ applyEnsureNoFailures([
+ {id: Utils.makeGUID(),
+ histUri: "javascript:''",
+ title: "javascript:''",
+ visits: [{date: TIMESTAMP3,
+ type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
+ ]);
+
+ _("Make sure we handle records without any visits gracefully.");
+ applyEnsureNoFailures([
+ {id: Utils.makeGUID(),
+ histUri: "http://getfirebug.com",
+ title: "Get Firebug!",
+ visits: []}
+ ]);
+
+ run_next_test();
+});
+
+add_test(function test_remove() {
+ _("Remove an existent record and a non-existent from the store.");
+ applyEnsureNoFailures([{id: fxguid, deleted: true},
+ {id: Utils.makeGUID(), deleted: true}]);
+ do_check_false(store.itemExists(fxguid));
+ let queryres = queryHistoryVisits(fxuri);
+ do_check_eq(queryres.length, 0);
+
+ _("Make sure wipe works.");
+ store.wipe();
+ do_check_empty(store.getAllIDs());
+ queryres = queryHistoryVisits(fxuri);
+ do_check_eq(queryres.length, 0);
+ queryres = queryHistoryVisits(tburi);
+ do_check_eq(queryres.length, 0);
+ run_next_test();
+});
+
+add_test(function cleanup() {
+ _("Clean up.");
+ PlacesTestUtils.clearHistory().then(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_history_tracker.js b/services/sync/tests/unit/test_history_tracker.js
new file mode 100644
index 000000000..5ed022fb0
--- /dev/null
+++ b/services/sync/tests/unit/test_history_tracker.js
@@ -0,0 +1,203 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines/history.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+function onScoreUpdated(callback) {
+ Svc.Obs.add("weave:engine:score:updated", function observer() {
+ Svc.Obs.remove("weave:engine:score:updated", observer);
+ try {
+ callback();
+ } catch (ex) {
+ do_throw(ex);
+ }
+ });
+}
+
+Service.engineManager.clear();
+Service.engineManager.register(HistoryEngine);
+var engine = Service.engineManager.get("history");
+var tracker = engine._tracker;
+
+// Don't write out by default.
+tracker.persistChangedIDs = false;
+
+var _counter = 0;
+function addVisit() {
+ let uriString = "http://getfirefox.com/" + _counter++;
+ let uri = Utils.makeURI(uriString);
+ _("Adding visit for URI " + uriString);
+ let place = {
+ uri: uri,
+ visits: [ {
+ visitDate: Date.now() * 1000,
+ transitionType: PlacesUtils.history.TRANSITION_LINK
+ } ]
+ };
+
+ let cb = Async.makeSpinningCallback();
+ PlacesUtils.asyncHistory.updatePlaces(place, {
+ handleError: function () {
+ _("Error adding visit for " + uriString);
+ cb(new Error("Error adding history entry"));
+ },
+
+ handleResult: function () {
+ },
+
+ handleCompletion: function () {
+ _("Added visit for " + uriString);
+ cb();
+ }
+ });
+
+ // Spin the event loop to embed this async call in a sync API.
+ cb.wait();
+ return uri;
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Tracker.History").level = Log.Level.Trace;
+ run_next_test();
+}
+
+add_test(function test_empty() {
+ _("Verify we've got an empty, disabled tracker to work with.");
+ do_check_empty(tracker.changedIDs);
+ do_check_eq(tracker.score, 0);
+ do_check_false(tracker._isTracking);
+ run_next_test();
+});
+
+add_test(function test_not_tracking(next) {
+ _("Create history item. Won't show because we haven't started tracking yet");
+ addVisit();
+ Utils.nextTick(function() {
+ do_check_empty(tracker.changedIDs);
+ do_check_eq(tracker.score, 0);
+ run_next_test();
+ });
+});
+
+add_test(function test_start_tracking() {
+ _("Add hook for save completion.");
+ tracker.persistChangedIDs = true;
+ tracker.onSavedChangedIDs = function () {
+ _("changedIDs written to disk. Proceeding.");
+ // Turn this back off.
+ tracker.persistChangedIDs = false;
+ delete tracker.onSavedChangedIDs;
+ run_next_test();
+ };
+
+ _("Tell the tracker to start tracking changes.");
+ onScoreUpdated(function() {
+ _("Score updated in test_start_tracking.");
+ do_check_attribute_count(tracker.changedIDs, 1);
+ do_check_eq(tracker.score, SCORE_INCREMENT_SMALL);
+ });
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+ addVisit();
+});
+
+add_test(function test_start_tracking_twice() {
+ _("Verifying preconditions from test_start_tracking.");
+ do_check_attribute_count(tracker.changedIDs, 1);
+ do_check_eq(tracker.score, SCORE_INCREMENT_SMALL);
+
+ _("Notifying twice won't do any harm.");
+ onScoreUpdated(function() {
+ _("Score updated in test_start_tracking_twice.");
+ do_check_attribute_count(tracker.changedIDs, 2);
+ do_check_eq(tracker.score, 2 * SCORE_INCREMENT_SMALL);
+ run_next_test();
+ });
+
+ Svc.Obs.notify("weave:engine:start-tracking");
+ addVisit();
+});
+
+add_test(function test_track_delete() {
+ _("Deletions are tracked.");
+
+ // This isn't present because we weren't tracking when it was visited.
+ let uri = Utils.makeURI("http://getfirefox.com/0");
+ let guid = engine._store.GUIDForUri(uri);
+ do_check_false(guid in tracker.changedIDs);
+
+ onScoreUpdated(function() {
+ do_check_true(guid in tracker.changedIDs);
+ do_check_attribute_count(tracker.changedIDs, 3);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE + 2 * SCORE_INCREMENT_SMALL);
+ run_next_test();
+ });
+
+ do_check_eq(tracker.score, 2 * SCORE_INCREMENT_SMALL);
+ PlacesUtils.history.removePage(uri);
+});
+
+add_test(function test_dont_track_expiration() {
+ _("Expirations are not tracked.");
+ let uriToExpire = addVisit();
+ let guidToExpire = engine._store.GUIDForUri(uriToExpire);
+ let uriToRemove = addVisit();
+ let guidToRemove = engine._store.GUIDForUri(uriToRemove);
+
+ tracker.clearChangedIDs();
+ do_check_false(guidToExpire in tracker.changedIDs);
+ do_check_false(guidToRemove in tracker.changedIDs);
+
+ onScoreUpdated(function() {
+ do_check_false(guidToExpire in tracker.changedIDs);
+ do_check_true(guidToRemove in tracker.changedIDs);
+ do_check_attribute_count(tracker.changedIDs, 1);
+ run_next_test();
+ });
+
+ // Observe expiration.
+ Services.obs.addObserver(function onExpiration(aSubject, aTopic, aData) {
+ Services.obs.removeObserver(onExpiration, aTopic);
+ // Remove the remaining page to update its score.
+ PlacesUtils.history.removePage(uriToRemove);
+ }, PlacesUtils.TOPIC_EXPIRATION_FINISHED, false);
+
+ // Force expiration of 1 entry.
+ Services.prefs.setIntPref("places.history.expiration.max_pages", 0);
+ Cc["@mozilla.org/places/expiration;1"]
+ .getService(Ci.nsIObserver)
+ .observe(null, "places-debug-start-expiration", 1);
+});
+
+add_test(function test_stop_tracking() {
+ _("Let's stop tracking again.");
+ tracker.clearChangedIDs();
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ addVisit();
+ Utils.nextTick(function() {
+ do_check_empty(tracker.changedIDs);
+ run_next_test();
+ });
+});
+
+add_test(function test_stop_tracking_twice() {
+ _("Notifying twice won't do any harm.");
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ addVisit();
+ Utils.nextTick(function() {
+ do_check_empty(tracker.changedIDs);
+ run_next_test();
+ });
+});
+
+add_test(function cleanup() {
+ _("Clean up.");
+ PlacesTestUtils.clearHistory().then(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_hmac_error.js b/services/sync/tests/unit/test_hmac_error.js
new file mode 100644
index 000000000..272c0de47
--- /dev/null
+++ b/services/sync/tests/unit/test_hmac_error.js
@@ -0,0 +1,248 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+// Track HMAC error counts.
+var hmacErrorCount = 0;
+(function () {
+ let hHE = Service.handleHMACEvent;
+ Service.handleHMACEvent = function () {
+ hmacErrorCount++;
+ return hHE.call(Service);
+ };
+})();
+
+function shared_setup() {
+ hmacErrorCount = 0;
+
+ // Do not instantiate SyncTestingInfrastructure; we need real crypto.
+ ensureLegacyIdentityManager();
+ setBasicCredentials("foo", "foo", "aabcdeabcdeabcdeabcdeabcde");
+
+ // Make sure RotaryEngine is the only one we sync.
+ Service.engineManager._engines = {};
+ Service.engineManager.register(RotaryEngine);
+ let engine = Service.engineManager.get("rotary");
+ engine.enabled = true;
+ engine.lastSync = 123; // Needs to be non-zero so that tracker is queried.
+ engine._store.items = {flying: "LNER Class A3 4472",
+ scotsman: "Flying Scotsman"};
+ engine._tracker.addChangedID('scotsman', 0);
+ do_check_eq(1, Service.engineManager.getEnabled().length);
+
+ let engines = {rotary: {version: engine.version,
+ syncID: engine.syncID},
+ clients: {version: Service.clientsEngine.version,
+ syncID: Service.clientsEngine.syncID}};
+
+ // Common server objects.
+ let global = new ServerWBO("global", {engines: engines});
+ let keysWBO = new ServerWBO("keys");
+ let rotaryColl = new ServerCollection({}, true);
+ let clientsColl = new ServerCollection({}, true);
+
+ return [engine, rotaryColl, clientsColl, keysWBO, global];
+}
+
+add_task(function *hmac_error_during_404() {
+ _("Attempt to replicate the HMAC error setup.");
+ let [engine, rotaryColl, clientsColl, keysWBO, global] = shared_setup();
+
+ // Hand out 404s for crypto/keys.
+ let keysHandler = keysWBO.handler();
+ let key404Counter = 0;
+ let keys404Handler = function (request, response) {
+ if (key404Counter > 0) {
+ let body = "Not Found";
+ response.setStatusLine(request.httpVersion, 404, body);
+ response.bodyOutputStream.write(body, body.length);
+ key404Counter--;
+ return;
+ }
+ keysHandler(request, response);
+ };
+
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+ let collections = collectionsHelper.collections;
+ let handlers = {
+ "/1.1/foo/info/collections": collectionsHelper.handler,
+ "/1.1/foo/storage/meta/global": upd("meta", global.handler()),
+ "/1.1/foo/storage/crypto/keys": upd("crypto", keys404Handler),
+ "/1.1/foo/storage/clients": upd("clients", clientsColl.handler()),
+ "/1.1/foo/storage/rotary": upd("rotary", rotaryColl.handler())
+ };
+
+ let server = sync_httpd_setup(handlers);
+ Service.serverURL = server.baseURI;
+
+ try {
+ _("Syncing.");
+ yield sync_and_validate_telem();
+
+ _("Partially resetting client, as if after a restart, and forcing redownload.");
+ Service.collectionKeys.clear();
+ engine.lastSync = 0; // So that we redownload records.
+ key404Counter = 1;
+ _("---------------------------");
+ yield sync_and_validate_telem();
+ _("---------------------------");
+
+ // Two rotary items, one client record... no errors.
+ do_check_eq(hmacErrorCount, 0)
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ yield new Promise(resolve => server.stop(resolve));
+ }
+});
+
+add_test(function hmac_error_during_node_reassignment() {
+ _("Attempt to replicate an HMAC error during node reassignment.");
+ let [engine, rotaryColl, clientsColl, keysWBO, global] = shared_setup();
+
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+
+ // We'll provide a 401 mid-way through the sync. This function
+ // simulates shifting to a node which has no data.
+ function on401() {
+ _("Deleting server data...");
+ global.delete();
+ rotaryColl.delete();
+ keysWBO.delete();
+ clientsColl.delete();
+ delete collectionsHelper.collections.rotary;
+ delete collectionsHelper.collections.crypto;
+ delete collectionsHelper.collections.clients;
+ _("Deleted server data.");
+ }
+
+ let should401 = false;
+ function upd401(coll, handler) {
+ return function (request, response) {
+ if (should401 && (request.method != "DELETE")) {
+ on401();
+ should401 = false;
+ let body = "\"reassigned!\"";
+ response.setStatusLine(request.httpVersion, 401, "Node reassignment.");
+ response.bodyOutputStream.write(body, body.length);
+ return;
+ }
+ handler(request, response);
+ };
+ }
+
+ function sameNodeHandler(request, response) {
+ // Set this so that _setCluster will think we've really changed.
+ let url = Service.serverURL.replace("localhost", "LOCALHOST");
+ _("Client requesting reassignment; pointing them to " + url);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(url, url.length);
+ }
+
+ let handlers = {
+ "/user/1.0/foo/node/weave": sameNodeHandler,
+ "/1.1/foo/info/collections": collectionsHelper.handler,
+ "/1.1/foo/storage/meta/global": upd("meta", global.handler()),
+ "/1.1/foo/storage/crypto/keys": upd("crypto", keysWBO.handler()),
+ "/1.1/foo/storage/clients": upd401("clients", clientsColl.handler()),
+ "/1.1/foo/storage/rotary": upd("rotary", rotaryColl.handler())
+ };
+
+ let server = sync_httpd_setup(handlers);
+ Service.serverURL = server.baseURI;
+ _("Syncing.");
+ // First hit of clients will 401. This will happen after meta/global and
+ // keys -- i.e., in the middle of the sync, but before RotaryEngine.
+ should401 = true;
+
+ // Use observers to perform actions when our sync finishes.
+ // This allows us to observe the automatic next-tick sync that occurs after
+ // an abort.
+ function onSyncError() {
+ do_throw("Should not get a sync error!");
+ }
+ function onSyncFinished() {}
+ let obs = {
+ observe: function observe(subject, topic, data) {
+ switch (topic) {
+ case "weave:service:sync:error":
+ onSyncError();
+ break;
+ case "weave:service:sync:finish":
+ onSyncFinished();
+ break;
+ }
+ }
+ };
+
+ Svc.Obs.add("weave:service:sync:finish", obs);
+ Svc.Obs.add("weave:service:sync:error", obs);
+
+ // This kicks off the actual test. Split into a function here to allow this
+ // source file to broadly follow actual execution order.
+ function onwards() {
+ _("== Invoking first sync.");
+ Service.sync();
+ _("We should not simultaneously have data but no keys on the server.");
+ let hasData = rotaryColl.wbo("flying") ||
+ rotaryColl.wbo("scotsman");
+ let hasKeys = keysWBO.modified;
+
+ _("We correctly handle 401s by aborting the sync and starting again.");
+ do_check_true(!hasData == !hasKeys);
+
+ _("Be prepared for the second (automatic) sync...");
+ }
+
+ _("Make sure that syncing again causes recovery.");
+ onSyncFinished = function() {
+ _("== First sync done.");
+ _("---------------------------");
+ onSyncFinished = function() {
+ _("== Second (automatic) sync done.");
+ hasData = rotaryColl.wbo("flying") ||
+ rotaryColl.wbo("scotsman");
+ hasKeys = keysWBO.modified;
+ do_check_true(!hasData == !hasKeys);
+
+ // Kick off another sync. Can't just call it, because we're inside the
+ // lock...
+ Utils.nextTick(function() {
+ _("Now a fresh sync will get no HMAC errors.");
+ _("Partially resetting client, as if after a restart, and forcing redownload.");
+ Service.collectionKeys.clear();
+ engine.lastSync = 0;
+ hmacErrorCount = 0;
+
+ onSyncFinished = function() {
+ // Two rotary items, one client record... no errors.
+ do_check_eq(hmacErrorCount, 0)
+
+ Svc.Obs.remove("weave:service:sync:finish", obs);
+ Svc.Obs.remove("weave:service:sync:error", obs);
+
+ Svc.Prefs.resetBranch("");
+ Service.recordManager.clearCache();
+ server.stop(run_next_test);
+ };
+
+ Service.sync();
+ },
+ this);
+ };
+ };
+
+ onwards();
+});
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_httpd_sync_server.js b/services/sync/tests/unit/test_httpd_sync_server.js
new file mode 100644
index 000000000..943dbfd73
--- /dev/null
+++ b/services/sync/tests/unit/test_httpd_sync_server.js
@@ -0,0 +1,285 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ Log.repository.getLogger("Sync.Test.Server").level = Log.Level.Trace;
+ initTestLogging();
+ run_next_test();
+}
+
+add_test(function test_creation() {
+ // Explicit callback for this one.
+ let server = new SyncServer({
+ __proto__: SyncServerCallback,
+ });
+ do_check_true(!!server); // Just so we have a check.
+ server.start(null, function () {
+ _("Started on " + server.port);
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_url_parsing() {
+ let server = new SyncServer();
+
+ // Check that we can parse a WBO URI.
+ let parts = server.pathRE.exec("/1.1/johnsmith/storage/crypto/keys");
+ let [all, version, username, first, rest] = parts;
+ do_check_eq(all, "/1.1/johnsmith/storage/crypto/keys");
+ do_check_eq(version, "1.1");
+ do_check_eq(username, "johnsmith");
+ do_check_eq(first, "storage");
+ do_check_eq(rest, "crypto/keys");
+ do_check_eq(null, server.pathRE.exec("/nothing/else"));
+
+ // Check that we can parse a collection URI.
+ parts = server.pathRE.exec("/1.1/johnsmith/storage/crypto");
+ [all, version, username, first, rest] = parts;
+ do_check_eq(all, "/1.1/johnsmith/storage/crypto");
+ do_check_eq(version, "1.1");
+ do_check_eq(username, "johnsmith");
+ do_check_eq(first, "storage");
+ do_check_eq(rest, "crypto");
+
+ // We don't allow trailing slash on storage URI.
+ parts = server.pathRE.exec("/1.1/johnsmith/storage/");
+ do_check_eq(parts, undefined);
+
+ // storage alone is a valid request.
+ parts = server.pathRE.exec("/1.1/johnsmith/storage");
+ [all, version, username, first, rest] = parts;
+ do_check_eq(all, "/1.1/johnsmith/storage");
+ do_check_eq(version, "1.1");
+ do_check_eq(username, "johnsmith");
+ do_check_eq(first, "storage");
+ do_check_eq(rest, undefined);
+
+ parts = server.storageRE.exec("storage");
+ let storage, collection, id;
+ [all, storage, collection, id] = parts;
+ do_check_eq(all, "storage");
+ do_check_eq(collection, undefined);
+
+ run_next_test();
+});
+
+Cu.import("resource://services-common/rest.js");
+function localRequest(server, path) {
+ _("localRequest: " + path);
+ let url = server.baseURI.substr(0, server.baseURI.length - 1) + path;
+ _("url: " + url);
+ return new RESTRequest(url);
+}
+
+add_test(function test_basic_http() {
+ let server = new SyncServer();
+ server.registerUser("john", "password");
+ do_check_true(server.userExists("john"));
+ server.start(null, function () {
+ _("Started on " + server.port);
+ Utils.nextTick(function () {
+ let req = localRequest(server, "/1.1/john/storage/crypto/keys");
+ _("req is " + req);
+ req.get(function (err) {
+ do_check_eq(null, err);
+ Utils.nextTick(function () {
+ server.stop(run_next_test);
+ });
+ });
+ });
+ });
+});
+
+add_test(function test_info_collections() {
+ let server = new SyncServer({
+ __proto__: SyncServerCallback
+ });
+ function responseHasCorrectHeaders(r) {
+ do_check_eq(r.status, 200);
+ do_check_eq(r.headers["content-type"], "application/json");
+ do_check_true("x-weave-timestamp" in r.headers);
+ }
+
+ server.registerUser("john", "password");
+ server.start(null, function () {
+ Utils.nextTick(function () {
+ let req = localRequest(server, "/1.1/john/info/collections");
+ req.get(function (err) {
+ // Initial info/collections fetch is empty.
+ do_check_eq(null, err);
+ responseHasCorrectHeaders(this.response);
+
+ do_check_eq(this.response.body, "{}");
+ Utils.nextTick(function () {
+ // When we PUT something to crypto/keys, "crypto" appears in the response.
+ function cb(err) {
+ do_check_eq(null, err);
+ responseHasCorrectHeaders(this.response);
+ let putResponseBody = this.response.body;
+ _("PUT response body: " + JSON.stringify(putResponseBody));
+
+ req = localRequest(server, "/1.1/john/info/collections");
+ req.get(function (err) {
+ do_check_eq(null, err);
+ responseHasCorrectHeaders(this.response);
+ let expectedColl = server.getCollection("john", "crypto");
+ do_check_true(!!expectedColl);
+ let modified = expectedColl.timestamp;
+ do_check_true(modified > 0);
+ do_check_eq(putResponseBody, modified);
+ do_check_eq(JSON.parse(this.response.body).crypto, modified);
+ Utils.nextTick(function () {
+ server.stop(run_next_test);
+ });
+ });
+ }
+ let payload = JSON.stringify({foo: "bar"});
+ localRequest(server, "/1.1/john/storage/crypto/keys").put(payload, cb);
+ });
+ });
+ });
+ });
+});
+
+add_test(function test_storage_request() {
+ let keysURL = "/1.1/john/storage/crypto/keys?foo=bar";
+ let foosURL = "/1.1/john/storage/crypto/foos";
+ let storageURL = "/1.1/john/storage";
+
+ let server = new SyncServer();
+ let creation = server.timestamp();
+ server.registerUser("john", "password");
+
+ server.createContents("john", {
+ crypto: {foos: {foo: "bar"}}
+ });
+ let coll = server.user("john").collection("crypto");
+ do_check_true(!!coll);
+
+ _("We're tracking timestamps.");
+ do_check_true(coll.timestamp >= creation);
+
+ function retrieveWBONotExists(next) {
+ let req = localRequest(server, keysURL);
+ req.get(function (err) {
+ _("Body is " + this.response.body);
+ _("Modified is " + this.response.newModified);
+ do_check_eq(null, err);
+ do_check_eq(this.response.status, 404);
+ do_check_eq(this.response.body, "Not found");
+ Utils.nextTick(next);
+ });
+ }
+ function retrieveWBOExists(next) {
+ let req = localRequest(server, foosURL);
+ req.get(function (err) {
+ _("Body is " + this.response.body);
+ _("Modified is " + this.response.newModified);
+ let parsedBody = JSON.parse(this.response.body);
+ do_check_eq(parsedBody.id, "foos");
+ do_check_eq(parsedBody.modified, coll.wbo("foos").modified);
+ do_check_eq(JSON.parse(parsedBody.payload).foo, "bar");
+ Utils.nextTick(next);
+ });
+ }
+ function deleteWBONotExists(next) {
+ let req = localRequest(server, keysURL);
+ server.callback.onItemDeleted = function (username, collection, wboID) {
+ do_throw("onItemDeleted should not have been called.");
+ };
+
+ req.delete(function (err) {
+ _("Body is " + this.response.body);
+ _("Modified is " + this.response.newModified);
+ do_check_eq(this.response.status, 200);
+ delete server.callback.onItemDeleted;
+ Utils.nextTick(next);
+ });
+ }
+ function deleteWBOExists(next) {
+ let req = localRequest(server, foosURL);
+ server.callback.onItemDeleted = function (username, collection, wboID) {
+ _("onItemDeleted called for " + collection + "/" + wboID);
+ delete server.callback.onItemDeleted;
+ do_check_eq(username, "john");
+ do_check_eq(collection, "crypto");
+ do_check_eq(wboID, "foos");
+ Utils.nextTick(next);
+ };
+
+ req.delete(function (err) {
+ _("Body is " + this.response.body);
+ _("Modified is " + this.response.newModified);
+ do_check_eq(this.response.status, 200);
+ });
+ }
+ function deleteStorage(next) {
+ _("Testing DELETE on /storage.");
+ let now = server.timestamp();
+ _("Timestamp: " + now);
+ let req = localRequest(server, storageURL);
+ req.delete(function (err) {
+ _("Body is " + this.response.body);
+ _("Modified is " + this.response.newModified);
+ let parsedBody = JSON.parse(this.response.body);
+ do_check_true(parsedBody >= now);
+ do_check_empty(server.users["john"].collections);
+ Utils.nextTick(next);
+ });
+ }
+ function getStorageFails(next) {
+ _("Testing that GET on /storage fails.");
+ let req = localRequest(server, storageURL);
+ req.get(function (err) {
+ do_check_eq(this.response.status, 405);
+ do_check_eq(this.response.headers["allow"], "DELETE");
+ Utils.nextTick(next);
+ });
+ }
+ function getMissingCollectionWBO(next) {
+ _("Testing that fetching a WBO from an on-existent collection 404s.");
+ let req = localRequest(server, storageURL + "/foobar/baz");
+ req.get(function (err) {
+ do_check_eq(this.response.status, 404);
+ Utils.nextTick(next);
+ });
+ }
+
+ server.start(null,
+ Async.chain(
+ retrieveWBONotExists,
+ retrieveWBOExists,
+ deleteWBOExists,
+ deleteWBONotExists,
+ getStorageFails,
+ getMissingCollectionWBO,
+ deleteStorage,
+ server.stop.bind(server),
+ run_next_test
+ ));
+});
+
+add_test(function test_x_weave_records() {
+ let server = new SyncServer();
+ server.registerUser("john", "password");
+
+ server.createContents("john", {
+ crypto: {foos: {foo: "bar"},
+ bars: {foo: "baz"}}
+ });
+ server.start(null, function () {
+ let wbo = localRequest(server, "/1.1/john/storage/crypto/foos");
+ wbo.get(function (err) {
+ // WBO fetches don't have one.
+ do_check_false("x-weave-records" in this.response.headers);
+ let col = localRequest(server, "/1.1/john/storage/crypto");
+ col.get(function (err) {
+ // Collection fetches do.
+ do_check_eq(this.response.headers["x-weave-records"], "2");
+ server.stop(run_next_test);
+ });
+ });
+ });
+});
diff --git a/services/sync/tests/unit/test_identity_manager.js b/services/sync/tests/unit/test_identity_manager.js
new file mode 100644
index 000000000..1ac198ade
--- /dev/null
+++ b/services/sync/tests/unit/test_identity_manager.js
@@ -0,0 +1,284 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/util.js");
+
+var identity = new IdentityManager();
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Identity").level = Log.Level.Trace;
+
+ run_next_test();
+}
+
+add_test(function test_username_from_account() {
+ _("Ensure usernameFromAccount works properly.");
+
+ do_check_eq(identity.usernameFromAccount(null), null);
+ do_check_eq(identity.usernameFromAccount("user"), "user");
+ do_check_eq(identity.usernameFromAccount("User"), "user");
+ do_check_eq(identity.usernameFromAccount("john@doe.com"),
+ "7wohs32cngzuqt466q3ge7indszva4of");
+
+ run_next_test();
+});
+
+add_test(function test_account_username() {
+ _("Ensure the account and username attributes work properly.");
+
+ _("Verify initial state");
+ do_check_eq(Svc.Prefs.get("account"), undefined);
+ do_check_eq(Svc.Prefs.get("username"), undefined);
+ do_check_eq(identity.account, null);
+ do_check_eq(identity.username, null);
+
+ _("The 'username' attribute is normalized to lower case, updates preferences and identities.");
+ identity.username = "TarZan";
+ do_check_eq(identity.username, "tarzan");
+ do_check_eq(Svc.Prefs.get("username"), "tarzan");
+ do_check_eq(identity.username, "tarzan");
+
+ _("If not set, the 'account attribute' falls back to the username for backwards compatibility.");
+ do_check_eq(identity.account, "tarzan");
+
+ _("Setting 'username' to a non-truthy value resets the pref.");
+ identity.username = null;
+ do_check_eq(identity.username, null);
+ do_check_eq(identity.account, null);
+ const default_marker = {};
+ do_check_eq(Svc.Prefs.get("username", default_marker), default_marker);
+ do_check_eq(identity.username, null);
+
+ _("The 'account' attribute will set the 'username' if it doesn't contain characters that aren't allowed in the username.");
+ identity.account = "johndoe";
+ do_check_eq(identity.account, "johndoe");
+ do_check_eq(identity.username, "johndoe");
+ do_check_eq(Svc.Prefs.get("username"), "johndoe");
+ do_check_eq(identity.username, "johndoe");
+
+ _("If 'account' contains disallowed characters such as @, 'username' will the base32 encoded SHA1 hash of 'account'");
+ identity.account = "John@Doe.com";
+ do_check_eq(identity.account, "john@doe.com");
+ do_check_eq(identity.username, "7wohs32cngzuqt466q3ge7indszva4of");
+
+ _("Setting 'account' to a non-truthy value resets the pref.");
+ identity.account = null;
+ do_check_eq(identity.account, null);
+ do_check_eq(Svc.Prefs.get("account", default_marker), default_marker);
+ do_check_eq(identity.username, null);
+ do_check_eq(Svc.Prefs.get("username", default_marker), default_marker);
+
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+});
+
+add_test(function test_basic_password() {
+ _("Ensure basic password setting works as expected.");
+
+ identity.account = null;
+ do_check_eq(identity.currentAuthState, LOGIN_FAILED_NO_USERNAME);
+ let thrown = false;
+ try {
+ identity.basicPassword = "foobar";
+ } catch (ex) {
+ thrown = true;
+ }
+
+ do_check_true(thrown);
+ thrown = false;
+
+ identity.account = "johndoe";
+ do_check_eq(identity.currentAuthState, LOGIN_FAILED_NO_PASSWORD);
+ identity.basicPassword = "password";
+ do_check_eq(identity.basicPassword, "password");
+ do_check_eq(identity.currentAuthState, LOGIN_FAILED_NO_PASSPHRASE);
+ do_check_true(identity.hasBasicCredentials());
+
+ identity.account = null;
+
+ run_next_test();
+});
+
+add_test(function test_basic_password_persistence() {
+ _("Ensure credentials are saved and restored to the login manager properly.");
+
+ // Just in case.
+ identity.account = null;
+ identity.deleteSyncCredentials();
+
+ identity.account = "janesmith";
+ identity.basicPassword = "ilovejohn";
+ identity.persistCredentials();
+
+ let im1 = new IdentityManager();
+ do_check_eq(im1._basicPassword, null);
+ do_check_eq(im1.username, "janesmith");
+ do_check_eq(im1.basicPassword, "ilovejohn");
+
+ let im2 = new IdentityManager();
+ do_check_eq(im2._basicPassword, null);
+
+ _("Now remove the password and ensure it is deleted from storage.");
+ identity.basicPassword = null;
+ identity.persistCredentials(); // This should nuke from storage.
+ do_check_eq(im2.basicPassword, null);
+
+ _("Ensure that retrieving an unset but unpersisted removal returns null.");
+ identity.account = "janesmith";
+ identity.basicPassword = "myotherpassword";
+ identity.persistCredentials();
+
+ identity.basicPassword = null;
+ do_check_eq(identity.basicPassword, null);
+
+ // Reset for next test.
+ identity.account = null;
+ identity.persistCredentials();
+
+ run_next_test();
+});
+
+add_test(function test_sync_key() {
+ _("Ensure Sync Key works as advertised.");
+
+ _("Ensure setting a Sync Key before an account throws.");
+ let thrown = false;
+ try {
+ identity.syncKey = "blahblah";
+ } catch (ex) {
+ thrown = true;
+ }
+ do_check_true(thrown);
+ thrown = false;
+
+ identity.account = "johnsmith";
+ identity.basicPassword = "johnsmithpw";
+
+ do_check_eq(identity.syncKey, null);
+ do_check_eq(identity.syncKeyBundle, null);
+
+ _("An invalid Sync Key is silently accepted for historical reasons.");
+ identity.syncKey = "synckey";
+ do_check_eq(identity.syncKey, "synckey");
+
+ _("But the SyncKeyBundle should not be created from bad keys.");
+ do_check_eq(identity.syncKeyBundle, null);
+
+ let syncKey = Utils.generatePassphrase();
+ identity.syncKey = syncKey;
+ do_check_eq(identity.syncKey, syncKey);
+ do_check_neq(identity.syncKeyBundle, null);
+
+ let im = new IdentityManager();
+ im.account = "pseudojohn";
+ do_check_eq(im.syncKey, null);
+ do_check_eq(im.syncKeyBundle, null);
+
+ identity.account = null;
+
+ run_next_test();
+});
+
+add_test(function test_sync_key_changes() {
+ _("Ensure changes to Sync Key have appropriate side-effects.");
+
+ let im = new IdentityManager();
+ let sk1 = Utils.generatePassphrase();
+ let sk2 = Utils.generatePassphrase();
+
+ im.account = "johndoe";
+ do_check_eq(im.syncKey, null);
+ do_check_eq(im.syncKeyBundle, null);
+
+ im.syncKey = sk1;
+ do_check_neq(im.syncKeyBundle, null);
+
+ let ek1 = im.syncKeyBundle.encryptionKeyB64;
+ let hk1 = im.syncKeyBundle.hmacKeyB64;
+
+ // Change the Sync Key and ensure the Sync Key Bundle is updated.
+ im.syncKey = sk2;
+ let ek2 = im.syncKeyBundle.encryptionKeyB64;
+ let hk2 = im.syncKeyBundle.hmacKeyB64;
+
+ do_check_neq(ek1, ek2);
+ do_check_neq(hk1, hk2);
+
+ im.account = null;
+
+ run_next_test();
+});
+
+add_test(function test_current_auth_state() {
+ _("Ensure current auth state is reported properly.");
+
+ let im = new IdentityManager();
+ do_check_eq(im.currentAuthState, LOGIN_FAILED_NO_USERNAME);
+
+ im.account = "johndoe";
+ do_check_eq(im.currentAuthState, LOGIN_FAILED_NO_PASSWORD);
+
+ im.basicPassword = "ilovejane";
+ do_check_eq(im.currentAuthState, LOGIN_FAILED_NO_PASSPHRASE);
+
+ im.syncKey = "foobar";
+ do_check_eq(im.currentAuthState, LOGIN_FAILED_INVALID_PASSPHRASE);
+
+ im.syncKey = null;
+ do_check_eq(im.currentAuthState, LOGIN_FAILED_NO_PASSPHRASE);
+
+ im.syncKey = Utils.generatePassphrase();
+ do_check_eq(im.currentAuthState, STATUS_OK);
+
+ im.account = null;
+
+ run_next_test();
+});
+
+add_test(function test_sync_key_persistence() {
+ _("Ensure Sync Key persistence works as expected.");
+
+ identity.account = "pseudojohn";
+ identity.password = "supersecret";
+
+ let syncKey = Utils.generatePassphrase();
+ identity.syncKey = syncKey;
+
+ identity.persistCredentials();
+
+ let im = new IdentityManager();
+ im.account = "pseudojohn";
+ do_check_eq(im.syncKey, syncKey);
+ do_check_neq(im.syncKeyBundle, null);
+
+ let kb1 = identity.syncKeyBundle;
+ let kb2 = im.syncKeyBundle;
+
+ do_check_eq(kb1.encryptionKeyB64, kb2.encryptionKeyB64);
+ do_check_eq(kb1.hmacKeyB64, kb2.hmacKeyB64);
+
+ identity.account = null;
+ identity.persistCredentials();
+
+ let im2 = new IdentityManager();
+ im2.account = "pseudojohn";
+ do_check_eq(im2.syncKey, null);
+
+ im2.account = null;
+
+ _("Ensure deleted but not persisted value is retrieved.");
+ identity.account = "someoneelse";
+ identity.syncKey = Utils.generatePassphrase();
+ identity.persistCredentials();
+ identity.syncKey = null;
+ do_check_eq(identity.syncKey, null);
+
+ // Clean up.
+ identity.account = null;
+ identity.persistCredentials();
+
+ run_next_test();
+});
diff --git a/services/sync/tests/unit/test_interval_triggers.js b/services/sync/tests/unit/test_interval_triggers.js
new file mode 100644
index 000000000..eca5ec289
--- /dev/null
+++ b/services/sync/tests/unit/test_interval_triggers.js
@@ -0,0 +1,450 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Svc.DefaultPrefs.set("registerEngines", "");
+Cu.import("resource://services-sync/service.js");
+
+var scheduler = Service.scheduler;
+var clientsEngine = Service.clientsEngine;
+
+// Don't remove stale clients when syncing. This is a test-only workaround
+// that lets us add clients directly to the store, without losing them on
+// the next sync.
+clientsEngine._removeRemoteClient = id => {};
+
+function promiseStopServer(server) {
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ return deferred.promise;
+}
+
+function sync_httpd_setup() {
+ let global = new ServerWBO("global", {
+ syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {clients: {version: clientsEngine.version,
+ syncID: clientsEngine.syncID}}
+ });
+ let clientsColl = new ServerCollection({}, true);
+
+ // Tracking info/collections.
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+
+ return httpd_setup({
+ "/1.1/johndoe/storage/meta/global": upd("meta", global.handler()),
+ "/1.1/johndoe/info/collections": collectionsHelper.handler,
+ "/1.1/johndoe/storage/crypto/keys":
+ upd("crypto", (new ServerWBO("keys")).handler()),
+ "/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler())
+ });
+}
+
+function* setUp(server) {
+ yield configureIdentity({username: "johndoe"});
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+}
+
+function run_test() {
+ initTestLogging("Trace");
+
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+
+ run_next_test();
+}
+
+add_identity_test(this, function* test_successful_sync_adjustSyncInterval() {
+ _("Test successful sync calling adjustSyncInterval");
+ let syncSuccesses = 0;
+ function onSyncFinish() {
+ _("Sync success.");
+ syncSuccesses++;
+ };
+ Svc.Obs.add("weave:service:sync:finish", onSyncFinish);
+
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Confirm defaults
+ do_check_false(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_false(scheduler.hasIncomingItems);
+
+ _("Test as long as numClients <= 1 our sync interval is SINGLE_USER.");
+ // idle == true && numClients <= 1 && hasIncomingItems == false
+ scheduler.idle = true;
+ Service.sync();
+ do_check_eq(syncSuccesses, 1);
+ do_check_true(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ // idle == false && numClients <= 1 && hasIncomingItems == false
+ scheduler.idle = false;
+ Service.sync();
+ do_check_eq(syncSuccesses, 2);
+ do_check_false(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ // idle == false && numClients <= 1 && hasIncomingItems == true
+ scheduler.hasIncomingItems = true;
+ Service.sync();
+ do_check_eq(syncSuccesses, 3);
+ do_check_false(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_true(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ // idle == true && numClients <= 1 && hasIncomingItems == true
+ scheduler.idle = true;
+ Service.sync();
+ do_check_eq(syncSuccesses, 4);
+ do_check_true(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_true(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ _("Test as long as idle && numClients > 1 our sync interval is idleInterval.");
+ // idle == true && numClients > 1 && hasIncomingItems == true
+ Service.clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ Service.sync();
+ do_check_eq(syncSuccesses, 5);
+ do_check_true(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_true(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
+
+ // idle == true && numClients > 1 && hasIncomingItems == false
+ scheduler.hasIncomingItems = false;
+ Service.sync();
+ do_check_eq(syncSuccesses, 6);
+ do_check_true(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
+
+ _("Test non-idle, numClients > 1, no incoming items => activeInterval.");
+ // idle == false && numClients > 1 && hasIncomingItems == false
+ scheduler.idle = false;
+ Service.sync();
+ do_check_eq(syncSuccesses, 7);
+ do_check_false(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+
+ _("Test non-idle, numClients > 1, incoming items => immediateInterval.");
+ // idle == false && numClients > 1 && hasIncomingItems == true
+ scheduler.hasIncomingItems = true;
+ Service.sync();
+ do_check_eq(syncSuccesses, 8);
+ do_check_false(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems); //gets reset to false
+ do_check_eq(scheduler.syncInterval, scheduler.immediateInterval);
+
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ Service.startOver();
+ yield promiseStopServer(server);
+});
+
+add_identity_test(this, function* test_unsuccessful_sync_adjustSyncInterval() {
+ _("Test unsuccessful sync calling adjustSyncInterval");
+
+ let syncFailures = 0;
+ function onSyncError() {
+ _("Sync error.");
+ syncFailures++;
+ }
+ Svc.Obs.add("weave:service:sync:error", onSyncError);
+
+ _("Test unsuccessful sync calls adjustSyncInterval");
+ // Force sync to fail.
+ Svc.Prefs.set("firstSync", "notReady");
+
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Confirm defaults
+ do_check_false(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_false(scheduler.hasIncomingItems);
+
+ _("Test as long as numClients <= 1 our sync interval is SINGLE_USER.");
+ // idle == true && numClients <= 1 && hasIncomingItems == false
+ scheduler.idle = true;
+ Service.sync();
+ do_check_eq(syncFailures, 1);
+ do_check_true(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ // idle == false && numClients <= 1 && hasIncomingItems == false
+ scheduler.idle = false;
+ Service.sync();
+ do_check_eq(syncFailures, 2);
+ do_check_false(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ // idle == false && numClients <= 1 && hasIncomingItems == true
+ scheduler.hasIncomingItems = true;
+ Service.sync();
+ do_check_eq(syncFailures, 3);
+ do_check_false(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_true(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ // idle == true && numClients <= 1 && hasIncomingItems == true
+ scheduler.idle = true;
+ Service.sync();
+ do_check_eq(syncFailures, 4);
+ do_check_true(scheduler.idle);
+ do_check_false(scheduler.numClients > 1);
+ do_check_true(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ _("Test as long as idle && numClients > 1 our sync interval is idleInterval.");
+ // idle == true && numClients > 1 && hasIncomingItems == true
+ Service.clientsEngine._store.create({id: "foo", cleartext: "bar"});
+
+ Service.sync();
+ do_check_eq(syncFailures, 5);
+ do_check_true(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_true(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
+
+ // idle == true && numClients > 1 && hasIncomingItems == false
+ scheduler.hasIncomingItems = false;
+ Service.sync();
+ do_check_eq(syncFailures, 6);
+ do_check_true(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
+
+ _("Test non-idle, numClients > 1, no incoming items => activeInterval.");
+ // idle == false && numClients > 1 && hasIncomingItems == false
+ scheduler.idle = false;
+ Service.sync();
+ do_check_eq(syncFailures, 7);
+ do_check_false(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+
+ _("Test non-idle, numClients > 1, incoming items => immediateInterval.");
+ // idle == false && numClients > 1 && hasIncomingItems == true
+ scheduler.hasIncomingItems = true;
+ Service.sync();
+ do_check_eq(syncFailures, 8);
+ do_check_false(scheduler.idle);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.hasIncomingItems); //gets reset to false
+ do_check_eq(scheduler.syncInterval, scheduler.immediateInterval);
+
+ Service.startOver();
+ Svc.Obs.remove("weave:service:sync:error", onSyncError);
+ yield promiseStopServer(server);
+});
+
+add_identity_test(this, function* test_back_triggers_sync() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Single device: no sync triggered.
+ scheduler.idle = true;
+ scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
+ do_check_false(scheduler.idle);
+
+ // Multiple devices: sync is triggered.
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ scheduler.updateClientMode();
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+
+ Service.recordManager.clearCache();
+ Svc.Prefs.resetBranch("");
+ scheduler.setDefaults();
+ clientsEngine.resetClient();
+
+ Service.startOver();
+ server.stop(deferred.resolve);
+ });
+
+ scheduler.idle = true;
+ scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
+ do_check_false(scheduler.idle);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_adjust_interval_on_sync_error() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ let syncFailures = 0;
+ function onSyncError() {
+ _("Sync error.");
+ syncFailures++;
+ }
+ Svc.Obs.add("weave:service:sync:error", onSyncError);
+
+ _("Test unsuccessful sync updates client mode & sync intervals");
+ // Force a sync fail.
+ Svc.Prefs.set("firstSync", "notReady");
+
+ do_check_eq(syncFailures, 0);
+ do_check_false(scheduler.numClients > 1);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ Service.sync();
+
+ do_check_eq(syncFailures, 1);
+ do_check_true(scheduler.numClients > 1);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+
+ Svc.Obs.remove("weave:service:sync:error", onSyncError);
+ Service.startOver();
+ yield promiseStopServer(server);
+});
+
+add_identity_test(this, function* test_bug671378_scenario() {
+ // Test scenario similar to bug 671378. This bug appeared when a score
+ // update occurred that wasn't large enough to trigger a sync so
+ // scheduleNextSync() was called without a time interval parameter,
+ // setting nextSync to a non-zero value and preventing the timer from
+ // being adjusted in the next call to scheduleNextSync().
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ let syncSuccesses = 0;
+ function onSyncFinish() {
+ _("Sync success.");
+ syncSuccesses++;
+ };
+ Svc.Obs.add("weave:service:sync:finish", onSyncFinish);
+
+ // After first sync call, syncInterval & syncTimer are singleDeviceInterval.
+ Service.sync();
+ do_check_eq(syncSuccesses, 1);
+ do_check_false(scheduler.numClients > 1);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
+
+ let deferred = Promise.defer();
+ // Wrap scheduleNextSync so we are notified when it is finished.
+ scheduler._scheduleNextSync = scheduler.scheduleNextSync;
+ scheduler.scheduleNextSync = function() {
+ scheduler._scheduleNextSync();
+
+ // Check on sync:finish scheduleNextSync sets the appropriate
+ // syncInterval and syncTimer values.
+ if (syncSuccesses == 2) {
+ do_check_neq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+ do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
+
+ scheduler.scheduleNextSync = scheduler._scheduleNextSync;
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ Service.startOver();
+ server.stop(deferred.resolve);
+ }
+ };
+
+ // Set nextSync != 0
+ // syncInterval still hasn't been set by call to updateClientMode.
+ // Explicitly trying to invoke scheduleNextSync during a sync
+ // (to immitate a score update that isn't big enough to trigger a sync).
+ Svc.Obs.add("weave:service:sync:start", function onSyncStart() {
+ // Wait for other sync:start observers to be called so that
+ // nextSync is set to 0.
+ Utils.nextTick(function() {
+ Svc.Obs.remove("weave:service:sync:start", onSyncStart);
+
+ scheduler.scheduleNextSync();
+ do_check_neq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
+ });
+ });
+
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_test(function test_adjust_timer_larger_syncInterval() {
+ _("Test syncInterval > current timout period && nextSync != 0, syncInterval is NOT used.");
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ scheduler.updateClientMode();
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+
+ scheduler.scheduleNextSync();
+
+ // Ensure we have a small interval.
+ do_check_neq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncTimer.delay, scheduler.activeInterval);
+
+ // Make interval large again
+ clientsEngine._wipeClient();
+ scheduler.updateClientMode();
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ scheduler.scheduleNextSync();
+
+ // Ensure timer delay remains as the small interval.
+ do_check_neq(scheduler.nextSync, 0);
+ do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
+
+ //SyncSchedule.
+ Service.startOver();
+ run_next_test();
+});
+
+add_test(function test_adjust_timer_smaller_syncInterval() {
+ _("Test current timout > syncInterval period && nextSync != 0, syncInterval is used.");
+ scheduler.scheduleNextSync();
+
+ // Ensure we have a large interval.
+ do_check_neq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
+
+ // Make interval smaller
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ scheduler.updateClientMode();
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+
+ scheduler.scheduleNextSync();
+
+ // Ensure smaller timer delay is used.
+ do_check_neq(scheduler.nextSync, 0);
+ do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
+
+ //SyncSchedule.
+ Service.startOver();
+ run_next_test();
+});
diff --git a/services/sync/tests/unit/test_jpakeclient.js b/services/sync/tests/unit/test_jpakeclient.js
new file mode 100644
index 000000000..783edb460
--- /dev/null
+++ b/services/sync/tests/unit/test_jpakeclient.js
@@ -0,0 +1,562 @@
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/jpakeclient.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+const JPAKE_LENGTH_SECRET = 8;
+const JPAKE_LENGTH_CLIENTID = 256;
+const KEYEXCHANGE_VERSION = 3;
+
+/*
+ * Simple server.
+ */
+
+const SERVER_MAX_GETS = 6;
+
+function check_headers(request) {
+ let stack = Components.stack.caller;
+
+ // There shouldn't be any Basic auth
+ do_check_false(request.hasHeader("Authorization"), stack);
+
+ // Ensure key exchange ID is set and the right length
+ do_check_true(request.hasHeader("X-KeyExchange-Id"), stack);
+ do_check_eq(request.getHeader("X-KeyExchange-Id").length,
+ JPAKE_LENGTH_CLIENTID, stack);
+}
+
+function new_channel() {
+ // Create a new channel and register it with the server.
+ let cid = Math.floor(Math.random() * 10000);
+ while (channels[cid]) {
+ cid = Math.floor(Math.random() * 10000);
+ }
+ let channel = channels[cid] = new ServerChannel();
+ server.registerPathHandler("/" + cid, channel.handler());
+ return cid;
+}
+
+var server;
+var channels = {}; // Map channel -> ServerChannel object
+function server_new_channel(request, response) {
+ check_headers(request);
+ let cid = new_channel();
+ let body = JSON.stringify("" + cid);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+var error_report;
+function server_report(request, response) {
+ check_headers(request);
+
+ if (request.hasHeader("X-KeyExchange-Log")) {
+ error_report = request.getHeader("X-KeyExchange-Log");
+ }
+
+ if (request.hasHeader("X-KeyExchange-Cid")) {
+ let cid = request.getHeader("X-KeyExchange-Cid");
+ let channel = channels[cid];
+ if (channel) {
+ channel.clear();
+ }
+ }
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+}
+
+// Hook for test code.
+var hooks = {};
+function initHooks() {
+ hooks.onGET = function onGET(request) {};
+}
+initHooks();
+
+function ServerChannel() {
+ this.data = "";
+ this.etag = "";
+ this.getCount = 0;
+}
+ServerChannel.prototype = {
+
+ GET: function GET(request, response) {
+ if (!this.data) {
+ response.setStatusLine(request.httpVersion, 404, "Not Found");
+ return;
+ }
+
+ if (request.hasHeader("If-None-Match")) {
+ let etag = request.getHeader("If-None-Match");
+ if (etag == this.etag) {
+ response.setStatusLine(request.httpVersion, 304, "Not Modified");
+ hooks.onGET(request);
+ return;
+ }
+ }
+ response.setHeader("ETag", this.etag);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(this.data, this.data.length);
+
+ // Automatically clear the channel after 6 successful GETs.
+ this.getCount += 1;
+ if (this.getCount == SERVER_MAX_GETS) {
+ this.clear();
+ }
+ hooks.onGET(request);
+ },
+
+ PUT: function PUT(request, response) {
+ if (this.data) {
+ do_check_true(request.hasHeader("If-Match"));
+ let etag = request.getHeader("If-Match");
+ if (etag != this.etag) {
+ response.setHeader("ETag", this.etag);
+ response.setStatusLine(request.httpVersion, 412, "Precondition Failed");
+ return;
+ }
+ } else {
+ do_check_true(request.hasHeader("If-None-Match"));
+ do_check_eq(request.getHeader("If-None-Match"), "*");
+ }
+
+ this.data = readBytesFromInputStream(request.bodyInputStream);
+ this.etag = '"' + Utils.sha1(this.data) + '"';
+ response.setHeader("ETag", this.etag);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ },
+
+ clear: function clear() {
+ delete this.data;
+ },
+
+ handler: function handler() {
+ let self = this;
+ return function(request, response) {
+ check_headers(request);
+ let method = self[request.method];
+ return method.apply(self, arguments);
+ };
+ }
+
+};
+
+
+/**
+ * Controller that throws for everything.
+ */
+var BaseController = {
+ displayPIN: function displayPIN() {
+ do_throw("displayPIN() shouldn't have been called!");
+ },
+ onPairingStart: function onPairingStart() {
+ do_throw("onPairingStart shouldn't have been called!");
+ },
+ onAbort: function onAbort(error) {
+ do_throw("Shouldn't have aborted with " + error + "!");
+ },
+ onPaired: function onPaired() {
+ do_throw("onPaired() shouldn't have been called!");
+ },
+ onComplete: function onComplete(data) {
+ do_throw("Shouldn't have completed with " + data + "!");
+ }
+};
+
+
+const DATA = {"msg": "eggstreamly sekrit"};
+const POLLINTERVAL = 50;
+
+function run_test() {
+ server = httpd_setup({"/new_channel": server_new_channel,
+ "/report": server_report});
+ Svc.Prefs.set("jpake.serverURL", server.baseURI + "/");
+ Svc.Prefs.set("jpake.pollInterval", POLLINTERVAL);
+ Svc.Prefs.set("jpake.maxTries", 2);
+ Svc.Prefs.set("jpake.firstMsgMaxTries", 5);
+ Svc.Prefs.set("jpake.lastMsgMaxTries", 5);
+ // Ensure clean up
+ Svc.Obs.add("profile-before-change", function() {
+ Svc.Prefs.resetBranch("");
+ });
+
+ // Ensure PSM is initialized.
+ Cc["@mozilla.org/psm;1"].getService(Ci.nsISupports);
+
+ // Simulate Sync setup with credentials in place. We want to make
+ // sure the J-PAKE requests don't include those data.
+ ensureLegacyIdentityManager();
+ setBasicCredentials("johndoe", "ilovejane");
+
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.JPAKEClient").level = Log.Level.Trace;
+ Log.repository.getLogger("Common.RESTRequest").level =
+ Log.Level.Trace;
+ run_next_test();
+}
+
+
+add_test(function test_success_receiveNoPIN() {
+ _("Test a successful exchange started by receiveNoPIN().");
+
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onPaired: function onPaired() {
+ _("Pairing successful, sending final payload.");
+ do_check_true(pairingStartCalledOnReceiver);
+ Utils.nextTick(function() { snd.sendAndComplete(DATA); });
+ },
+ onComplete: function onComplete() {}
+ });
+
+ let pairingStartCalledOnReceiver = false;
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ displayPIN: function displayPIN(pin) {
+ _("Received PIN " + pin + ". Entering it in the other computer...");
+ this.cid = pin.slice(JPAKE_LENGTH_SECRET);
+ Utils.nextTick(function() { snd.pairWithPIN(pin, false); });
+ },
+ onPairingStart: function onPairingStart() {
+ pairingStartCalledOnReceiver = true;
+ },
+ onComplete: function onComplete(data) {
+ do_check_true(Utils.deepEquals(DATA, data));
+ // Ensure channel was cleared, no error report.
+ do_check_eq(channels[this.cid].data, undefined);
+ do_check_eq(error_report, undefined);
+ run_next_test();
+ }
+ });
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_firstMsgMaxTries_timeout() {
+ _("Test abort when sender doesn't upload anything.");
+
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ displayPIN: function displayPIN(pin) {
+ _("Received PIN " + pin + ". Doing nothing...");
+ this.cid = pin.slice(JPAKE_LENGTH_SECRET);
+ },
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_TIMEOUT);
+ // Ensure channel was cleared, error report was sent.
+ do_check_eq(channels[this.cid].data, undefined);
+ do_check_eq(error_report, JPAKE_ERROR_TIMEOUT);
+ error_report = undefined;
+ run_next_test();
+ }
+ });
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_firstMsgMaxTries() {
+ _("Test that receiver can wait longer for the first message.");
+
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onPaired: function onPaired() {
+ _("Pairing successful, sending final payload.");
+ Utils.nextTick(function() { snd.sendAndComplete(DATA); });
+ },
+ onComplete: function onComplete() {}
+ });
+
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ displayPIN: function displayPIN(pin) {
+ // For the purpose of the tests, the poll interval is 50ms and
+ // we're polling up to 5 times for the first exchange (as
+ // opposed to 2 times for most of the other exchanges). So let's
+ // pretend it took 150ms to enter the PIN on the sender, which should
+ // require 3 polls.
+ // Rather than using an imprecise timer, we hook into the channel's
+ // GET handler to know how long to wait.
+ _("Received PIN " + pin + ". Waiting for three polls before entering it into sender...");
+ this.cid = pin.slice(JPAKE_LENGTH_SECRET);
+ let count = 0;
+ hooks.onGET = function onGET(request) {
+ if (++count == 3) {
+ _("Third GET. Triggering pair.");
+ Utils.nextTick(function() { snd.pairWithPIN(pin, false); });
+ }
+ };
+ },
+ onPairingStart: function onPairingStart(pin) {},
+ onComplete: function onComplete(data) {
+ do_check_true(Utils.deepEquals(DATA, data));
+ // Ensure channel was cleared, no error report.
+ do_check_eq(channels[this.cid].data, undefined);
+ do_check_eq(error_report, undefined);
+
+ // Clean up.
+ initHooks();
+ run_next_test();
+ }
+ });
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_lastMsgMaxTries() {
+ _("Test that receiver can wait longer for the last message.");
+
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onPaired: function onPaired() {
+ // For the purpose of the tests, the poll interval is 50ms and
+ // we're polling up to 5 times for the last exchange (as opposed
+ // to 2 times for other exchanges). So let's pretend it took
+ // 150ms to come up with the final payload, which should require
+ // 3 polls.
+ // Rather than using an imprecise timer, we hook into the channel's
+ // GET handler to know how long to wait.
+ let count = 0;
+ hooks.onGET = function onGET(request) {
+ if (++count == 3) {
+ _("Third GET. Triggering send.");
+ Utils.nextTick(function() { snd.sendAndComplete(DATA); });
+ }
+ };
+ },
+ onComplete: function onComplete() {}
+ });
+
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ displayPIN: function displayPIN(pin) {
+ _("Received PIN " + pin + ". Entering it in the other computer...");
+ this.cid = pin.slice(JPAKE_LENGTH_SECRET);
+ Utils.nextTick(function() { snd.pairWithPIN(pin, false); });
+ },
+ onPairingStart: function onPairingStart(pin) {},
+ onComplete: function onComplete(data) {
+ do_check_true(Utils.deepEquals(DATA, data));
+ // Ensure channel was cleared, no error report.
+ do_check_eq(channels[this.cid].data, undefined);
+ do_check_eq(error_report, undefined);
+
+ // Clean up.
+ initHooks();
+ run_next_test();
+ }
+ });
+
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_wrongPIN() {
+ _("Test abort when PINs don't match.");
+
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_KEYMISMATCH);
+ do_check_eq(error_report, JPAKE_ERROR_KEYMISMATCH);
+ error_report = undefined;
+ }
+ });
+
+ let pairingStartCalledOnReceiver = false;
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ displayPIN: function displayPIN(pin) {
+ this.cid = pin.slice(JPAKE_LENGTH_SECRET);
+ let secret = pin.slice(0, JPAKE_LENGTH_SECRET);
+ secret = Array.prototype.slice.call(secret).reverse().join("");
+ let new_pin = secret + this.cid;
+ _("Received PIN " + pin + ", but I'm entering " + new_pin);
+
+ Utils.nextTick(function() { snd.pairWithPIN(new_pin, false); });
+ },
+ onPairingStart: function onPairingStart() {
+ pairingStartCalledOnReceiver = true;
+ },
+ onAbort: function onAbort(error) {
+ do_check_true(pairingStartCalledOnReceiver);
+ do_check_eq(error, JPAKE_ERROR_NODATA);
+ // Ensure channel was cleared.
+ do_check_eq(channels[this.cid].data, undefined);
+ run_next_test();
+ }
+ });
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_abort_receiver() {
+ _("Test user abort on receiving side.");
+
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ // Manual abort = userabort.
+ do_check_eq(error, JPAKE_ERROR_USERABORT);
+ // Ensure channel was cleared.
+ do_check_eq(channels[this.cid].data, undefined);
+ do_check_eq(error_report, JPAKE_ERROR_USERABORT);
+ error_report = undefined;
+ run_next_test();
+ },
+ displayPIN: function displayPIN(pin) {
+ this.cid = pin.slice(JPAKE_LENGTH_SECRET);
+ Utils.nextTick(function() { rec.abort(); });
+ }
+ });
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_abort_sender() {
+ _("Test user abort on sending side.");
+
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ // Manual abort == userabort.
+ do_check_eq(error, JPAKE_ERROR_USERABORT);
+ do_check_eq(error_report, JPAKE_ERROR_USERABORT);
+ error_report = undefined;
+ }
+ });
+
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_NODATA);
+ // Ensure channel was cleared, no error report.
+ do_check_eq(channels[this.cid].data, undefined);
+ do_check_eq(error_report, undefined);
+ initHooks();
+ run_next_test();
+ },
+ displayPIN: function displayPIN(pin) {
+ _("Received PIN " + pin + ". Entering it in the other computer...");
+ this.cid = pin.slice(JPAKE_LENGTH_SECRET);
+ Utils.nextTick(function() { snd.pairWithPIN(pin, false); });
+
+ // Abort after the first poll.
+ let count = 0;
+ hooks.onGET = function onGET(request) {
+ if (++count >= 1) {
+ _("First GET. Aborting.");
+ Utils.nextTick(function() { snd.abort(); });
+ }
+ };
+ },
+ onPairingStart: function onPairingStart(pin) {}
+ });
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_wrongmessage() {
+ let cid = new_channel();
+ let channel = channels[cid];
+ channel.data = JSON.stringify({type: "receiver2",
+ version: KEYEXCHANGE_VERSION,
+ payload: {}});
+ channel.etag = '"fake-etag"';
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onComplete: function onComplete(data) {
+ do_throw("onComplete shouldn't be called.");
+ },
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_WRONGMESSAGE);
+ run_next_test();
+ }
+ });
+ snd.pairWithPIN("01234567" + cid, false);
+});
+
+
+add_test(function test_error_channel() {
+ let serverURL = Svc.Prefs.get("jpake.serverURL");
+ Svc.Prefs.set("jpake.serverURL", "http://localhost:12345/");
+
+ let rec = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_CHANNEL);
+ Svc.Prefs.set("jpake.serverURL", serverURL);
+ run_next_test();
+ },
+ onPairingStart: function onPairingStart(pin) {},
+ displayPIN: function displayPIN(pin) {}
+ });
+ rec.receiveNoPIN();
+});
+
+
+add_test(function test_error_network() {
+ let serverURL = Svc.Prefs.get("jpake.serverURL");
+ Svc.Prefs.set("jpake.serverURL", "http://localhost:12345/");
+
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_NETWORK);
+ Svc.Prefs.set("jpake.serverURL", serverURL);
+ run_next_test();
+ }
+ });
+ snd.pairWithPIN("0123456789ab", false);
+});
+
+
+add_test(function test_error_server_noETag() {
+ let cid = new_channel();
+ let channel = channels[cid];
+ channel.data = JSON.stringify({type: "receiver1",
+ version: KEYEXCHANGE_VERSION,
+ payload: {}});
+ // This naughty server doesn't supply ETag (well, it supplies empty one).
+ channel.etag = "";
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_SERVER);
+ run_next_test();
+ }
+ });
+ snd.pairWithPIN("01234567" + cid, false);
+});
+
+
+add_test(function test_error_delayNotSupported() {
+ let cid = new_channel();
+ let channel = channels[cid];
+ channel.data = JSON.stringify({type: "receiver1",
+ version: 2,
+ payload: {}});
+ channel.etag = '"fake-etag"';
+ let snd = new JPAKEClient({
+ __proto__: BaseController,
+ onAbort: function onAbort(error) {
+ do_check_eq(error, JPAKE_ERROR_DELAYUNSUPPORTED);
+ run_next_test();
+ }
+ });
+ snd.pairWithPIN("01234567" + cid, true);
+});
+
+
+add_test(function test_sendAndComplete_notPaired() {
+ let snd = new JPAKEClient({__proto__: BaseController});
+ do_check_throws(function () {
+ snd.sendAndComplete(DATA);
+ });
+ run_next_test();
+});
+
+
+add_test(function tearDown() {
+ server.stop(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_keys.js b/services/sync/tests/unit/test_keys.js
new file mode 100644
index 000000000..a828b619c
--- /dev/null
+++ b/services/sync/tests/unit/test_keys.js
@@ -0,0 +1,326 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/util.js");
+
+var collectionKeys = new CollectionKeyManager();
+
+function sha256HMAC(message, key) {
+ let h = Utils.makeHMACHasher(Ci.nsICryptoHMAC.SHA256, key);
+ return Utils.digestBytes(message, h);
+}
+
+function do_check_keypair_eq(a, b) {
+ do_check_eq(2, a.length);
+ do_check_eq(2, b.length);
+ do_check_eq(a[0], b[0]);
+ do_check_eq(a[1], b[1]);
+}
+
+function test_time_keyFromString(iterations) {
+ let k;
+ let o;
+ let b = new BulkKeyBundle("dummy");
+ let d = Utils.decodeKeyBase32("ababcdefabcdefabcdefabcdef");
+ b.generateRandom();
+
+ _("Running " + iterations + " iterations of hmacKeyObject + sha256HMAC.");
+ for (let i = 0; i < iterations; ++i) {
+ let k = b.hmacKeyObject;
+ o = sha256HMAC(d, k);
+ }
+ do_check_true(!!o);
+ _("Done.");
+}
+
+add_test(function test_set_invalid_values() {
+ _("Ensure that setting invalid encryption and HMAC key values is caught.");
+
+ let bundle = new BulkKeyBundle("foo");
+
+ let thrown = false;
+ try {
+ bundle.encryptionKey = null;
+ } catch (ex) {
+ thrown = true;
+ do_check_eq(ex.message.indexOf("Encryption key can only be set to"), 0);
+ } finally {
+ do_check_true(thrown);
+ thrown = false;
+ }
+
+ try {
+ bundle.encryptionKey = ["trollololol"];
+ } catch (ex) {
+ thrown = true;
+ do_check_eq(ex.message.indexOf("Encryption key can only be set to"), 0);
+ } finally {
+ do_check_true(thrown);
+ thrown = false;
+ }
+
+ try {
+ bundle.hmacKey = Utils.generateRandomBytes(15);
+ } catch (ex) {
+ thrown = true;
+ do_check_eq(ex.message.indexOf("HMAC key must be at least 128"), 0);
+ } finally {
+ do_check_true(thrown);
+ thrown = false;
+ }
+
+ try {
+ bundle.hmacKey = null;
+ } catch (ex) {
+ thrown = true;
+ do_check_eq(ex.message.indexOf("HMAC key can only be set to string"), 0);
+ } finally {
+ do_check_true(thrown);
+ thrown = false;
+ }
+
+ try {
+ bundle.hmacKey = ["trollolol"];
+ } catch (ex) {
+ thrown = true;
+ do_check_eq(ex.message.indexOf("HMAC key can only be set to"), 0);
+ } finally {
+ do_check_true(thrown);
+ thrown = false;
+ }
+
+ try {
+ bundle.hmacKey = Utils.generateRandomBytes(15);
+ } catch (ex) {
+ thrown = true;
+ do_check_eq(ex.message.indexOf("HMAC key must be at least 128"), 0);
+ } finally {
+ do_check_true(thrown);
+ thrown = false;
+ }
+
+ run_next_test();
+});
+
+add_test(function test_repeated_hmac() {
+ let testKey = "ababcdefabcdefabcdefabcdef";
+ let k = Utils.makeHMACKey("foo");
+ let one = sha256HMAC(Utils.decodeKeyBase32(testKey), k);
+ let two = sha256HMAC(Utils.decodeKeyBase32(testKey), k);
+ do_check_eq(one, two);
+
+ run_next_test();
+});
+
+add_test(function test_sync_key_bundle_derivation() {
+ _("Ensure derivation from known values works.");
+
+ // The known values in this test were originally verified against Firefox
+ // Home.
+ let bundle = new SyncKeyBundle("st3fan", "q7ynpwq7vsc9m34hankbyi3s3i");
+
+ // These should be compared to the results from Home, as they once were.
+ let e = "14b8c09fa84e92729ee695160af6e0385f8f6215a25d14906e1747bdaa2de426";
+ let h = "370e3566245d79fe602a3adb5137e42439cd2a571235197e0469d7d541b07875";
+
+ let realE = Utils.bytesAsHex(bundle.encryptionKey);
+ let realH = Utils.bytesAsHex(bundle.hmacKey);
+
+ _("Real E: " + realE);
+ _("Real H: " + realH);
+ do_check_eq(realH, h);
+ do_check_eq(realE, e);
+
+ run_next_test();
+});
+
+add_test(function test_keymanager() {
+ let testKey = "ababcdefabcdefabcdefabcdef";
+ let username = "john@example.com";
+
+ // Decode the key here to mirror what generateEntry will do,
+ // but pass it encoded into the KeyBundle call below.
+
+ let sha256inputE = "" + HMAC_INPUT + username + "\x01";
+ let key = Utils.makeHMACKey(Utils.decodeKeyBase32(testKey));
+ let encryptKey = sha256HMAC(sha256inputE, key);
+
+ let sha256inputH = encryptKey + HMAC_INPUT + username + "\x02";
+ let hmacKey = sha256HMAC(sha256inputH, key);
+
+ // Encryption key is stored in base64 for WeaveCrypto convenience.
+ do_check_eq(encryptKey, new SyncKeyBundle(username, testKey).encryptionKey);
+ do_check_eq(hmacKey, new SyncKeyBundle(username, testKey).hmacKey);
+
+ // Test with the same KeyBundle for both.
+ let obj = new SyncKeyBundle(username, testKey);
+ do_check_eq(hmacKey, obj.hmacKey);
+ do_check_eq(encryptKey, obj.encryptionKey);
+
+ run_next_test();
+});
+
+add_test(function test_collections_manager() {
+ let log = Log.repository.getLogger("Test");
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ let identity = new IdentityManager();
+
+ identity.account = "john@example.com";
+ identity.syncKey = "a-bbbbb-ccccc-ddddd-eeeee-fffff";
+
+ let keyBundle = identity.syncKeyBundle;
+
+ /*
+ * Build a test version of storage/crypto/keys.
+ * Encrypt it with the sync key.
+ * Pass it into the CollectionKeyManager.
+ */
+
+ log.info("Building storage keys...");
+ let storage_keys = new CryptoWrapper("crypto", "keys");
+ let default_key64 = Svc.Crypto.generateRandomKey();
+ let default_hmac64 = Svc.Crypto.generateRandomKey();
+ let bookmarks_key64 = Svc.Crypto.generateRandomKey();
+ let bookmarks_hmac64 = Svc.Crypto.generateRandomKey();
+
+ storage_keys.cleartext = {
+ "default": [default_key64, default_hmac64],
+ "collections": {"bookmarks": [bookmarks_key64, bookmarks_hmac64]},
+ };
+ storage_keys.modified = Date.now()/1000;
+ storage_keys.id = "keys";
+
+ log.info("Encrypting storage keys...");
+
+ // Use passphrase (sync key) itself to encrypt the key bundle.
+ storage_keys.encrypt(keyBundle);
+
+ // Sanity checking.
+ do_check_true(null == storage_keys.cleartext);
+ do_check_true(null != storage_keys.ciphertext);
+
+ log.info("Updating collection keys.");
+
+ // updateContents decrypts the object, releasing the payload for us to use.
+ // Returns true, because the default key has changed.
+ do_check_true(collectionKeys.updateContents(keyBundle, storage_keys));
+ let payload = storage_keys.cleartext;
+
+ _("CK: " + JSON.stringify(collectionKeys._collections));
+
+ // Test that the CollectionKeyManager returns a similar WBO.
+ let wbo = collectionKeys.asWBO("crypto", "keys");
+
+ _("WBO: " + JSON.stringify(wbo));
+ _("WBO cleartext: " + JSON.stringify(wbo.cleartext));
+
+ // Check the individual contents.
+ do_check_eq(wbo.collection, "crypto");
+ do_check_eq(wbo.id, "keys");
+ do_check_eq(undefined, wbo.modified);
+ do_check_eq(collectionKeys.lastModified, storage_keys.modified);
+ do_check_true(!!wbo.cleartext.default);
+ do_check_keypair_eq(payload.default, wbo.cleartext.default);
+ do_check_keypair_eq(payload.collections.bookmarks, wbo.cleartext.collections.bookmarks);
+
+ do_check_true('bookmarks' in collectionKeys._collections);
+ do_check_false('tabs' in collectionKeys._collections);
+
+ _("Updating contents twice with the same data doesn't proceed.");
+ storage_keys.encrypt(keyBundle);
+ do_check_false(collectionKeys.updateContents(keyBundle, storage_keys));
+
+ /*
+ * Test that we get the right keys out when we ask for
+ * a collection's tokens.
+ */
+ let b1 = new BulkKeyBundle("bookmarks");
+ b1.keyPairB64 = [bookmarks_key64, bookmarks_hmac64];
+ let b2 = collectionKeys.keyForCollection("bookmarks");
+ do_check_keypair_eq(b1.keyPair, b2.keyPair);
+
+ // Check key equality.
+ do_check_true(b1.equals(b2));
+ do_check_true(b2.equals(b1));
+
+ b1 = new BulkKeyBundle("[default]");
+ b1.keyPairB64 = [default_key64, default_hmac64];
+
+ do_check_false(b1.equals(b2));
+ do_check_false(b2.equals(b1));
+
+ b2 = collectionKeys.keyForCollection(null);
+ do_check_keypair_eq(b1.keyPair, b2.keyPair);
+
+ /*
+ * Checking for update times.
+ */
+ let info_collections = {};
+ do_check_true(collectionKeys.updateNeeded(info_collections));
+ info_collections["crypto"] = 5000;
+ do_check_false(collectionKeys.updateNeeded(info_collections));
+ info_collections["crypto"] = 1 + (Date.now()/1000); // Add one in case computers are fast!
+ do_check_true(collectionKeys.updateNeeded(info_collections));
+
+ collectionKeys.lastModified = null;
+ do_check_true(collectionKeys.updateNeeded({}));
+
+ /*
+ * Check _compareKeyBundleCollections.
+ */
+ function newBundle(name) {
+ let r = new BulkKeyBundle(name);
+ r.generateRandom();
+ return r;
+ }
+ let k1 = newBundle("k1");
+ let k2 = newBundle("k2");
+ let k3 = newBundle("k3");
+ let k4 = newBundle("k4");
+ let k5 = newBundle("k5");
+ let coll1 = {"foo": k1, "bar": k2};
+ let coll2 = {"foo": k1, "bar": k2};
+ let coll3 = {"foo": k1, "bar": k3};
+ let coll4 = {"foo": k4};
+ let coll5 = {"baz": k5, "bar": k2};
+ let coll6 = {};
+
+ let d1 = collectionKeys._compareKeyBundleCollections(coll1, coll2); // []
+ let d2 = collectionKeys._compareKeyBundleCollections(coll1, coll3); // ["bar"]
+ let d3 = collectionKeys._compareKeyBundleCollections(coll3, coll2); // ["bar"]
+ let d4 = collectionKeys._compareKeyBundleCollections(coll1, coll4); // ["bar", "foo"]
+ let d5 = collectionKeys._compareKeyBundleCollections(coll5, coll2); // ["baz", "foo"]
+ let d6 = collectionKeys._compareKeyBundleCollections(coll6, coll1); // ["bar", "foo"]
+ let d7 = collectionKeys._compareKeyBundleCollections(coll5, coll5); // []
+ let d8 = collectionKeys._compareKeyBundleCollections(coll6, coll6); // []
+
+ do_check_true(d1.same);
+ do_check_false(d2.same);
+ do_check_false(d3.same);
+ do_check_false(d4.same);
+ do_check_false(d5.same);
+ do_check_false(d6.same);
+ do_check_true(d7.same);
+ do_check_true(d8.same);
+
+ do_check_array_eq(d1.changed, []);
+ do_check_array_eq(d2.changed, ["bar"]);
+ do_check_array_eq(d3.changed, ["bar"]);
+ do_check_array_eq(d4.changed, ["bar", "foo"]);
+ do_check_array_eq(d5.changed, ["baz", "foo"]);
+ do_check_array_eq(d6.changed, ["bar", "foo"]);
+
+ run_next_test();
+});
+
+function run_test() {
+ // Only do 1,000 to avoid a 5-second pause in test runs.
+ test_time_keyFromString(1000);
+
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_load_modules.js b/services/sync/tests/unit/test_load_modules.js
new file mode 100644
index 000000000..0b222520c
--- /dev/null
+++ b/services/sync/tests/unit/test_load_modules.js
@@ -0,0 +1,55 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+const modules = [
+ "addonutils.js",
+ "addonsreconciler.js",
+ "browserid_identity.js",
+ "constants.js",
+ "engines/addons.js",
+ "engines/bookmarks.js",
+ "engines/clients.js",
+ "engines/extension-storage.js",
+ "engines/forms.js",
+ "engines/history.js",
+ "engines/passwords.js",
+ "engines/prefs.js",
+ "engines/tabs.js",
+ "engines.js",
+ "identity.js",
+ "jpakeclient.js",
+ "keys.js",
+ "main.js",
+ "policies.js",
+ "record.js",
+ "resource.js",
+ "rest.js",
+ "service.js",
+ "stages/cluster.js",
+ "stages/declined.js",
+ "stages/enginesync.js",
+ "status.js",
+ "userapi.js",
+ "util.js",
+];
+
+const testingModules = [
+ "fakeservices.js",
+ "rotaryengine.js",
+ "utils.js",
+ "fxa_utils.js",
+];
+
+function run_test() {
+ for (let m of modules) {
+ let res = "resource://services-sync/" + m;
+ _("Attempting to load " + res);
+ Cu.import(res, {});
+ }
+
+ for (let m of testingModules) {
+ let res = "resource://testing-common/services/sync/" + m;
+ _("Attempting to load " + res);
+ Cu.import(res, {});
+ }
+}
diff --git a/services/sync/tests/unit/test_node_reassignment.js b/services/sync/tests/unit/test_node_reassignment.js
new file mode 100644
index 000000000..66d21b6f1
--- /dev/null
+++ b/services/sync/tests/unit/test_node_reassignment.js
@@ -0,0 +1,523 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Test that node reassignment responses are respected on all kinds of " +
+ "requests.");
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.clear();
+
+function run_test() {
+ Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Resource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+ initTestLogging();
+ validate_all_future_pings();
+ ensureLegacyIdentityManager();
+
+ Service.engineManager.register(RotaryEngine);
+
+ // None of the failures in this file should result in a UI error.
+ function onUIError() {
+ do_throw("Errors should not be presented in the UI.");
+ }
+ Svc.Obs.add("weave:ui:login:error", onUIError);
+ Svc.Obs.add("weave:ui:sync:error", onUIError);
+
+ run_next_test();
+}
+
+/**
+ * Emulate the following Zeus config:
+ * $draining = data.get($prefix . $host . " draining");
+ * if ($draining == "drain.") {
+ * log.warn($log_host_db_status . " migrating=1 (node-reassignment)" .
+ * $log_suffix);
+ * http.sendResponse("401 Node reassignment", $content_type,
+ * '"server request: node reassignment"', "");
+ * }
+ */
+const reassignBody = "\"server request: node reassignment\"";
+
+// API-compatible with SyncServer handler. Bind `handler` to something to use
+// as a ServerCollection handler.
+function handleReassign(handler, req, resp) {
+ resp.setStatusLine(req.httpVersion, 401, "Node reassignment");
+ resp.setHeader("Content-Type", "application/json");
+ resp.bodyOutputStream.write(reassignBody, reassignBody.length);
+}
+
+/**
+ * A node assignment handler.
+ */
+function installNodeHandler(server, next) {
+ let newNodeBody = server.baseURI;
+ function handleNodeRequest(req, resp) {
+ _("Client made a request for a node reassignment.");
+ resp.setStatusLine(req.httpVersion, 200, "OK");
+ resp.setHeader("Content-Type", "text/plain");
+ resp.bodyOutputStream.write(newNodeBody, newNodeBody.length);
+ Utils.nextTick(next);
+ }
+ let nodePath = "/user/1.0/johndoe/node/weave";
+ server.server.registerPathHandler(nodePath, handleNodeRequest);
+ _("Registered node handler at " + nodePath);
+}
+
+function prepareServer() {
+ let deferred = Promise.defer();
+ configureIdentity({username: "johndoe"}).then(() => {
+ let server = new SyncServer();
+ server.registerUser("johndoe");
+ server.start();
+ Service.serverURL = server.baseURI;
+ Service.clusterURL = server.baseURI;
+ do_check_eq(Service.userAPIURI, server.baseURI + "user/1.0/");
+ deferred.resolve(server);
+ });
+ return deferred.promise;
+}
+
+function getReassigned() {
+ try {
+ return Services.prefs.getBoolPref("services.sync.lastSyncReassigned");
+ } catch (ex) {
+ if (ex.result == Cr.NS_ERROR_UNEXPECTED) {
+ return false;
+ }
+ do_throw("Got exception retrieving lastSyncReassigned: " +
+ Log.exceptionStr(ex));
+ }
+}
+
+/**
+ * Make a test request to `url`, then watch the result of two syncs
+ * to ensure that a node request was made.
+ * Runs `between` between the two. This can be used to undo deliberate failure
+ * setup, detach observers, etc.
+ */
+function* syncAndExpectNodeReassignment(server, firstNotification, between,
+ secondNotification, url) {
+ let deferred = Promise.defer();
+ function onwards() {
+ let nodeFetched = false;
+ function onFirstSync() {
+ _("First sync completed.");
+ Svc.Obs.remove(firstNotification, onFirstSync);
+ Svc.Obs.add(secondNotification, onSecondSync);
+
+ do_check_eq(Service.clusterURL, "");
+
+ // Track whether we fetched node/weave. We want to wait for the second
+ // sync to finish so that we're cleaned up for the next test, so don't
+ // run_next_test in the node handler.
+ nodeFetched = false;
+
+ // Verify that the client requests a node reassignment.
+ // Install a node handler to watch for these requests.
+ installNodeHandler(server, function () {
+ nodeFetched = true;
+ });
+
+ // Allow for tests to clean up error conditions.
+ between();
+ }
+ function onSecondSync() {
+ _("Second sync completed.");
+ Svc.Obs.remove(secondNotification, onSecondSync);
+ Service.scheduler.clearSyncTriggers();
+
+ // Make absolutely sure that any event listeners are done with their work
+ // before we proceed.
+ waitForZeroTimer(function () {
+ _("Second sync nextTick.");
+ do_check_true(nodeFetched);
+ Service.startOver();
+ server.stop(deferred.resolve);
+ });
+ }
+
+ Svc.Obs.add(firstNotification, onFirstSync);
+ Service.sync();
+ }
+
+ // Make sure that it works!
+ let request = new RESTRequest(url);
+ request.get(function () {
+ do_check_eq(request.response.status, 401);
+ Utils.nextTick(onwards);
+ });
+ yield deferred.promise;
+}
+
+add_task(function* test_momentary_401_engine() {
+ _("Test a failure for engine URLs that's resolved by reassignment.");
+ let server = yield prepareServer();
+ let john = server.user("johndoe");
+
+ _("Enabling the Rotary engine.");
+ let engine = Service.engineManager.get("rotary");
+ engine.enabled = true;
+
+ // We need the server to be correctly set up prior to experimenting. Do this
+ // through a sync.
+ let global = {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ rotary: {version: engine.version,
+ syncID: engine.syncID}}
+ john.createCollection("meta").insert("global", global);
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Setting up Rotary collection to 401.");
+ let rotary = john.createCollection("rotary");
+ let oldHandler = rotary.collectionHandler;
+ rotary.collectionHandler = handleReassign.bind(this, undefined);
+
+ // We want to verify that the clusterURL pref has been cleared after a 401
+ // inside a sync. Flag the Rotary engine to need syncing.
+ john.collection("rotary").timestamp += 1000;
+
+ function between() {
+ _("Undoing test changes.");
+ rotary.collectionHandler = oldHandler;
+
+ function onLoginStart() {
+ // lastSyncReassigned shouldn't be cleared until a sync has succeeded.
+ _("Ensuring that lastSyncReassigned is still set at next sync start.");
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ do_check_true(getReassigned());
+ }
+
+ _("Adding observer that lastSyncReassigned is still set on login.");
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+ }
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:finish",
+ between,
+ "weave:service:sync:finish",
+ Service.storageURL + "rotary");
+});
+
+// This test ends up being a failing fetch *after we're already logged in*.
+add_task(function* test_momentary_401_info_collections() {
+ _("Test a failure for info/collections that's resolved by reassignment.");
+ let server = yield prepareServer();
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ // Return a 401 for info requests, particularly info/collections.
+ let oldHandler = server.toplevelHandlers.info;
+ server.toplevelHandlers.info = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.info = oldHandler;
+ }
+
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.infoURL);
+});
+
+add_task(function* test_momentary_401_storage_loggedin() {
+ _("Test a failure for any storage URL, not just engine parts. " +
+ "Resolved by reassignment.");
+ let server = yield prepareServer();
+
+ _("Performing initial sync to ensure we are logged in.")
+ Service.sync();
+
+ // Return a 401 for all storage requests.
+ let oldHandler = server.toplevelHandlers.storage;
+ server.toplevelHandlers.storage = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.storage = oldHandler;
+ }
+
+ do_check_true(Service.isLoggedIn, "already logged in");
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:sync:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.storageURL + "meta/global");
+});
+
+add_task(function* test_momentary_401_storage_loggedout() {
+ _("Test a failure for any storage URL, not just engine parts. " +
+ "Resolved by reassignment.");
+ let server = yield prepareServer();
+
+ // Return a 401 for all storage requests.
+ let oldHandler = server.toplevelHandlers.storage;
+ server.toplevelHandlers.storage = handleReassign;
+
+ function undo() {
+ _("Undoing test changes.");
+ server.toplevelHandlers.storage = oldHandler;
+ }
+
+ do_check_false(Service.isLoggedIn, "not already logged in");
+ yield syncAndExpectNodeReassignment(server,
+ "weave:service:login:error",
+ undo,
+ "weave:service:sync:finish",
+ Service.storageURL + "meta/global");
+});
+
+add_task(function* test_loop_avoidance_storage() {
+ _("Test that a repeated failure doesn't result in a sync loop " +
+ "if node reassignment cannot resolve the failure.");
+
+ let server = yield prepareServer();
+
+ // Return a 401 for all storage requests.
+ let oldHandler = server.toplevelHandlers.storage;
+ server.toplevelHandlers.storage = handleReassign;
+
+ let firstNotification = "weave:service:login:error";
+ let secondNotification = "weave:service:login:error";
+ let thirdNotification = "weave:service:sync:finish";
+
+ let nodeFetched = false;
+ let deferred = Promise.defer();
+
+ // Track the time. We want to make sure the duration between the first and
+ // second sync is small, and then that the duration between second and third
+ // is set to be large.
+ let now;
+
+ function onFirstSync() {
+ _("First sync completed.");
+ Svc.Obs.remove(firstNotification, onFirstSync);
+ Svc.Obs.add(secondNotification, onSecondSync);
+
+ do_check_eq(Service.clusterURL, "");
+
+ // We got a 401 mid-sync, and set the pref accordingly.
+ do_check_true(Services.prefs.getBoolPref("services.sync.lastSyncReassigned"));
+
+ // Track whether we fetched node/weave. We want to wait for the second
+ // sync to finish so that we're cleaned up for the next test, so don't
+ // run_next_test in the node handler.
+ nodeFetched = false;
+
+ // Verify that the client requests a node reassignment.
+ // Install a node handler to watch for these requests.
+ installNodeHandler(server, function () {
+ nodeFetched = true;
+ });
+
+ // Update the timestamp.
+ now = Date.now();
+ }
+
+ function onSecondSync() {
+ _("Second sync completed.");
+ Svc.Obs.remove(secondNotification, onSecondSync);
+ Svc.Obs.add(thirdNotification, onThirdSync);
+
+ // This sync occurred within the backoff interval.
+ let elapsedTime = Date.now() - now;
+ do_check_true(elapsedTime < MINIMUM_BACKOFF_INTERVAL);
+
+ // This pref will be true until a sync completes successfully.
+ do_check_true(getReassigned());
+
+ // The timer will be set for some distant time.
+ // We store nextSync in prefs, which offers us only limited resolution.
+ // Include that logic here.
+ let expectedNextSync = 1000 * Math.floor((now + MINIMUM_BACKOFF_INTERVAL) / 1000);
+ _("Next sync scheduled for " + Service.scheduler.nextSync);
+ _("Expected to be slightly greater than " + expectedNextSync);
+
+ do_check_true(Service.scheduler.nextSync >= expectedNextSync);
+ do_check_true(!!Service.scheduler.syncTimer);
+
+ // Undo our evil scheme.
+ server.toplevelHandlers.storage = oldHandler;
+
+ // Bring the timer forward to kick off a successful sync, so we can watch
+ // the pref get cleared.
+ Service.scheduler.scheduleNextSync(0);
+ }
+ function onThirdSync() {
+ Svc.Obs.remove(thirdNotification, onThirdSync);
+
+ // That'll do for now; no more syncs.
+ Service.scheduler.clearSyncTriggers();
+
+ // Make absolutely sure that any event listeners are done with their work
+ // before we proceed.
+ waitForZeroTimer(function () {
+ _("Third sync nextTick.");
+ do_check_false(getReassigned());
+ do_check_true(nodeFetched);
+ Service.startOver();
+ server.stop(deferred.resolve);
+ });
+ }
+
+ Svc.Obs.add(firstNotification, onFirstSync);
+
+ now = Date.now();
+ Service.sync();
+ yield deferred.promise;
+});
+
+add_task(function* test_loop_avoidance_engine() {
+ _("Test that a repeated 401 in an engine doesn't result in a sync loop " +
+ "if node reassignment cannot resolve the failure.");
+ let server = yield prepareServer();
+ let john = server.user("johndoe");
+
+ _("Enabling the Rotary engine.");
+ let engine = Service.engineManager.get("rotary");
+ engine.enabled = true;
+ let deferred = Promise.defer();
+
+ // We need the server to be correctly set up prior to experimenting. Do this
+ // through a sync.
+ let global = {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ rotary: {version: engine.version,
+ syncID: engine.syncID}}
+ john.createCollection("meta").insert("global", global);
+
+ _("First sync to prepare server contents.");
+ Service.sync();
+
+ _("Setting up Rotary collection to 401.");
+ let rotary = john.createCollection("rotary");
+ let oldHandler = rotary.collectionHandler;
+ rotary.collectionHandler = handleReassign.bind(this, undefined);
+
+ // Flag the Rotary engine to need syncing.
+ john.collection("rotary").timestamp += 1000;
+
+ function onLoginStart() {
+ // lastSyncReassigned shouldn't be cleared until a sync has succeeded.
+ _("Ensuring that lastSyncReassigned is still set at next sync start.");
+ do_check_true(getReassigned());
+ }
+
+ function beforeSuccessfulSync() {
+ _("Undoing test changes.");
+ rotary.collectionHandler = oldHandler;
+ }
+
+ function afterSuccessfulSync() {
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ Service.startOver();
+ server.stop(deferred.resolve);
+ }
+
+ let firstNotification = "weave:service:sync:finish";
+ let secondNotification = "weave:service:sync:finish";
+ let thirdNotification = "weave:service:sync:finish";
+
+ let nodeFetched = false;
+
+ // Track the time. We want to make sure the duration between the first and
+ // second sync is small, and then that the duration between second and third
+ // is set to be large.
+ let now;
+
+ function onFirstSync() {
+ _("First sync completed.");
+ Svc.Obs.remove(firstNotification, onFirstSync);
+ Svc.Obs.add(secondNotification, onSecondSync);
+
+ do_check_eq(Service.clusterURL, "");
+
+ _("Adding observer that lastSyncReassigned is still set on login.");
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+
+ // We got a 401 mid-sync, and set the pref accordingly.
+ do_check_true(Services.prefs.getBoolPref("services.sync.lastSyncReassigned"));
+
+ // Track whether we fetched node/weave. We want to wait for the second
+ // sync to finish so that we're cleaned up for the next test, so don't
+ // run_next_test in the node handler.
+ nodeFetched = false;
+
+ // Verify that the client requests a node reassignment.
+ // Install a node handler to watch for these requests.
+ installNodeHandler(server, function () {
+ nodeFetched = true;
+ });
+
+ // Update the timestamp.
+ now = Date.now();
+ }
+
+ function onSecondSync() {
+ _("Second sync completed.");
+ Svc.Obs.remove(secondNotification, onSecondSync);
+ Svc.Obs.add(thirdNotification, onThirdSync);
+
+ // This sync occurred within the backoff interval.
+ let elapsedTime = Date.now() - now;
+ do_check_true(elapsedTime < MINIMUM_BACKOFF_INTERVAL);
+
+ // This pref will be true until a sync completes successfully.
+ do_check_true(getReassigned());
+
+ // The timer will be set for some distant time.
+ // We store nextSync in prefs, which offers us only limited resolution.
+ // Include that logic here.
+ let expectedNextSync = 1000 * Math.floor((now + MINIMUM_BACKOFF_INTERVAL) / 1000);
+ _("Next sync scheduled for " + Service.scheduler.nextSync);
+ _("Expected to be slightly greater than " + expectedNextSync);
+
+ do_check_true(Service.scheduler.nextSync >= expectedNextSync);
+ do_check_true(!!Service.scheduler.syncTimer);
+
+ // Undo our evil scheme.
+ beforeSuccessfulSync();
+
+ // Bring the timer forward to kick off a successful sync, so we can watch
+ // the pref get cleared.
+ Service.scheduler.scheduleNextSync(0);
+ }
+
+ function onThirdSync() {
+ Svc.Obs.remove(thirdNotification, onThirdSync);
+
+ // That'll do for now; no more syncs.
+ Service.scheduler.clearSyncTriggers();
+
+ // Make absolutely sure that any event listeners are done with their work
+ // before we proceed.
+ waitForZeroTimer(function () {
+ _("Third sync nextTick.");
+ do_check_false(getReassigned());
+ do_check_true(nodeFetched);
+ afterSuccessfulSync();
+ });
+ }
+
+ Svc.Obs.add(firstNotification, onFirstSync);
+
+ now = Date.now();
+ Service.sync();
+ yield deferred.promise;
+});
diff --git a/services/sync/tests/unit/test_password_store.js b/services/sync/tests/unit/test_password_store.js
new file mode 100644
index 000000000..d232d5e63
--- /dev/null
+++ b/services/sync/tests/unit/test_password_store.js
@@ -0,0 +1,199 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines/passwords.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+
+function checkRecord(name, record, expectedCount, timeCreated,
+ expectedTimeCreated, timePasswordChanged,
+ expectedTimePasswordChanged, recordIsUpdated) {
+ let engine = Service.engineManager.get("passwords");
+ let store = engine._store;
+
+ let count = {};
+ let logins = Services.logins.findLogins(count, record.hostname,
+ record.formSubmitURL, null);
+
+ _("Record" + name + ":" + JSON.stringify(logins));
+ _("Count" + name + ":" + count.value);
+
+ do_check_eq(count.value, expectedCount);
+
+ if (expectedCount > 0) {
+ do_check_true(!!store.getAllIDs()[record.id]);
+ let stored_record = logins[0].QueryInterface(Ci.nsILoginMetaInfo);
+
+ if (timeCreated !== undefined) {
+ do_check_eq(stored_record.timeCreated, expectedTimeCreated);
+ }
+
+ if (timePasswordChanged !== undefined) {
+ if (recordIsUpdated) {
+ do_check_true(stored_record.timePasswordChanged >= expectedTimePasswordChanged);
+ } else {
+ do_check_eq(stored_record.timePasswordChanged, expectedTimePasswordChanged);
+ }
+ return stored_record.timePasswordChanged;
+ }
+ } else {
+ do_check_true(!store.getAllIDs()[record.id]);
+ }
+}
+
+
+function changePassword(name, hostname, password, expectedCount, timeCreated,
+ expectedTimeCreated, timePasswordChanged,
+ expectedTimePasswordChanged, insert, recordIsUpdated) {
+
+ const BOGUS_GUID = "zzzzzz" + hostname;
+
+ let record = {id: BOGUS_GUID,
+ hostname: hostname,
+ formSubmitURL: hostname,
+ username: "john",
+ password: password,
+ usernameField: "username",
+ passwordField: "password"};
+
+ if (timeCreated !== undefined) {
+ record.timeCreated = timeCreated;
+ }
+
+ if (timePasswordChanged !== undefined) {
+ record.timePasswordChanged = timePasswordChanged;
+ }
+
+
+ let engine = Service.engineManager.get("passwords");
+ let store = engine._store;
+
+ if (insert) {
+ do_check_eq(store.applyIncomingBatch([record]).length, 0);
+ }
+
+ return checkRecord(name, record, expectedCount, timeCreated,
+ expectedTimeCreated, timePasswordChanged,
+ expectedTimePasswordChanged, recordIsUpdated);
+
+}
+
+
+function test_apply_records_with_times(hostname, timeCreated, timePasswordChanged) {
+ // The following record is going to be inserted in the store and it needs
+ // to be found there. Then its timestamps are going to be compared to
+ // the expected values.
+ changePassword(" ", hostname, "password", 1, timeCreated, timeCreated,
+ timePasswordChanged, timePasswordChanged, true);
+}
+
+
+function test_apply_multiple_records_with_times() {
+ // The following records are going to be inserted in the store and they need
+ // to be found there. Then their timestamps are going to be compared to
+ // the expected values.
+ changePassword("A", "http://foo.a.com", "password", 1, undefined, undefined,
+ undefined, undefined, true);
+ changePassword("B", "http://foo.b.com", "password", 1, 1000, 1000, undefined,
+ undefined, true);
+ changePassword("C", "http://foo.c.com", "password", 1, undefined, undefined,
+ 1000, 1000, true);
+ changePassword("D", "http://foo.d.com", "password", 1, 1000, 1000, 1000,
+ 1000, true);
+
+ // The following records are not going to be inserted in the store and they
+ // are not going to be found there.
+ changePassword("NotInStoreA", "http://foo.aaaa.com", "password", 0,
+ undefined, undefined, undefined, undefined, false);
+ changePassword("NotInStoreB", "http://foo.bbbb.com", "password", 0, 1000,
+ 1000, undefined, undefined, false);
+ changePassword("NotInStoreC", "http://foo.cccc.com", "password", 0,
+ undefined, undefined, 1000, 1000, false);
+ changePassword("NotInStoreD", "http://foo.dddd.com", "password", 0, 1000,
+ 1000, 1000, 1000, false);
+}
+
+
+function test_apply_same_record_with_different_times() {
+ // The following record is going to be inserted multiple times in the store
+ // and it needs to be found there. Then its timestamps are going to be
+ // compared to the expected values.
+ var timePasswordChanged = 100;
+ timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100,
+ 100, 100, timePasswordChanged, true);
+ timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100,
+ 100, 800, timePasswordChanged, true,
+ true);
+ timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 500,
+ 100, 800, timePasswordChanged, true,
+ true);
+ timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500,
+ 100, 1536213005222, timePasswordChanged,
+ true, true);
+ timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500,
+ 100, 800, timePasswordChanged, true, true);
+}
+
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Engine.Passwords").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Store.Passwords").level = Log.Level.Trace;
+
+ const BOGUS_GUID_A = "zzzzzzzzzzzz";
+ const BOGUS_GUID_B = "yyyyyyyyyyyy";
+ let recordA = {id: BOGUS_GUID_A,
+ hostname: "http://foo.bar.com",
+ formSubmitURL: "http://foo.bar.com/baz",
+ httpRealm: "secure",
+ username: "john",
+ password: "smith",
+ usernameField: "username",
+ passwordField: "password"};
+ let recordB = {id: BOGUS_GUID_B,
+ hostname: "http://foo.baz.com",
+ formSubmitURL: "http://foo.baz.com/baz",
+ username: "john",
+ password: "smith",
+ usernameField: "username",
+ passwordField: "password"};
+
+ let engine = Service.engineManager.get("passwords");
+ let store = engine._store;
+
+ try {
+ do_check_eq(store.applyIncomingBatch([recordA, recordB]).length, 0);
+
+ // Only the good record makes it to Services.logins.
+ let badCount = {};
+ let goodCount = {};
+ let badLogins = Services.logins.findLogins(badCount, recordA.hostname,
+ recordA.formSubmitURL,
+ recordA.httpRealm);
+ let goodLogins = Services.logins.findLogins(goodCount, recordB.hostname,
+ recordB.formSubmitURL, null);
+
+ _("Bad: " + JSON.stringify(badLogins));
+ _("Good: " + JSON.stringify(goodLogins));
+ _("Count: " + badCount.value + ", " + goodCount.value);
+
+ do_check_eq(goodCount.value, 1);
+ do_check_eq(badCount.value, 0);
+
+ do_check_true(!!store.getAllIDs()[BOGUS_GUID_B]);
+ do_check_true(!store.getAllIDs()[BOGUS_GUID_A]);
+
+ test_apply_records_with_times("http://afoo.baz.com", undefined, undefined);
+ test_apply_records_with_times("http://bfoo.baz.com", 1000, undefined);
+ test_apply_records_with_times("http://cfoo.baz.com", undefined, 2000);
+ test_apply_records_with_times("http://dfoo.baz.com", 1000, 2000);
+
+ test_apply_multiple_records_with_times();
+
+ test_apply_same_record_with_different_times();
+
+ } finally {
+ store.wipe();
+ }
+} \ No newline at end of file
diff --git a/services/sync/tests/unit/test_password_tracker.js b/services/sync/tests/unit/test_password_tracker.js
new file mode 100644
index 000000000..09ca141a6
--- /dev/null
+++ b/services/sync/tests/unit/test_password_tracker.js
@@ -0,0 +1,101 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines/passwords.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+Service.engineManager.register(PasswordEngine);
+var engine = Service.engineManager.get("passwords");
+var store = engine._store;
+var tracker = engine._tracker;
+
+// Don't do asynchronous writes.
+tracker.persistChangedIDs = false;
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+add_test(function test_tracking() {
+ let recordNum = 0;
+
+ _("Verify we've got an empty tracker to work with.");
+ do_check_empty(tracker.changedIDs);
+
+ function createPassword() {
+ _("RECORD NUM: " + recordNum);
+ let record = {id: "GUID" + recordNum,
+ hostname: "http://foo.bar.com",
+ formSubmitURL: "http://foo.bar.com/baz",
+ username: "john" + recordNum,
+ password: "smith",
+ usernameField: "username",
+ passwordField: "password"};
+ recordNum++;
+ let login = store._nsLoginInfoFromRecord(record);
+ Services.logins.addLogin(login);
+ }
+
+ try {
+ _("Create a password record. Won't show because we haven't started tracking yet");
+ createPassword();
+ do_check_empty(tracker.changedIDs);
+ do_check_eq(tracker.score, 0);
+
+ _("Tell the tracker to start tracking changes.");
+ Svc.Obs.notify("weave:engine:start-tracking");
+ createPassword();
+ do_check_attribute_count(tracker.changedIDs, 1);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+
+ _("Notifying twice won't do any harm.");
+ Svc.Obs.notify("weave:engine:start-tracking");
+ createPassword();
+ do_check_attribute_count(tracker.changedIDs, 2);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+
+ _("Let's stop tracking again.");
+ tracker.clearChangedIDs();
+ tracker.resetScore();
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ createPassword();
+ do_check_empty(tracker.changedIDs);
+ do_check_eq(tracker.score, 0);
+
+ _("Notifying twice won't do any harm.");
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ createPassword();
+ do_check_empty(tracker.changedIDs);
+ do_check_eq(tracker.score, 0);
+
+ } finally {
+ _("Clean up.");
+ store.wipe();
+ tracker.clearChangedIDs();
+ tracker.resetScore();
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ run_next_test();
+ }
+});
+
+add_test(function test_onWipe() {
+ _("Verify we've got an empty tracker to work with.");
+ do_check_empty(tracker.changedIDs);
+ do_check_eq(tracker.score, 0);
+
+ try {
+ _("A store wipe should increment the score");
+ Svc.Obs.notify("weave:engine:start-tracking");
+ store.wipe();
+
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ } finally {
+ tracker.resetScore();
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ run_next_test();
+ }
+});
diff --git a/services/sync/tests/unit/test_password_validator.js b/services/sync/tests/unit/test_password_validator.js
new file mode 100644
index 000000000..a4a148fbe
--- /dev/null
+++ b/services/sync/tests/unit/test_password_validator.js
@@ -0,0 +1,158 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Components.utils.import("resource://services-sync/engines/passwords.js");
+
+function getDummyServerAndClient() {
+ return {
+ server: [
+ {
+ id: "11111",
+ guid: "11111",
+ hostname: "https://www.11111.com",
+ formSubmitURL: "https://www.11111.com/login",
+ password: "qwerty123",
+ passwordField: "pass",
+ username: "foobar",
+ usernameField: "user",
+ httpRealm: null,
+ },
+ {
+ id: "22222",
+ guid: "22222",
+ hostname: "https://www.22222.org",
+ formSubmitURL: "https://www.22222.org/login",
+ password: "hunter2",
+ passwordField: "passwd",
+ username: "baz12345",
+ usernameField: "user",
+ httpRealm: null,
+ },
+ {
+ id: "33333",
+ guid: "33333",
+ hostname: "https://www.33333.com",
+ formSubmitURL: "https://www.33333.com/login",
+ password: "p4ssw0rd",
+ passwordField: "passwad",
+ username: "quux",
+ usernameField: "user",
+ httpRealm: null,
+ },
+ ],
+ client: [
+ {
+ id: "11111",
+ guid: "11111",
+ hostname: "https://www.11111.com",
+ formSubmitURL: "https://www.11111.com/login",
+ password: "qwerty123",
+ passwordField: "pass",
+ username: "foobar",
+ usernameField: "user",
+ httpRealm: null,
+ },
+ {
+ id: "22222",
+ guid: "22222",
+ hostname: "https://www.22222.org",
+ formSubmitURL: "https://www.22222.org/login",
+ password: "hunter2",
+ passwordField: "passwd",
+ username: "baz12345",
+ usernameField: "user",
+ httpRealm: null,
+
+ },
+ {
+ id: "33333",
+ guid: "33333",
+ hostname: "https://www.33333.com",
+ formSubmitURL: "https://www.33333.com/login",
+ password: "p4ssw0rd",
+ passwordField: "passwad",
+ username: "quux",
+ usernameField: "user",
+ httpRealm: null,
+ }
+ ]
+ };
+}
+
+
+add_test(function test_valid() {
+ let { server, client } = getDummyServerAndClient();
+ let validator = new PasswordValidator();
+ let { problemData, clientRecords, records, deletedRecords } =
+ validator.compareClientWithServer(client, server);
+ equal(clientRecords.length, 3);
+ equal(records.length, 3)
+ equal(deletedRecords.length, 0);
+ deepEqual(problemData, validator.emptyProblemData());
+
+ run_next_test();
+});
+
+add_test(function test_missing() {
+ let validator = new PasswordValidator();
+ {
+ let { server, client } = getDummyServerAndClient();
+
+ client.pop();
+
+ let { problemData, clientRecords, records, deletedRecords } =
+ validator.compareClientWithServer(client, server);
+
+ equal(clientRecords.length, 2);
+ equal(records.length, 3)
+ equal(deletedRecords.length, 0);
+
+ let expected = validator.emptyProblemData();
+ expected.clientMissing.push("33333");
+ deepEqual(problemData, expected);
+ }
+ {
+ let { server, client } = getDummyServerAndClient();
+
+ server.pop();
+
+ let { problemData, clientRecords, records, deletedRecords } =
+ validator.compareClientWithServer(client, server);
+
+ equal(clientRecords.length, 3);
+ equal(records.length, 2)
+ equal(deletedRecords.length, 0);
+
+ let expected = validator.emptyProblemData();
+ expected.serverMissing.push("33333");
+ deepEqual(problemData, expected);
+ }
+
+ run_next_test();
+});
+
+
+add_test(function test_deleted() {
+ let { server, client } = getDummyServerAndClient();
+ let deletionRecord = { id: "444444", guid: "444444", deleted: true };
+
+ server.push(deletionRecord);
+ let validator = new PasswordValidator();
+
+ let { problemData, clientRecords, records, deletedRecords } =
+ validator.compareClientWithServer(client, server);
+
+ equal(clientRecords.length, 3);
+ equal(records.length, 4);
+ deepEqual(deletedRecords, [deletionRecord]);
+
+ let expected = validator.emptyProblemData();
+ deepEqual(problemData, expected);
+
+ run_next_test();
+});
+
+
+function run_test() {
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_places_guid_downgrade.js b/services/sync/tests/unit/test_places_guid_downgrade.js
new file mode 100644
index 000000000..2f99c4a93
--- /dev/null
+++ b/services/sync/tests/unit/test_places_guid_downgrade.js
@@ -0,0 +1,215 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/history.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/service.js");
+
+const kDBName = "places.sqlite";
+const storageSvc = Cc["@mozilla.org/storage/service;1"]
+ .getService(Ci.mozIStorageService);
+
+const fxuri = Utils.makeURI("http://getfirefox.com/");
+const tburi = Utils.makeURI("http://getthunderbird.com/");
+
+function setPlacesDatabase(aFileName) {
+ removePlacesDatabase();
+ _("Copying over places.sqlite.");
+ let file = do_get_file(aFileName);
+ file.copyTo(gSyncProfile, kDBName);
+}
+
+function removePlacesDatabase() {
+ _("Removing places.sqlite.");
+ let file = gSyncProfile.clone();
+ file.append(kDBName);
+ try {
+ file.remove(false);
+ } catch (ex) {
+ // Windows is awesome. NOT.
+ }
+}
+
+Svc.Obs.add("places-shutdown", function () {
+ do_timeout(0, removePlacesDatabase);
+});
+
+
+// Verify initial database state. Function borrowed from places tests.
+add_test(function test_initial_state() {
+ _("Verify initial setup: v11 database is available");
+
+ // Mostly sanity checks our starting DB to make sure it's setup as we expect
+ // it to be.
+ let dbFile = gSyncProfile.clone();
+ dbFile.append(kDBName);
+ let db = storageSvc.openUnsharedDatabase(dbFile);
+
+ let stmt = db.createStatement("PRAGMA journal_mode");
+ do_check_true(stmt.executeStep());
+ // WAL journal mode should have been unset this database when it was migrated
+ // down to v10.
+ do_check_neq(stmt.getString(0).toLowerCase(), "wal");
+ stmt.finalize();
+
+ do_check_true(db.indexExists("moz_bookmarks_guid_uniqueindex"));
+ do_check_true(db.indexExists("moz_places_guid_uniqueindex"));
+
+ // There should be a non-zero amount of bookmarks without a guid.
+ stmt = db.createStatement(
+ "SELECT COUNT(1) "
+ + "FROM moz_bookmarks "
+ + "WHERE guid IS NULL "
+ );
+ do_check_true(stmt.executeStep());
+ do_check_neq(stmt.getInt32(0), 0);
+ stmt.finalize();
+
+ // There should be a non-zero amount of places without a guid.
+ stmt = db.createStatement(
+ "SELECT COUNT(1) "
+ + "FROM moz_places "
+ + "WHERE guid IS NULL "
+ );
+ do_check_true(stmt.executeStep());
+ do_check_neq(stmt.getInt32(0), 0);
+ stmt.finalize();
+
+ // Check our schema version to make sure it is actually at 10.
+ do_check_eq(db.schemaVersion, 10);
+
+ db.close();
+
+ run_next_test();
+});
+
+add_test(function test_history_guids() {
+ let engine = new HistoryEngine(Service);
+ let store = engine._store;
+
+ let places = [
+ {
+ uri: fxuri,
+ title: "Get Firefox!",
+ visits: [{
+ visitDate: Date.now() * 1000,
+ transitionType: Ci.nsINavHistoryService.TRANSITION_LINK
+ }]
+ },
+ {
+ uri: tburi,
+ title: "Get Thunderbird!",
+ visits: [{
+ visitDate: Date.now() * 1000,
+ transitionType: Ci.nsINavHistoryService.TRANSITION_LINK
+ }]
+ }
+ ];
+ PlacesUtils.asyncHistory.updatePlaces(places, {
+ handleError: function handleError() {
+ do_throw("Unexpected error in adding visit.");
+ },
+ handleResult: function handleResult() {},
+ handleCompletion: onVisitAdded
+ });
+
+ function onVisitAdded() {
+ let fxguid = store.GUIDForUri(fxuri, true);
+ let tbguid = store.GUIDForUri(tburi, true);
+ dump("fxguid: " + fxguid + "\n");
+ dump("tbguid: " + tbguid + "\n");
+
+ _("History: Verify GUIDs are added to the guid column.");
+ let connection = PlacesUtils.history
+ .QueryInterface(Ci.nsPIPlacesDatabase)
+ .DBConnection;
+ let stmt = connection.createAsyncStatement(
+ "SELECT id FROM moz_places WHERE guid = :guid");
+
+ stmt.params.guid = fxguid;
+ let result = Async.querySpinningly(stmt, ["id"]);
+ do_check_eq(result.length, 1);
+
+ stmt.params.guid = tbguid;
+ result = Async.querySpinningly(stmt, ["id"]);
+ do_check_eq(result.length, 1);
+ stmt.finalize();
+
+ _("History: Verify GUIDs weren't added to annotations.");
+ stmt = connection.createAsyncStatement(
+ "SELECT a.content AS guid FROM moz_annos a WHERE guid = :guid");
+
+ stmt.params.guid = fxguid;
+ result = Async.querySpinningly(stmt, ["guid"]);
+ do_check_eq(result.length, 0);
+
+ stmt.params.guid = tbguid;
+ result = Async.querySpinningly(stmt, ["guid"]);
+ do_check_eq(result.length, 0);
+ stmt.finalize();
+
+ run_next_test();
+ }
+});
+
+add_test(function test_bookmark_guids() {
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+
+ let fxid = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder,
+ fxuri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Firefox!");
+ let tbid = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.toolbarFolder,
+ tburi,
+ PlacesUtils.bookmarks.DEFAULT_INDEX,
+ "Get Thunderbird!");
+
+ let fxguid = store.GUIDForId(fxid);
+ let tbguid = store.GUIDForId(tbid);
+
+ _("Bookmarks: Verify GUIDs are added to the guid column.");
+ let connection = PlacesUtils.history
+ .QueryInterface(Ci.nsPIPlacesDatabase)
+ .DBConnection;
+ let stmt = connection.createAsyncStatement(
+ "SELECT id FROM moz_bookmarks WHERE guid = :guid");
+
+ stmt.params.guid = fxguid;
+ let result = Async.querySpinningly(stmt, ["id"]);
+ do_check_eq(result.length, 1);
+ do_check_eq(result[0].id, fxid);
+
+ stmt.params.guid = tbguid;
+ result = Async.querySpinningly(stmt, ["id"]);
+ do_check_eq(result.length, 1);
+ do_check_eq(result[0].id, tbid);
+ stmt.finalize();
+
+ _("Bookmarks: Verify GUIDs weren't added to annotations.");
+ stmt = connection.createAsyncStatement(
+ "SELECT a.content AS guid FROM moz_items_annos a WHERE guid = :guid");
+
+ stmt.params.guid = fxguid;
+ result = Async.querySpinningly(stmt, ["guid"]);
+ do_check_eq(result.length, 0);
+
+ stmt.params.guid = tbguid;
+ result = Async.querySpinningly(stmt, ["guid"]);
+ do_check_eq(result.length, 0);
+ stmt.finalize();
+
+ run_next_test();
+});
+
+function run_test() {
+ setPlacesDatabase("places_v10_from_v11.sqlite");
+
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_postqueue.js b/services/sync/tests/unit/test_postqueue.js
new file mode 100644
index 000000000..e60008a96
--- /dev/null
+++ b/services/sync/tests/unit/test_postqueue.js
@@ -0,0 +1,455 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+let { PostQueue } = Cu.import("resource://services-sync/record.js", {});
+
+initTestLogging("Trace");
+
+function makeRecord(nbytes) {
+ // make a string 2-bytes less - the added quotes will make it correct.
+ return {
+ toJSON: () => "x".repeat(nbytes-2),
+ }
+}
+
+function makePostQueue(config, lastModTime, responseGenerator) {
+ let stats = {
+ posts: [],
+ }
+ let poster = (data, headers, batch, commit) => {
+ let thisPost = { nbytes: data.length, batch, commit };
+ if (headers.length) {
+ thisPost.headers = headers;
+ }
+ stats.posts.push(thisPost);
+ return responseGenerator.next().value;
+ }
+
+ let done = () => {}
+ let pq = new PostQueue(poster, lastModTime, config, getTestLogger(), done);
+ return { pq, stats };
+}
+
+add_test(function test_simple() {
+ let config = {
+ max_post_bytes: 1000,
+ max_post_records: 100,
+ max_batch_bytes: Infinity,
+ max_batch_records: Infinity,
+ }
+
+ const time = 11111111;
+
+ function* responseGenerator() {
+ yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 100, 'x-last-modified': time + 100 } };
+ }
+
+ let { pq, stats } = makePostQueue(config, time, responseGenerator());
+ pq.enqueue(makeRecord(10));
+ pq.flush(true);
+
+ deepEqual(stats.posts, [{
+ nbytes: 12, // expect our 10 byte record plus "[]" to wrap it.
+ commit: true, // we don't know if we have batch semantics, so committed.
+ headers: [["x-if-unmodified-since", time]],
+ batch: "true"}]);
+
+ run_next_test();
+});
+
+// Test we do the right thing when we need to make multiple posts when there
+// are no batch semantics
+add_test(function test_max_post_bytes_no_batch() {
+ let config = {
+ max_post_bytes: 50,
+ max_post_records: 4,
+ max_batch_bytes: Infinity,
+ max_batch_records: Infinity,
+ }
+
+ const time = 11111111;
+ function* responseGenerator() {
+ yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 100, 'x-last-modified': time + 100 } };
+ yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 200, 'x-last-modified': time + 200 } };
+ }
+
+ let { pq, stats } = makePostQueue(config, time, responseGenerator());
+ pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]"
+ pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]"
+ pq.enqueue(makeRecord(20)); // this will exceed our byte limit, so will be in the 2nd POST.
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ {
+ nbytes: 43, // 43 for the first post
+ commit: false,
+ headers: [["x-if-unmodified-since", time]],
+ batch: "true",
+ },{
+ nbytes: 22,
+ commit: false, // we know we aren't in a batch, so never commit.
+ headers: [["x-if-unmodified-since", time + 100]],
+ batch: null,
+ }
+ ]);
+ equal(pq.lastModified, time + 200);
+
+ run_next_test();
+});
+
+// Similar to the above, but we've hit max_records instead of max_bytes.
+add_test(function test_max_post_records_no_batch() {
+ let config = {
+ max_post_bytes: 100,
+ max_post_records: 2,
+ max_batch_bytes: Infinity,
+ max_batch_records: Infinity,
+ }
+
+ const time = 11111111;
+
+ function* responseGenerator() {
+ yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 100, 'x-last-modified': time + 100 } };
+ yield { success: true, status: 200, headers: { 'x-weave-timestamp': time + 200, 'x-last-modified': time + 200 } };
+ }
+
+ let { pq, stats } = makePostQueue(config, time, responseGenerator());
+ pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]"
+ pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]"
+ pq.enqueue(makeRecord(20)); // this will exceed our records limit, so will be in the 2nd POST.
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ {
+ nbytes: 43, // 43 for the first post
+ commit: false,
+ batch: "true",
+ headers: [["x-if-unmodified-since", time]],
+ },{
+ nbytes: 22,
+ commit: false, // we know we aren't in a batch, so never commit.
+ batch: null,
+ headers: [["x-if-unmodified-since", time + 100]],
+ }
+ ]);
+ equal(pq.lastModified, time + 200);
+
+ run_next_test();
+});
+
+// Batch tests.
+
+// Test making a single post when batch semantics are in place.
+add_test(function test_single_batch() {
+ let config = {
+ max_post_bytes: 1000,
+ max_post_records: 100,
+ max_batch_bytes: 2000,
+ max_batch_records: 200,
+ }
+ const time = 11111111;
+ function* responseGenerator() {
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 },
+ };
+ }
+
+ let { pq, stats } = makePostQueue(config, time, responseGenerator());
+ ok(pq.enqueue(makeRecord(10)).enqueued);
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ {
+ nbytes: 12, // expect our 10 byte record plus "[]" to wrap it.
+ commit: true, // we don't know if we have batch semantics, so committed.
+ batch: "true",
+ headers: [["x-if-unmodified-since", time]],
+ }
+ ]);
+
+ run_next_test();
+});
+
+// Test we do the right thing when we need to make multiple posts when there
+// are batch semantics in place.
+add_test(function test_max_post_bytes_batch() {
+ let config = {
+ max_post_bytes: 50,
+ max_post_records: 4,
+ max_batch_bytes: 5000,
+ max_batch_records: 100,
+ }
+
+ const time = 11111111;
+ function* responseGenerator() {
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 },
+ };
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time + 200, 'x-weave-timestamp': time + 200 },
+ };
+ }
+
+ let { pq, stats } = makePostQueue(config, time, responseGenerator());
+ ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]"
+ ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
+ ok(pq.enqueue(makeRecord(20)).enqueued); // this will exceed our byte limit, so will be in the 2nd POST.
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ {
+ nbytes: 43, // 43 for the first post
+ commit: false,
+ batch: "true",
+ headers: [['x-if-unmodified-since', time]],
+ },{
+ nbytes: 22,
+ commit: true,
+ batch: 1234,
+ headers: [['x-if-unmodified-since', time]],
+ }
+ ]);
+
+ equal(pq.lastModified, time + 200);
+
+ run_next_test();
+});
+
+// Test we do the right thing when the batch bytes limit is exceeded.
+add_test(function test_max_post_bytes_batch() {
+ let config = {
+ max_post_bytes: 50,
+ max_post_records: 20,
+ max_batch_bytes: 70,
+ max_batch_records: 100,
+ }
+
+ const time0 = 11111111;
+ const time1 = 22222222;
+ function* responseGenerator() {
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time0, 'x-weave-timestamp': time0 + 100 },
+ };
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 },
+ };
+ yield { success: true, status: 202, obj: { batch: 5678 },
+ headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 + 100 },
+ };
+ yield { success: true, status: 202, obj: { batch: 5678 },
+ headers: { 'x-last-modified': time1 + 200, 'x-weave-timestamp': time1 + 200 },
+ };
+ }
+
+ let { pq, stats } = makePostQueue(config, time0, responseGenerator());
+ ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]"
+ ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
+ // this will exceed our POST byte limit, so will be in the 2nd POST - but still in the first batch.
+ ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes for 2nd post, 55 bytes in the batch.
+ // this will exceed our batch byte limit, so will be in a new batch.
+ ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes in 3rd post/2nd batch
+ ok(pq.enqueue(makeRecord(20)).enqueued); // 43 bytes in 3rd post/2nd batch
+ // This will exceed POST byte limit, so will be in the 4th post, part of the 2nd batch.
+ ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes for 4th post/2nd batch
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ {
+ nbytes: 43, // 43 for the first post
+ commit: false,
+ batch: "true",
+ headers: [['x-if-unmodified-since', time0]],
+ },{
+ // second post of 22 bytes in the first batch, committing it.
+ nbytes: 22,
+ commit: true,
+ batch: 1234,
+ headers: [['x-if-unmodified-since', time0]],
+ }, {
+ // 3rd post of 43 bytes in a new batch, not yet committing it.
+ nbytes: 43,
+ commit: false,
+ batch: "true",
+ headers: [['x-if-unmodified-since', time1]],
+ },{
+ // 4th post of 22 bytes in second batch, committing it.
+ nbytes: 22,
+ commit: true,
+ batch: 5678,
+ headers: [['x-if-unmodified-since', time1]],
+ },
+ ]);
+
+ equal(pq.lastModified, time1 + 200);
+
+ run_next_test();
+});
+
+// Test we split up the posts when we exceed the record limit when batch semantics
+// are in place.
+add_test(function test_max_post_bytes_batch() {
+ let config = {
+ max_post_bytes: 1000,
+ max_post_records: 2,
+ max_batch_bytes: 5000,
+ max_batch_records: 100,
+ }
+
+ const time = 11111111;
+ function* responseGenerator() {
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 },
+ };
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time + 200, 'x-weave-timestamp': time + 200 },
+ };
+ }
+
+ let { pq, stats } = makePostQueue(config, time, responseGenerator());
+ ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]"
+ ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
+ ok(pq.enqueue(makeRecord(20)).enqueued); // will exceed record limit, so will be in 2nd post.
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ {
+ nbytes: 43, // 43 for the first post
+ commit: false,
+ batch: "true",
+ headers: [['x-if-unmodified-since', time]],
+ },{
+ nbytes: 22,
+ commit: true,
+ batch: 1234,
+ headers: [['x-if-unmodified-since', time]],
+ }
+ ]);
+
+ equal(pq.lastModified, time + 200);
+
+ run_next_test();
+});
+
+// Test that a single huge record fails to enqueue
+add_test(function test_huge_record() {
+ let config = {
+ max_post_bytes: 50,
+ max_post_records: 100,
+ max_batch_bytes: 5000,
+ max_batch_records: 100,
+ }
+
+ const time = 11111111;
+ function* responseGenerator() {
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time, 'x-weave-timestamp': time + 100 },
+ };
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time + 200, 'x-weave-timestamp': time + 200 },
+ };
+ }
+
+ let { pq, stats } = makePostQueue(config, time, responseGenerator());
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+
+ let { enqueued, error } = pq.enqueue(makeRecord(1000));
+ ok(!enqueued);
+ notEqual(error, undefined);
+
+ // make sure that we keep working, skipping the bad record entirely
+ // (handling the error the queue reported is left up to caller)
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ {
+ nbytes: 43, // 43 for the first post
+ commit: false,
+ batch: "true",
+ headers: [['x-if-unmodified-since', time]],
+ },{
+ nbytes: 22,
+ commit: true,
+ batch: 1234,
+ headers: [['x-if-unmodified-since', time]],
+ }
+ ]);
+
+ equal(pq.lastModified, time + 200);
+
+ run_next_test();
+});
+
+// Test we do the right thing when the batch record limit is exceeded.
+add_test(function test_max_records_batch() {
+ let config = {
+ max_post_bytes: 1000,
+ max_post_records: 3,
+ max_batch_bytes: 10000,
+ max_batch_records: 5,
+ }
+
+ const time0 = 11111111;
+ const time1 = 22222222;
+ function* responseGenerator() {
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time0, 'x-weave-timestamp': time0 + 100 },
+ };
+ yield { success: true, status: 202, obj: { batch: 1234 },
+ headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 },
+ };
+ yield { success: true, status: 202, obj: { batch: 5678 },
+ headers: { 'x-last-modified': time1, 'x-weave-timestamp': time1 + 100 },
+ };
+ yield { success: true, status: 202, obj: { batch: 5678 },
+ headers: { 'x-last-modified': time1 + 200, 'x-weave-timestamp': time1 + 200 },
+ };
+ }
+
+ let { pq, stats } = makePostQueue(config, time0, responseGenerator());
+
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+
+ ok(pq.enqueue(makeRecord(20)).enqueued);
+
+ pq.flush(true);
+
+ deepEqual(stats.posts, [
+ { // 3 records
+ nbytes: 64,
+ commit: false,
+ batch: "true",
+ headers: [['x-if-unmodified-since', time0]],
+ },{ // 2 records -- end batch1
+ nbytes: 43,
+ commit: true,
+ batch: 1234,
+ headers: [['x-if-unmodified-since', time0]],
+ }, { // 3 records
+ nbytes: 64,
+ commit: false,
+ batch: "true",
+ headers: [['x-if-unmodified-since', time1]],
+ },{ // 1 record -- end batch2
+ nbytes: 22,
+ commit: true,
+ batch: 5678,
+ headers: [['x-if-unmodified-since', time1]],
+ },
+ ]);
+
+ equal(pq.lastModified, time1 + 200);
+
+ run_next_test();
+}); \ No newline at end of file
diff --git a/services/sync/tests/unit/test_prefs_store.js b/services/sync/tests/unit/test_prefs_store.js
new file mode 100644
index 000000000..9c321bceb
--- /dev/null
+++ b/services/sync/tests/unit/test_prefs_store.js
@@ -0,0 +1,168 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/LightweightThemeManager.jsm");
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-sync/engines/prefs.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+const PREFS_GUID = CommonUtils.encodeBase64URL(Services.appinfo.ID);
+
+loadAddonTestFunctions();
+startupManager();
+
+function makePersona(id) {
+ return {
+ id: id || Math.random().toString(),
+ name: Math.random().toString(),
+ headerURL: "http://localhost:1234/a"
+ };
+}
+
+function run_test() {
+ _("Test fixtures.");
+ // read our custom prefs file before doing anything.
+ Services.prefs.readUserPrefs(do_get_file("prefs_test_prefs_store.js"));
+ // Now we've read from this file, any writes the pref service makes will be
+ // back to this prefs_test_prefs_store.js directly in the obj dir. This
+ // upsets things in confusing ways :) We avoid this by explicitly telling the
+ // pref service to use a file in our profile dir.
+ let prefFile = do_get_profile();
+ prefFile.append("prefs.js");
+ Services.prefs.savePrefFile(prefFile);
+ Services.prefs.readUserPrefs(prefFile);
+
+ let store = Service.engineManager.get("prefs")._store;
+ let prefs = new Preferences();
+ try {
+
+ _("The GUID corresponds to XUL App ID.");
+ let allIDs = store.getAllIDs();
+ let ids = Object.keys(allIDs);
+ do_check_eq(ids.length, 1);
+ do_check_eq(ids[0], PREFS_GUID);
+ do_check_true(allIDs[PREFS_GUID], true);
+
+ do_check_true(store.itemExists(PREFS_GUID));
+ do_check_false(store.itemExists("random-gibberish"));
+
+ _("Unknown prefs record is created as deleted.");
+ let record = store.createRecord("random-gibberish", "prefs");
+ do_check_true(record.deleted);
+
+ _("Prefs record contains only prefs that should be synced.");
+ record = store.createRecord(PREFS_GUID, "prefs");
+ do_check_eq(record.value["testing.int"], 123);
+ do_check_eq(record.value["testing.string"], "ohai");
+ do_check_eq(record.value["testing.bool"], true);
+ // non-existing prefs get null as the value
+ do_check_eq(record.value["testing.nonexistent"], null);
+ // as do prefs that have a default value.
+ do_check_eq(record.value["testing.default"], null);
+ do_check_false("testing.turned.off" in record.value);
+ do_check_false("testing.not.turned.on" in record.value);
+
+ _("Prefs record contains non-default pref sync prefs too.");
+ do_check_eq(record.value["services.sync.prefs.sync.testing.int"], null);
+ do_check_eq(record.value["services.sync.prefs.sync.testing.string"], null);
+ do_check_eq(record.value["services.sync.prefs.sync.testing.bool"], null);
+ do_check_eq(record.value["services.sync.prefs.sync.testing.dont.change"], null);
+ // but this one is a user_pref so *will* be synced.
+ do_check_eq(record.value["services.sync.prefs.sync.testing.turned.off"], false);
+ do_check_eq(record.value["services.sync.prefs.sync.testing.nonexistent"], null);
+ do_check_eq(record.value["services.sync.prefs.sync.testing.default"], null);
+
+ _("Update some prefs, including one that's to be reset/deleted.");
+ Svc.Prefs.set("testing.deleteme", "I'm going to be deleted!");
+ record = new PrefRec("prefs", PREFS_GUID);
+ record.value = {
+ "testing.int": 42,
+ "testing.string": "im in ur prefs",
+ "testing.bool": false,
+ "testing.deleteme": null,
+ "testing.somepref": "im a new pref from other device",
+ "services.sync.prefs.sync.testing.somepref": true
+ };
+ store.update(record);
+ do_check_eq(prefs.get("testing.int"), 42);
+ do_check_eq(prefs.get("testing.string"), "im in ur prefs");
+ do_check_eq(prefs.get("testing.bool"), false);
+ do_check_eq(prefs.get("testing.deleteme"), undefined);
+ do_check_eq(prefs.get("testing.dont.change"), "Please don't change me.");
+ do_check_eq(prefs.get("testing.somepref"), "im a new pref from other device");
+ do_check_eq(Svc.Prefs.get("prefs.sync.testing.somepref"), true);
+
+ _("Enable persona");
+ // Ensure we don't go to the network to fetch personas and end up leaking
+ // stuff.
+ Services.io.offline = true;
+ do_check_false(!!prefs.get("lightweightThemes.selectedThemeID"));
+ do_check_eq(LightweightThemeManager.currentTheme, null);
+
+ let persona1 = makePersona();
+ let persona2 = makePersona();
+ let usedThemes = JSON.stringify([persona1, persona2]);
+ record.value = {
+ "lightweightThemes.selectedThemeID": persona1.id,
+ "lightweightThemes.usedThemes": usedThemes
+ };
+ store.update(record);
+ do_check_eq(prefs.get("lightweightThemes.selectedThemeID"), persona1.id);
+ do_check_true(Utils.deepEquals(LightweightThemeManager.currentTheme,
+ persona1));
+
+ _("Disable persona");
+ record.value = {
+ "lightweightThemes.selectedThemeID": null,
+ "lightweightThemes.usedThemes": usedThemes
+ };
+ store.update(record);
+ do_check_false(!!prefs.get("lightweightThemes.selectedThemeID"));
+ do_check_eq(LightweightThemeManager.currentTheme, null);
+
+ _("Only the current app's preferences are applied.");
+ record = new PrefRec("prefs", "some-fake-app");
+ record.value = {
+ "testing.int": 98
+ };
+ store.update(record);
+ do_check_eq(prefs.get("testing.int"), 42);
+
+ _("The light-weight theme preference is handled correctly.");
+ let lastThemeID = undefined;
+ let orig_updateLightWeightTheme = store._updateLightWeightTheme;
+ store._updateLightWeightTheme = function(themeID) {
+ lastThemeID = themeID;
+ }
+ try {
+ record = new PrefRec("prefs", PREFS_GUID);
+ record.value = {
+ "testing.int": 42,
+ };
+ store.update(record);
+ do_check_true(lastThemeID === undefined,
+ "should not have tried to change the theme with an unrelated pref.");
+ Services.prefs.setCharPref("lightweightThemes.selectedThemeID", "foo");
+ record.value = {
+ "lightweightThemes.selectedThemeID": "foo",
+ };
+ store.update(record);
+ do_check_true(lastThemeID === undefined,
+ "should not have tried to change the theme when the incoming pref matches current value.");
+
+ record.value = {
+ "lightweightThemes.selectedThemeID": "bar",
+ };
+ store.update(record);
+ do_check_eq(lastThemeID, "bar",
+ "should have tried to change the theme when the incoming pref was different.");
+ } finally {
+ store._updateLightWeightTheme = orig_updateLightWeightTheme;
+ }
+ } finally {
+ prefs.resetBranch("");
+ }
+}
diff --git a/services/sync/tests/unit/test_prefs_tracker.js b/services/sync/tests/unit/test_prefs_tracker.js
new file mode 100644
index 000000000..17ccaa43e
--- /dev/null
+++ b/services/sync/tests/unit/test_prefs_tracker.js
@@ -0,0 +1,88 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines/prefs.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ let engine = Service.engineManager.get("prefs");
+ let tracker = engine._tracker;
+
+ // Don't write out by default.
+ tracker.persistChangedIDs = false;
+
+ let prefs = new Preferences();
+
+ try {
+
+ _("tracker.modified corresponds to preference.");
+ do_check_eq(Svc.Prefs.get("engine.prefs.modified"), undefined);
+ do_check_false(tracker.modified);
+
+ tracker.modified = true;
+ do_check_eq(Svc.Prefs.get("engine.prefs.modified"), true);
+ do_check_true(tracker.modified);
+
+ _("Engine's getChangedID() just returns the one GUID we have.");
+ let changedIDs = engine.getChangedIDs();
+ let ids = Object.keys(changedIDs);
+ do_check_eq(ids.length, 1);
+ do_check_eq(ids[0], CommonUtils.encodeBase64URL(Services.appinfo.ID));
+
+ Svc.Prefs.set("engine.prefs.modified", false);
+ do_check_false(tracker.modified);
+
+ _("No modified state, so no changed IDs.");
+ do_check_empty(engine.getChangedIDs());
+
+ _("Initial score is 0");
+ do_check_eq(tracker.score, 0);
+
+ _("Test fixtures.");
+ Svc.Prefs.set("prefs.sync.testing.int", true);
+
+ _("Test fixtures haven't upped the tracker score yet because it hasn't started tracking yet.");
+ do_check_eq(tracker.score, 0);
+
+ _("Tell the tracker to start tracking changes.");
+ Svc.Obs.notify("weave:engine:start-tracking");
+ prefs.set("testing.int", 23);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
+ do_check_eq(tracker.modified, true);
+
+ _("Clearing changed IDs reset modified status.");
+ tracker.clearChangedIDs();
+ do_check_eq(tracker.modified, false);
+
+ _("Resetting a pref ups the score, too.");
+ prefs.reset("testing.int");
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
+ do_check_eq(tracker.modified, true);
+ tracker.clearChangedIDs();
+
+ _("So does changing a pref sync pref.");
+ Svc.Prefs.set("prefs.sync.testing.int", false);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ do_check_eq(tracker.modified, true);
+ tracker.clearChangedIDs();
+
+ _("Now that the pref sync pref has been flipped, changes to it won't be picked up.");
+ prefs.set("testing.int", 42);
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ do_check_eq(tracker.modified, false);
+ tracker.clearChangedIDs();
+
+ _("Changing some other random pref won't do anything.");
+ prefs.set("testing.other", "blergh");
+ do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
+ do_check_eq(tracker.modified, false);
+
+ } finally {
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ prefs.resetBranch("");
+ }
+}
diff --git a/services/sync/tests/unit/test_records_crypto.js b/services/sync/tests/unit/test_records_crypto.js
new file mode 100644
index 000000000..392a746ef
--- /dev/null
+++ b/services/sync/tests/unit/test_records_crypto.js
@@ -0,0 +1,182 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+var cryptoWrap;
+
+function crypted_resource_handler(metadata, response) {
+ let obj = {id: "resource",
+ modified: cryptoWrap.modified,
+ payload: JSON.stringify(cryptoWrap.payload)};
+ return httpd_basic_auth_handler(JSON.stringify(obj), metadata, response);
+}
+
+function prepareCryptoWrap(collection, id) {
+ let w = new CryptoWrapper();
+ w.cleartext.stuff = "my payload here";
+ w.collection = collection;
+ w.id = id;
+ return w;
+}
+
+function run_test() {
+ let server;
+ do_test_pending();
+
+ ensureLegacyIdentityManager();
+ Service.identity.username = "john@example.com";
+ Service.identity.syncKey = "a-abcde-abcde-abcde-abcde-abcde";
+ let keyBundle = Service.identity.syncKeyBundle;
+
+ try {
+ let log = Log.repository.getLogger("Test");
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ log.info("Setting up server and authenticator");
+
+ server = httpd_setup({"/steam/resource": crypted_resource_handler});
+
+ log.info("Creating a record");
+
+ let cryptoUri = "http://localhost:8080/crypto/steam";
+ cryptoWrap = prepareCryptoWrap("steam", "resource");
+
+ log.info("cryptoWrap: " + cryptoWrap.toString());
+
+ log.info("Encrypting a record");
+
+ cryptoWrap.encrypt(keyBundle);
+ log.info("Ciphertext is " + cryptoWrap.ciphertext);
+ do_check_true(cryptoWrap.ciphertext != null);
+
+ let firstIV = cryptoWrap.IV;
+
+ log.info("Decrypting the record");
+
+ let payload = cryptoWrap.decrypt(keyBundle);
+ do_check_eq(payload.stuff, "my payload here");
+ do_check_neq(payload, cryptoWrap.payload); // wrap.data.payload is the encrypted one
+
+ log.info("Make sure multiple decrypts cause failures");
+ let error = "";
+ try {
+ payload = cryptoWrap.decrypt(keyBundle);
+ }
+ catch(ex) {
+ error = ex;
+ }
+ do_check_eq(error, "No ciphertext: nothing to decrypt?");
+
+ log.info("Re-encrypting the record with alternate payload");
+
+ cryptoWrap.cleartext.stuff = "another payload";
+ cryptoWrap.encrypt(keyBundle);
+ let secondIV = cryptoWrap.IV;
+ payload = cryptoWrap.decrypt(keyBundle);
+ do_check_eq(payload.stuff, "another payload");
+
+ log.info("Make sure multiple encrypts use different IVs");
+ do_check_neq(firstIV, secondIV);
+
+ log.info("Make sure differing ids cause failures");
+ cryptoWrap.encrypt(keyBundle);
+ cryptoWrap.data.id = "other";
+ error = "";
+ try {
+ cryptoWrap.decrypt(keyBundle);
+ }
+ catch(ex) {
+ error = ex;
+ }
+ do_check_eq(error, "Record id mismatch: resource != other");
+
+ log.info("Make sure wrong hmacs cause failures");
+ cryptoWrap.encrypt(keyBundle);
+ cryptoWrap.hmac = "foo";
+ error = "";
+ try {
+ cryptoWrap.decrypt(keyBundle);
+ }
+ catch(ex) {
+ error = ex;
+ }
+ do_check_eq(error.substr(0, 42), "Record SHA256 HMAC mismatch: should be foo");
+
+ // Checking per-collection keys and default key handling.
+
+ generateNewKeys(Service.collectionKeys);
+ let bu = "http://localhost:8080/storage/bookmarks/foo";
+ let bookmarkItem = prepareCryptoWrap("bookmarks", "foo");
+ bookmarkItem.encrypt(Service.collectionKeys.keyForCollection("bookmarks"));
+ log.info("Ciphertext is " + bookmarkItem.ciphertext);
+ do_check_true(bookmarkItem.ciphertext != null);
+ log.info("Decrypting the record explicitly with the default key.");
+ do_check_eq(bookmarkItem.decrypt(Service.collectionKeys._default).stuff, "my payload here");
+
+ // Per-collection keys.
+ // Generate a key for "bookmarks".
+ generateNewKeys(Service.collectionKeys, ["bookmarks"]);
+ bookmarkItem = prepareCryptoWrap("bookmarks", "foo");
+ do_check_eq(bookmarkItem.collection, "bookmarks");
+
+ // Encrypt. This'll use the "bookmarks" encryption key, because we have a
+ // special key for it. The same key will need to be used for decryption.
+ bookmarkItem.encrypt(Service.collectionKeys.keyForCollection("bookmarks"));
+ do_check_true(bookmarkItem.ciphertext != null);
+
+ // Attempt to use the default key, because this is a collision that could
+ // conceivably occur in the real world. Decryption will error, because
+ // it's not the bookmarks key.
+ let err;
+ try {
+ bookmarkItem.decrypt(Service.collectionKeys._default);
+ } catch (ex) {
+ err = ex;
+ }
+ do_check_eq("Record SHA256 HMAC mismatch", err.substr(0, 27));
+
+ // Explicitly check that it's using the bookmarks key.
+ // This should succeed.
+ do_check_eq(bookmarkItem.decrypt(Service.collectionKeys.keyForCollection("bookmarks")).stuff,
+ "my payload here");
+
+ do_check_true(Service.collectionKeys.hasKeysFor(["bookmarks"]));
+
+ // Add a key for some new collection and verify that it isn't the
+ // default key.
+ do_check_false(Service.collectionKeys.hasKeysFor(["forms"]));
+ do_check_false(Service.collectionKeys.hasKeysFor(["bookmarks", "forms"]));
+ let oldFormsKey = Service.collectionKeys.keyForCollection("forms");
+ do_check_eq(oldFormsKey, Service.collectionKeys._default);
+ let newKeys = Service.collectionKeys.ensureKeysFor(["forms"]);
+ do_check_true(newKeys.hasKeysFor(["forms"]));
+ do_check_true(newKeys.hasKeysFor(["bookmarks", "forms"]));
+ let newFormsKey = newKeys.keyForCollection("forms");
+ do_check_neq(newFormsKey, oldFormsKey);
+
+ // Verify that this doesn't overwrite keys
+ let regetKeys = newKeys.ensureKeysFor(["forms"]);
+ do_check_eq(regetKeys.keyForCollection("forms"), newFormsKey);
+
+ const emptyKeys = new CollectionKeyManager();
+ payload = {
+ default: Service.collectionKeys._default.keyPairB64,
+ collections: {}
+ };
+ // Verify that not passing `modified` doesn't throw
+ emptyKeys.setContents(payload, null);
+
+ log.info("Done!");
+ }
+ finally {
+ server.stop(do_test_finished);
+ }
+}
diff --git a/services/sync/tests/unit/test_records_wbo.js b/services/sync/tests/unit/test_records_wbo.js
new file mode 100644
index 000000000..e3277b0a7
--- /dev/null
+++ b/services/sync/tests/unit/test_records_wbo.js
@@ -0,0 +1,86 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+
+function test_toJSON() {
+ _("Create a record, for now without a TTL.");
+ let wbo = new WBORecord("coll", "a_record");
+ wbo.modified = 12345;
+ wbo.sortindex = 42;
+ wbo.payload = {};
+
+ _("Verify that the JSON representation contains the WBO properties, but not TTL.");
+ let json = JSON.parse(JSON.stringify(wbo));
+ do_check_eq(json.modified, 12345);
+ do_check_eq(json.sortindex, 42);
+ do_check_eq(json.payload, "{}");
+ do_check_false("ttl" in json);
+
+ _("Set a TTL, make sure it's present in the JSON representation.");
+ wbo.ttl = 30*60;
+ json = JSON.parse(JSON.stringify(wbo));
+ do_check_eq(json.ttl, 30*60);
+}
+
+
+function test_fetch() {
+ let record = {id: "asdf-1234-asdf-1234",
+ modified: 2454725.98283,
+ payload: JSON.stringify({cheese: "roquefort"})};
+ let record2 = {id: "record2",
+ modified: 2454725.98284,
+ payload: JSON.stringify({cheese: "gruyere"})};
+ let coll = [{id: "record2",
+ modified: 2454725.98284,
+ payload: JSON.stringify({cheese: "gruyere"})}];
+
+ _("Setting up server.");
+ let server = httpd_setup({
+ "/record": httpd_handler(200, "OK", JSON.stringify(record)),
+ "/record2": httpd_handler(200, "OK", JSON.stringify(record2)),
+ "/coll": httpd_handler(200, "OK", JSON.stringify(coll))
+ });
+ do_test_pending();
+
+ try {
+ _("Fetching a WBO record");
+ let rec = new WBORecord("coll", "record");
+ rec.fetch(Service.resource(server.baseURI + "/record"));
+ do_check_eq(rec.id, "asdf-1234-asdf-1234"); // NOT "record"!
+
+ do_check_eq(rec.modified, 2454725.98283);
+ do_check_eq(typeof(rec.payload), "object");
+ do_check_eq(rec.payload.cheese, "roquefort");
+
+ _("Fetching a WBO record using the record manager");
+ let rec2 = Service.recordManager.get(server.baseURI + "/record2");
+ do_check_eq(rec2.id, "record2");
+ do_check_eq(rec2.modified, 2454725.98284);
+ do_check_eq(typeof(rec2.payload), "object");
+ do_check_eq(rec2.payload.cheese, "gruyere");
+ do_check_eq(Service.recordManager.response.status, 200);
+
+ // Testing collection extraction.
+ _("Extracting collection.");
+ let rec3 = new WBORecord("tabs", "foo"); // Create through constructor.
+ do_check_eq(rec3.collection, "tabs");
+
+ } finally {
+ server.stop(do_test_finished);
+ }
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ ensureLegacyIdentityManager();
+
+ test_toJSON();
+ test_fetch();
+}
diff --git a/services/sync/tests/unit/test_resource.js b/services/sync/tests/unit/test_resource.js
new file mode 100644
index 000000000..8f5534c92
--- /dev/null
+++ b/services/sync/tests/unit/test_resource.js
@@ -0,0 +1,502 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/util.js");
+
+var logger;
+
+var fetched = false;
+function server_open(metadata, response) {
+ let body;
+ if (metadata.method == "GET") {
+ fetched = true;
+ body = "This path exists";
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ } else {
+ body = "Wrong request method";
+ response.setStatusLine(metadata.httpVersion, 405, "Method Not Allowed");
+ }
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_protected(metadata, response) {
+ let body;
+
+ if (basic_auth_matches(metadata, "guest", "guest")) {
+ body = "This path exists and is protected";
+ response.setStatusLine(metadata.httpVersion, 200, "OK, authorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ } else {
+ body = "This path exists and is protected - failed";
+ response.setStatusLine(metadata.httpVersion, 401, "Unauthorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ }
+
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_404(metadata, response) {
+ let body = "File not found";
+ response.setStatusLine(metadata.httpVersion, 404, "Not Found");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+var pacFetched = false;
+function server_pac(metadata, response) {
+ pacFetched = true;
+ let body = 'function FindProxyForURL(url, host) { return "DIRECT"; }';
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/x-ns-proxy-autoconfig", false);
+ response.bodyOutputStream.write(body, body.length);
+}
+
+
+var sample_data = {
+ some: "sample_data",
+ injson: "format",
+ number: 42
+};
+
+function server_upload(metadata, response) {
+ let body;
+
+ let input = readBytesFromInputStream(metadata.bodyInputStream);
+ if (input == JSON.stringify(sample_data)) {
+ body = "Valid data upload via " + metadata.method;
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ } else {
+ body = "Invalid data upload via " + metadata.method + ': ' + input;
+ response.setStatusLine(metadata.httpVersion, 500, "Internal Server Error");
+ }
+
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_delete(metadata, response) {
+ let body;
+ if (metadata.method == "DELETE") {
+ body = "This resource has been deleted";
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ } else {
+ body = "Wrong request method";
+ response.setStatusLine(metadata.httpVersion, 405, "Method Not Allowed");
+ }
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_json(metadata, response) {
+ let body = JSON.stringify(sample_data);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+const TIMESTAMP = 1274380461;
+
+function server_timestamp(metadata, response) {
+ let body = "Thank you for your request";
+ response.setHeader("X-Weave-Timestamp", ''+TIMESTAMP, false);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_backoff(metadata, response) {
+ let body = "Hey, back off!";
+ response.setHeader("X-Weave-Backoff", '600', false);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_quota_notice(request, response) {
+ let body = "You're approaching quota.";
+ response.setHeader("X-Weave-Quota-Remaining", '1048576', false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_quota_error(request, response) {
+ let body = "14";
+ response.setHeader("X-Weave-Quota-Remaining", '-1024', false);
+ response.setStatusLine(request.httpVersion, 400, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_headers(metadata, response) {
+ let ignore_headers = ["host", "user-agent", "accept", "accept-language",
+ "accept-encoding", "accept-charset", "keep-alive",
+ "connection", "pragma", "cache-control",
+ "content-length"];
+ let headers = metadata.headers;
+ let header_names = [];
+ while (headers.hasMoreElements()) {
+ let header = headers.getNext().toString();
+ if (ignore_headers.indexOf(header) == -1) {
+ header_names.push(header);
+ }
+ }
+ header_names = header_names.sort();
+
+ headers = {};
+ for (let header of header_names) {
+ headers[header] = metadata.getHeader(header);
+ }
+ let body = JSON.stringify(headers);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function run_test() {
+ initTestLogging("Trace");
+
+ do_test_pending();
+
+ let logger = Log.repository.getLogger('Test');
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ let server = httpd_setup({
+ "/open": server_open,
+ "/protected": server_protected,
+ "/404": server_404,
+ "/upload": server_upload,
+ "/delete": server_delete,
+ "/json": server_json,
+ "/timestamp": server_timestamp,
+ "/headers": server_headers,
+ "/backoff": server_backoff,
+ "/pac1": server_pac,
+ "/quota-notice": server_quota_notice,
+ "/quota-error": server_quota_error
+ });
+
+ Svc.Prefs.set("network.numRetries", 1); // speed up test
+
+ // This apparently has to come first in order for our PAC URL to be hit.
+ // Don't put any other HTTP requests earlier in the file!
+ _("Testing handling of proxy auth redirection.");
+ PACSystemSettings.PACURI = server.baseURI + "/pac1";
+ installFakePAC();
+ let proxiedRes = new Resource(server.baseURI + "/open");
+ let content = proxiedRes.get();
+ do_check_true(pacFetched);
+ do_check_true(fetched);
+ do_check_eq(content, "This path exists");
+ pacFetched = fetched = false;
+ uninstallFakePAC();
+
+ _("Resource object members");
+ let res = new Resource(server.baseURI + "/open");
+ do_check_true(res.uri instanceof Ci.nsIURI);
+ do_check_eq(res.uri.spec, server.baseURI + "/open");
+ do_check_eq(res.spec, server.baseURI + "/open");
+ do_check_eq(typeof res.headers, "object");
+ do_check_eq(typeof res.authenticator, "object");
+ // Initially res.data is null since we haven't performed a GET or
+ // PUT/POST request yet.
+ do_check_eq(res.data, null);
+
+ _("GET a non-password-protected resource");
+ content = res.get();
+ do_check_eq(content, "This path exists");
+ do_check_eq(content.status, 200);
+ do_check_true(content.success);
+ // res.data has been updated with the result from the request
+ do_check_eq(res.data, content);
+
+ // Observe logging messages.
+ logger = res._log;
+ let dbg = logger.debug;
+ let debugMessages = [];
+ logger.debug = function (msg) {
+ debugMessages.push(msg);
+ dbg.call(this, msg);
+ }
+
+ // Since we didn't receive proper JSON data, accessing content.obj
+ // will result in a SyntaxError from JSON.parse.
+ // Furthermore, we'll have logged.
+ let didThrow = false;
+ try {
+ content.obj;
+ } catch (ex) {
+ didThrow = true;
+ }
+ do_check_true(didThrow);
+ do_check_eq(debugMessages.length, 1);
+ do_check_eq(debugMessages[0],
+ "Parse fail: Response body starts: \"\"This path exists\"\".");
+ logger.debug = dbg;
+
+ _("Test that the BasicAuthenticator doesn't screw up header case.");
+ let res1 = new Resource(server.baseURI + "/foo");
+ res1.setHeader("Authorization", "Basic foobar");
+ do_check_eq(res1.headers["authorization"], "Basic foobar");
+
+ _("GET a password protected resource (test that it'll fail w/o pass, no throw)");
+ let res2 = new Resource(server.baseURI + "/protected");
+ content = res2.get();
+ do_check_eq(content, "This path exists and is protected - failed");
+ do_check_eq(content.status, 401);
+ do_check_false(content.success);
+
+ _("GET a password protected resource");
+ let res3 = new Resource(server.baseURI + "/protected");
+ let identity = new IdentityManager();
+ let auth = identity.getBasicResourceAuthenticator("guest", "guest");
+ res3.authenticator = auth;
+ do_check_eq(res3.authenticator, auth);
+ content = res3.get();
+ do_check_eq(content, "This path exists and is protected");
+ do_check_eq(content.status, 200);
+ do_check_true(content.success);
+
+ _("GET a non-existent resource (test that it'll fail, but not throw)");
+ let res4 = new Resource(server.baseURI + "/404");
+ content = res4.get();
+ do_check_eq(content, "File not found");
+ do_check_eq(content.status, 404);
+ do_check_false(content.success);
+
+ // Check some headers of the 404 response
+ do_check_eq(content.headers.connection, "close");
+ do_check_eq(content.headers.server, "httpd.js");
+ do_check_eq(content.headers["content-length"], 14);
+
+ _("PUT to a resource (string)");
+ let res5 = new Resource(server.baseURI + "/upload");
+ content = res5.put(JSON.stringify(sample_data));
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("PUT to a resource (object)");
+ content = res5.put(sample_data);
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("PUT without data arg (uses resource.data) (string)");
+ res5.data = JSON.stringify(sample_data);
+ content = res5.put();
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("PUT without data arg (uses resource.data) (object)");
+ res5.data = sample_data;
+ content = res5.put();
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("POST to a resource (string)");
+ content = res5.post(JSON.stringify(sample_data));
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("POST to a resource (object)");
+ content = res5.post(sample_data);
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("POST without data arg (uses resource.data) (string)");
+ res5.data = JSON.stringify(sample_data);
+ content = res5.post();
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("POST without data arg (uses resource.data) (object)");
+ res5.data = sample_data;
+ content = res5.post();
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res5.data, content);
+
+ _("DELETE a resource");
+ let res6 = new Resource(server.baseURI + "/delete");
+ content = res6.delete();
+ do_check_eq(content, "This resource has been deleted")
+ do_check_eq(content.status, 200);
+
+ _("JSON conversion of response body");
+ let res7 = new Resource(server.baseURI + "/json");
+ content = res7.get();
+ do_check_eq(content, JSON.stringify(sample_data));
+ do_check_eq(content.status, 200);
+ do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
+
+ _("X-Weave-Timestamp header updates AsyncResource.serverTime");
+ // Before having received any response containing the
+ // X-Weave-Timestamp header, AsyncResource.serverTime is null.
+ do_check_eq(AsyncResource.serverTime, null);
+ let res8 = new Resource(server.baseURI + "/timestamp");
+ content = res8.get();
+ do_check_eq(AsyncResource.serverTime, TIMESTAMP);
+
+ _("GET: no special request headers");
+ let res9 = new Resource(server.baseURI + "/headers");
+ content = res9.get();
+ do_check_eq(content, '{}');
+
+ _("PUT: Content-Type defaults to text/plain");
+ content = res9.put('data');
+ do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
+
+ _("POST: Content-Type defaults to text/plain");
+ content = res9.post('data');
+ do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
+
+ _("setHeader(): setting simple header");
+ res9.setHeader('X-What-Is-Weave', 'awesome');
+ do_check_eq(res9.headers['x-what-is-weave'], 'awesome');
+ content = res9.get();
+ do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
+
+ _("setHeader(): setting multiple headers, overwriting existing header");
+ res9.setHeader('X-WHAT-is-Weave', 'more awesomer');
+ res9.setHeader('X-Another-Header', 'hello world');
+ do_check_eq(res9.headers['x-what-is-weave'], 'more awesomer');
+ do_check_eq(res9.headers['x-another-header'], 'hello world');
+ content = res9.get();
+ do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
+ "x-what-is-weave": "more awesomer"}));
+
+ _("Setting headers object");
+ res9.headers = {};
+ content = res9.get();
+ do_check_eq(content, "{}");
+
+ _("PUT/POST: override default Content-Type");
+ res9.setHeader('Content-Type', 'application/foobar');
+ do_check_eq(res9.headers['content-type'], 'application/foobar');
+ content = res9.put('data');
+ do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
+ content = res9.post('data');
+ do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
+
+
+ _("X-Weave-Backoff header notifies observer");
+ let backoffInterval;
+ function onBackoff(subject, data) {
+ backoffInterval = subject;
+ }
+ Observers.add("weave:service:backoff:interval", onBackoff);
+
+ let res10 = new Resource(server.baseURI + "/backoff");
+ content = res10.get();
+ do_check_eq(backoffInterval, 600);
+
+
+ _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
+ let quotaValue;
+ function onQuota(subject, data) {
+ quotaValue = subject;
+ }
+ Observers.add("weave:service:quota:remaining", onQuota);
+
+ res10 = new Resource(server.baseURI + "/quota-error");
+ content = res10.get();
+ do_check_eq(content.status, 400);
+ do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
+
+ res10 = new Resource(server.baseURI + "/quota-notice");
+ content = res10.get();
+ do_check_eq(content.status, 200);
+ do_check_eq(quotaValue, 1048576);
+
+
+ _("Error handling in _request() preserves exception information");
+ let error;
+ let res11 = new Resource("http://localhost:12345/does/not/exist");
+ try {
+ content = res11.get();
+ } catch(ex) {
+ error = ex;
+ }
+ do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
+ do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
+ do_check_eq(typeof error.stack, "string");
+
+ _("Checking handling of errors in onProgress.");
+ let res18 = new Resource(server.baseURI + "/json");
+ let onProgress = function(rec) {
+ // Provoke an XPC exception without a Javascript wrapper.
+ Services.io.newURI("::::::::", null, null);
+ };
+ res18._onProgress = onProgress;
+ let oldWarn = res18._log.warn;
+ let warnings = [];
+ res18._log.warn = function(msg) { warnings.push(msg) };
+ error = undefined;
+ try {
+ content = res18.get();
+ } catch (ex) {
+ error = ex;
+ }
+
+ // It throws and logs.
+ do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
+ do_check_eq(error, "Error: NS_ERROR_MALFORMED_URI");
+ // Note the strings haven't been formatted yet, but that's OK for this test.
+ do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
+ do_check_eq(warnings.pop(),
+ "Got exception calling onProgress handler during fetch of " +
+ server.baseURI + "/json");
+
+ // And this is what happens if JS throws an exception.
+ res18 = new Resource(server.baseURI + "/json");
+ onProgress = function(rec) {
+ throw "BOO!";
+ };
+ res18._onProgress = onProgress;
+ oldWarn = res18._log.warn;
+ warnings = [];
+ res18._log.warn = function(msg) { warnings.push(msg) };
+ error = undefined;
+ try {
+ content = res18.get();
+ } catch (ex) {
+ error = ex;
+ }
+
+ // It throws and logs.
+ do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
+ do_check_eq(error, "Error: NS_ERROR_XPC_JS_THREW_STRING");
+ do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
+ do_check_eq(warnings.pop(),
+ "Got exception calling onProgress handler during fetch of " +
+ server.baseURI + "/json");
+
+
+ _("Ensure channel timeouts are thrown appropriately.");
+ let res19 = new Resource(server.baseURI + "/json");
+ res19.ABORT_TIMEOUT = 0;
+ error = undefined;
+ try {
+ content = res19.get();
+ } catch (ex) {
+ error = ex;
+ }
+ do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
+
+ _("Testing URI construction.");
+ let args = [];
+ args.push("newer=" + 1234);
+ args.push("limit=" + 1234);
+ args.push("sort=" + 1234);
+
+ let query = "?" + args.join("&");
+
+ let uri1 = Utils.makeURI("http://foo/" + query)
+ .QueryInterface(Ci.nsIURL);
+ let uri2 = Utils.makeURI("http://foo/")
+ .QueryInterface(Ci.nsIURL);
+ uri2.query = query;
+ do_check_eq(uri1.query, uri2.query);
+ server.stop(do_test_finished);
+}
diff --git a/services/sync/tests/unit/test_resource_async.js b/services/sync/tests/unit/test_resource_async.js
new file mode 100644
index 000000000..0db91a1b5
--- /dev/null
+++ b/services/sync/tests/unit/test_resource_async.js
@@ -0,0 +1,730 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/util.js");
+
+var logger;
+
+var fetched = false;
+function server_open(metadata, response) {
+ let body;
+ if (metadata.method == "GET") {
+ fetched = true;
+ body = "This path exists";
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ } else {
+ body = "Wrong request method";
+ response.setStatusLine(metadata.httpVersion, 405, "Method Not Allowed");
+ }
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_protected(metadata, response) {
+ let body;
+
+ if (basic_auth_matches(metadata, "guest", "guest")) {
+ body = "This path exists and is protected";
+ response.setStatusLine(metadata.httpVersion, 200, "OK, authorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ } else {
+ body = "This path exists and is protected - failed";
+ response.setStatusLine(metadata.httpVersion, 401, "Unauthorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ }
+
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_404(metadata, response) {
+ let body = "File not found";
+ response.setStatusLine(metadata.httpVersion, 404, "Not Found");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+var pacFetched = false;
+function server_pac(metadata, response) {
+ _("Invoked PAC handler.");
+ pacFetched = true;
+ let body = 'function FindProxyForURL(url, host) { return "DIRECT"; }';
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/x-ns-proxy-autoconfig", false);
+ response.bodyOutputStream.write(body, body.length);
+}
+
+var sample_data = {
+ some: "sample_data",
+ injson: "format",
+ number: 42
+};
+
+function server_upload(metadata, response) {
+ let body;
+
+ let input = readBytesFromInputStream(metadata.bodyInputStream);
+ if (input == JSON.stringify(sample_data)) {
+ body = "Valid data upload via " + metadata.method;
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ } else {
+ body = "Invalid data upload via " + metadata.method + ': ' + input;
+ response.setStatusLine(metadata.httpVersion, 500, "Internal Server Error");
+ }
+
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_delete(metadata, response) {
+ let body;
+ if (metadata.method == "DELETE") {
+ body = "This resource has been deleted";
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ } else {
+ body = "Wrong request method";
+ response.setStatusLine(metadata.httpVersion, 405, "Method Not Allowed");
+ }
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_json(metadata, response) {
+ let body = JSON.stringify(sample_data);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+const TIMESTAMP = 1274380461;
+
+function server_timestamp(metadata, response) {
+ let body = "Thank you for your request";
+ response.setHeader("X-Weave-Timestamp", ''+TIMESTAMP, false);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_backoff(metadata, response) {
+ let body = "Hey, back off!";
+ response.setHeader("X-Weave-Backoff", '600', false);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_quota_notice(request, response) {
+ let body = "You're approaching quota.";
+ response.setHeader("X-Weave-Quota-Remaining", '1048576', false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_quota_error(request, response) {
+ let body = "14";
+ response.setHeader("X-Weave-Quota-Remaining", '-1024', false);
+ response.setStatusLine(request.httpVersion, 400, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function server_headers(metadata, response) {
+ let ignore_headers = ["host", "user-agent", "accept", "accept-language",
+ "accept-encoding", "accept-charset", "keep-alive",
+ "connection", "pragma", "cache-control",
+ "content-length"];
+ let headers = metadata.headers;
+ let header_names = [];
+ while (headers.hasMoreElements()) {
+ let header = headers.getNext().toString();
+ if (ignore_headers.indexOf(header) == -1) {
+ header_names.push(header);
+ }
+ }
+ header_names = header_names.sort();
+
+ headers = {};
+ for (let header of header_names) {
+ headers[header] = metadata.getHeader(header);
+ }
+ let body = JSON.stringify(headers);
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+var quotaValue;
+Observers.add("weave:service:quota:remaining",
+ function (subject) { quotaValue = subject; });
+
+function run_test() {
+ logger = Log.repository.getLogger('Test');
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ Svc.Prefs.set("network.numRetries", 1); // speed up test
+ run_next_test();
+}
+
+// This apparently has to come first in order for our PAC URL to be hit.
+// Don't put any other HTTP requests earlier in the file!
+add_test(function test_proxy_auth_redirect() {
+ _("Ensure that a proxy auth redirect (which switches out our channel) " +
+ "doesn't break AsyncResource.");
+ let server = httpd_setup({
+ "/open": server_open,
+ "/pac2": server_pac
+ });
+
+ PACSystemSettings.PACURI = server.baseURI + "/pac2";
+ installFakePAC();
+ let res = new AsyncResource(server.baseURI + "/open");
+ res.get(function (error, result) {
+ do_check_true(!error);
+ do_check_true(pacFetched);
+ do_check_true(fetched);
+ do_check_eq("This path exists", result);
+ pacFetched = fetched = false;
+ uninstallFakePAC();
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_new_channel() {
+ _("Ensure a redirect to a new channel is handled properly.");
+
+ let resourceRequested = false;
+ function resourceHandler(metadata, response) {
+ resourceRequested = true;
+
+ let body = "Test";
+ response.setHeader("Content-Type", "text/plain");
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ let locationURL;
+ function redirectHandler(metadata, response) {
+ let body = "Redirecting";
+ response.setStatusLine(metadata.httpVersion, 307, "TEMPORARY REDIRECT");
+ response.setHeader("Location", locationURL);
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ let server = httpd_setup({"/resource": resourceHandler,
+ "/redirect": redirectHandler});
+ locationURL = server.baseURI + "/resource";
+
+ let request = new AsyncResource(server.baseURI + "/redirect");
+ request.get(function onRequest(error, content) {
+ do_check_null(error);
+ do_check_true(resourceRequested);
+ do_check_eq(200, content.status);
+ do_check_true("content-type" in content.headers);
+ do_check_eq("text/plain", content.headers["content-type"]);
+
+ server.stop(run_next_test);
+ });
+});
+
+
+var server;
+
+add_test(function setup() {
+ server = httpd_setup({
+ "/open": server_open,
+ "/protected": server_protected,
+ "/404": server_404,
+ "/upload": server_upload,
+ "/delete": server_delete,
+ "/json": server_json,
+ "/timestamp": server_timestamp,
+ "/headers": server_headers,
+ "/backoff": server_backoff,
+ "/pac2": server_pac,
+ "/quota-notice": server_quota_notice,
+ "/quota-error": server_quota_error
+ });
+
+ run_next_test();
+});
+
+add_test(function test_members() {
+ _("Resource object members");
+ let uri = server.baseURI + "/open";
+ let res = new AsyncResource(uri);
+ do_check_true(res.uri instanceof Ci.nsIURI);
+ do_check_eq(res.uri.spec, uri);
+ do_check_eq(res.spec, uri);
+ do_check_eq(typeof res.headers, "object");
+ do_check_eq(typeof res.authenticator, "object");
+ // Initially res.data is null since we haven't performed a GET or
+ // PUT/POST request yet.
+ do_check_eq(res.data, null);
+
+ run_next_test();
+});
+
+add_test(function test_get() {
+ _("GET a non-password-protected resource");
+ let res = new AsyncResource(server.baseURI + "/open");
+ res.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "This path exists");
+ do_check_eq(content.status, 200);
+ do_check_true(content.success);
+ // res.data has been updated with the result from the request
+ do_check_eq(res.data, content);
+
+ // Observe logging messages.
+ let logger = res._log;
+ let dbg = logger.debug;
+ let debugMessages = [];
+ logger.debug = function (msg) {
+ debugMessages.push(msg);
+ dbg.call(this, msg);
+ }
+
+ // Since we didn't receive proper JSON data, accessing content.obj
+ // will result in a SyntaxError from JSON.parse
+ let didThrow = false;
+ try {
+ content.obj;
+ } catch (ex) {
+ didThrow = true;
+ }
+ do_check_true(didThrow);
+ do_check_eq(debugMessages.length, 1);
+ do_check_eq(debugMessages[0],
+ "Parse fail: Response body starts: \"\"This path exists\"\".");
+ logger.debug = dbg;
+
+ run_next_test();
+ });
+});
+
+add_test(function test_basicauth() {
+ _("Test that the BasicAuthenticator doesn't screw up header case.");
+ let res1 = new AsyncResource(server.baseURI + "/foo");
+ res1.setHeader("Authorization", "Basic foobar");
+ do_check_eq(res1._headers["authorization"], "Basic foobar");
+ do_check_eq(res1.headers["authorization"], "Basic foobar");
+
+ run_next_test();
+});
+
+add_test(function test_get_protected_fail() {
+ _("GET a password protected resource (test that it'll fail w/o pass, no throw)");
+ let res2 = new AsyncResource(server.baseURI + "/protected");
+ res2.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "This path exists and is protected - failed");
+ do_check_eq(content.status, 401);
+ do_check_false(content.success);
+ run_next_test();
+ });
+});
+
+add_test(function test_get_protected_success() {
+ _("GET a password protected resource");
+ let identity = new IdentityManager();
+ let auth = identity.getBasicResourceAuthenticator("guest", "guest");
+ let res3 = new AsyncResource(server.baseURI + "/protected");
+ res3.authenticator = auth;
+ do_check_eq(res3.authenticator, auth);
+ res3.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "This path exists and is protected");
+ do_check_eq(content.status, 200);
+ do_check_true(content.success);
+ run_next_test();
+ });
+});
+
+add_test(function test_get_404() {
+ _("GET a non-existent resource (test that it'll fail, but not throw)");
+ let res4 = new AsyncResource(server.baseURI + "/404");
+ res4.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "File not found");
+ do_check_eq(content.status, 404);
+ do_check_false(content.success);
+
+ // Check some headers of the 404 response
+ do_check_eq(content.headers.connection, "close");
+ do_check_eq(content.headers.server, "httpd.js");
+ do_check_eq(content.headers["content-length"], 14);
+
+ run_next_test();
+ });
+});
+
+add_test(function test_put_string() {
+ _("PUT to a resource (string)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.put(JSON.stringify(sample_data), function(error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_put_object() {
+ _("PUT to a resource (object)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.put(sample_data, function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_put_data_string() {
+ _("PUT without data arg (uses resource.data) (string)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.data = JSON.stringify(sample_data);
+ res_upload.put(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_put_data_object() {
+ _("PUT without data arg (uses resource.data) (object)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.data = sample_data;
+ res_upload.put(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via PUT");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_post_string() {
+ _("POST to a resource (string)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.post(JSON.stringify(sample_data), function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_post_object() {
+ _("POST to a resource (object)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.post(sample_data, function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_post_data_string() {
+ _("POST without data arg (uses resource.data) (string)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.data = JSON.stringify(sample_data);
+ res_upload.post(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_post_data_object() {
+ _("POST without data arg (uses resource.data) (object)");
+ let res_upload = new AsyncResource(server.baseURI + "/upload");
+ res_upload.data = sample_data;
+ res_upload.post(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "Valid data upload via POST");
+ do_check_eq(content.status, 200);
+ do_check_eq(res_upload.data, content);
+ run_next_test();
+ });
+});
+
+add_test(function test_delete() {
+ _("DELETE a resource");
+ let res6 = new AsyncResource(server.baseURI + "/delete");
+ res6.delete(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "This resource has been deleted");
+ do_check_eq(content.status, 200);
+ run_next_test();
+ });
+});
+
+add_test(function test_json_body() {
+ _("JSON conversion of response body");
+ let res7 = new AsyncResource(server.baseURI + "/json");
+ res7.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, JSON.stringify(sample_data));
+ do_check_eq(content.status, 200);
+ do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
+ run_next_test();
+ });
+});
+
+add_test(function test_weave_timestamp() {
+ _("X-Weave-Timestamp header updates AsyncResource.serverTime");
+ // Before having received any response containing the
+ // X-Weave-Timestamp header, AsyncResource.serverTime is null.
+ do_check_eq(AsyncResource.serverTime, null);
+ let res8 = new AsyncResource(server.baseURI + "/timestamp");
+ res8.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(AsyncResource.serverTime, TIMESTAMP);
+ run_next_test();
+ });
+});
+
+add_test(function test_get_no_headers() {
+ _("GET: no special request headers");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, '{}');
+ run_next_test();
+ });
+});
+
+add_test(function test_put_default_content_type() {
+ _("PUT: Content-Type defaults to text/plain");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.put('data', function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
+ run_next_test();
+ });
+});
+
+add_test(function test_post_default_content_type() {
+ _("POST: Content-Type defaults to text/plain");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.post('data', function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
+ run_next_test();
+ });
+});
+
+add_test(function test_setHeader() {
+ _("setHeader(): setting simple header");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.setHeader('X-What-Is-Weave', 'awesome');
+ do_check_eq(res_headers.headers['x-what-is-weave'], 'awesome');
+ res_headers.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
+ run_next_test();
+ });
+});
+
+add_test(function test_setHeader_overwrite() {
+ _("setHeader(): setting multiple headers, overwriting existing header");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.setHeader('X-WHAT-is-Weave', 'more awesomer');
+ res_headers.setHeader('X-Another-Header', 'hello world');
+ do_check_eq(res_headers.headers['x-what-is-weave'], 'more awesomer');
+ do_check_eq(res_headers.headers['x-another-header'], 'hello world');
+ res_headers.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
+ "x-what-is-weave": "more awesomer"}));
+
+ run_next_test();
+ });
+});
+
+add_test(function test_headers_object() {
+ _("Setting headers object");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.headers = {};
+ res_headers.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, "{}");
+ run_next_test();
+ });
+});
+
+add_test(function test_put_override_content_type() {
+ _("PUT: override default Content-Type");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.setHeader('Content-Type', 'application/foobar');
+ do_check_eq(res_headers.headers['content-type'], 'application/foobar');
+ res_headers.put('data', function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
+ run_next_test();
+ });
+});
+
+add_test(function test_post_override_content_type() {
+ _("POST: override default Content-Type");
+ let res_headers = new AsyncResource(server.baseURI + "/headers");
+ res_headers.setHeader('Content-Type', 'application/foobar');
+ res_headers.post('data', function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
+ run_next_test();
+ });
+});
+
+add_test(function test_weave_backoff() {
+ _("X-Weave-Backoff header notifies observer");
+ let backoffInterval;
+ function onBackoff(subject, data) {
+ backoffInterval = subject;
+ }
+ Observers.add("weave:service:backoff:interval", onBackoff);
+
+ let res10 = new AsyncResource(server.baseURI + "/backoff");
+ res10.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(backoffInterval, 600);
+ run_next_test();
+ });
+});
+
+add_test(function test_quota_error() {
+ _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
+ let res10 = new AsyncResource(server.baseURI + "/quota-error");
+ res10.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content.status, 400);
+ do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
+ run_next_test();
+ });
+});
+
+add_test(function test_quota_notice() {
+ let res10 = new AsyncResource(server.baseURI + "/quota-notice");
+ res10.get(function (error, content) {
+ do_check_eq(error, null);
+ do_check_eq(content.status, 200);
+ do_check_eq(quotaValue, 1048576);
+ run_next_test();
+ });
+});
+
+add_test(function test_preserve_exceptions() {
+ _("Error handling in ChannelListener etc. preserves exception information");
+ let res11 = new AsyncResource("http://localhost:12345/does/not/exist");
+ res11.get(function (error, content) {
+ do_check_neq(error, null);
+ do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
+ do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
+ run_next_test();
+ });
+});
+
+add_test(function test_xpc_exception_handling() {
+ _("Exception handling inside fetches.");
+ let res14 = new AsyncResource(server.baseURI + "/json");
+ res14._onProgress = function(rec) {
+ // Provoke an XPC exception without a Javascript wrapper.
+ Services.io.newURI("::::::::", null, null);
+ };
+ let warnings = [];
+ res14._log.warn = function(msg) { warnings.push(msg); };
+
+ res14.get(function (error, content) {
+ do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
+ do_check_eq(error.message, "NS_ERROR_MALFORMED_URI");
+ do_check_eq(content, null);
+ do_check_eq(warnings.pop(),
+ "Got exception calling onProgress handler during fetch of " +
+ server.baseURI + "/json");
+
+ run_next_test();
+ });
+});
+
+add_test(function test_js_exception_handling() {
+ _("JS exception handling inside fetches.");
+ let res15 = new AsyncResource(server.baseURI + "/json");
+ res15._onProgress = function(rec) {
+ throw "BOO!";
+ };
+ let warnings = [];
+ res15._log.warn = function(msg) { warnings.push(msg); };
+
+ res15.get(function (error, content) {
+ do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
+ do_check_eq(error.message, "NS_ERROR_XPC_JS_THREW_STRING");
+ do_check_eq(content, null);
+ do_check_eq(warnings.pop(),
+ "Got exception calling onProgress handler during fetch of " +
+ server.baseURI + "/json");
+
+ run_next_test();
+ });
+});
+
+add_test(function test_timeout() {
+ _("Ensure channel timeouts are thrown appropriately.");
+ let res19 = new AsyncResource(server.baseURI + "/json");
+ res19.ABORT_TIMEOUT = 0;
+ res19.get(function (error, content) {
+ do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
+ run_next_test();
+ });
+});
+
+add_test(function test_uri_construction() {
+ _("Testing URI construction.");
+ let args = [];
+ args.push("newer=" + 1234);
+ args.push("limit=" + 1234);
+ args.push("sort=" + 1234);
+
+ let query = "?" + args.join("&");
+
+ let uri1 = Utils.makeURI("http://foo/" + query)
+ .QueryInterface(Ci.nsIURL);
+ let uri2 = Utils.makeURI("http://foo/")
+ .QueryInterface(Ci.nsIURL);
+ uri2.query = query;
+ do_check_eq(uri1.query, uri2.query);
+
+ run_next_test();
+});
+
+add_test(function test_not_sending_cookie() {
+ function handler(metadata, response) {
+ let body = "COOKIE!";
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ do_check_false(metadata.hasHeader("Cookie"));
+ }
+ let cookieSer = Cc["@mozilla.org/cookieService;1"]
+ .getService(Ci.nsICookieService);
+ let uri = CommonUtils.makeURI(server.baseURI);
+ cookieSer.setCookieString(uri, null, "test=test; path=/;", null);
+
+ let res = new AsyncResource(server.baseURI + "/test");
+ res.get(function (error) {
+ do_check_null(error);
+ do_check_true(this.response.success);
+ do_check_eq("COOKIE!", this.response.body);
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * End of tests that rely on a single HTTP server.
+ * All tests after this point must begin and end their own.
+ */
+add_test(function eliminate_server() {
+ server.stop(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_resource_header.js b/services/sync/tests/unit/test_resource_header.js
new file mode 100644
index 000000000..4f28e01da
--- /dev/null
+++ b/services/sync/tests/unit/test_resource_header.js
@@ -0,0 +1,65 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://testing-common/httpd.js");
+Cu.import("resource://services-sync/resource.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+var httpServer = new HttpServer();
+httpServer.registerPathHandler("/content", contentHandler);
+httpServer.start(-1);
+
+const HTTP_PORT = httpServer.identity.primaryPort;
+const TEST_URL = "http://localhost:" + HTTP_PORT + "/content";
+const BODY = "response body";
+
+// Keep headers for later inspection.
+var auth = null;
+var foo = null;
+function contentHandler(metadata, response) {
+ _("Handling request.");
+ auth = metadata.getHeader("Authorization");
+ foo = metadata.getHeader("X-Foo");
+
+ _("Extracted headers. " + auth + ", " + foo);
+
+ response.setHeader("Content-Type", "text/plain");
+ response.bodyOutputStream.write(BODY, BODY.length);
+}
+
+// Set a proxy function to cause an internal redirect.
+function triggerRedirect() {
+ const PROXY_FUNCTION = "function FindProxyForURL(url, host) {" +
+ " return 'PROXY a_non_existent_domain_x7x6c572v:80; " +
+ "PROXY localhost:" + HTTP_PORT + "';" +
+ "}";
+
+ let prefsService = Cc["@mozilla.org/preferences-service;1"].getService(Ci.nsIPrefService);
+ let prefs = prefsService.getBranch("network.proxy.");
+ prefs.setIntPref("type", 2);
+ prefs.setCharPref("autoconfig_url", "data:text/plain," + PROXY_FUNCTION);
+}
+
+add_test(function test_headers_copied() {
+ triggerRedirect();
+
+ _("Issuing request.");
+ let resource = new Resource(TEST_URL);
+ resource.setHeader("Authorization", "Basic foobar");
+ resource.setHeader("X-Foo", "foofoo");
+
+ let result = resource.get(TEST_URL);
+ _("Result: " + result);
+
+ do_check_eq(result, BODY);
+ do_check_eq(auth, "Basic foobar");
+ do_check_eq(foo, "foofoo");
+
+ httpServer.stop(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_resource_ua.js b/services/sync/tests/unit/test_resource_ua.js
new file mode 100644
index 000000000..31c2cd379
--- /dev/null
+++ b/services/sync/tests/unit/test_resource_ua.js
@@ -0,0 +1,100 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+var httpProtocolHandler = Cc["@mozilla.org/network/protocol;1?name=http"]
+ .getService(Ci.nsIHttpProtocolHandler);
+
+// Tracking info/collections.
+var collectionsHelper = track_collections_helper();
+var collections = collectionsHelper.collections;
+
+var meta_global;
+var server;
+
+var expectedUA;
+var ua;
+function uaHandler(f) {
+ return function(request, response) {
+ ua = request.getHeader("User-Agent");
+ return f(request, response);
+ };
+}
+
+function run_test() {
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+ meta_global = new ServerWBO('global');
+ server = httpd_setup({
+ "/1.1/johndoe/info/collections": uaHandler(collectionsHelper.handler),
+ "/1.1/johndoe/storage/meta/global": uaHandler(meta_global.handler()),
+ });
+
+ ensureLegacyIdentityManager();
+ setBasicCredentials("johndoe", "ilovejane");
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+ _("Server URL: " + server.baseURI);
+
+ // Note this string is missing the trailing ".destkop" as the test
+ // adjusts the "client.type" pref where that portion comes from.
+ expectedUA = Services.appinfo.name + "/" + Services.appinfo.version +
+ " (" + httpProtocolHandler.oscpu + ")" +
+ " FxSync/" + WEAVE_VERSION + "." +
+ Services.appinfo.appBuildID;
+
+ run_next_test();
+}
+
+add_test(function test_fetchInfo() {
+ _("Testing _fetchInfo.");
+ Service._fetchInfo();
+ _("User-Agent: " + ua);
+ do_check_eq(ua, expectedUA + ".desktop");
+ ua = "";
+ run_next_test();
+});
+
+add_test(function test_desktop_post() {
+ _("Testing direct Resource POST.");
+ let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
+ r.post("foo=bar", function (error, content) {
+ _("User-Agent: " + ua);
+ do_check_eq(ua, expectedUA + ".desktop");
+ ua = "";
+ run_next_test();
+ });
+});
+
+add_test(function test_desktop_get() {
+ _("Testing async.");
+ Svc.Prefs.set("client.type", "desktop");
+ let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
+ r.get(function(error, content) {
+ _("User-Agent: " + ua);
+ do_check_eq(ua, expectedUA + ".desktop");
+ ua = "";
+ run_next_test();
+ });
+});
+
+add_test(function test_mobile_get() {
+ _("Testing mobile.");
+ Svc.Prefs.set("client.type", "mobile");
+ let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
+ r.get(function (error, content) {
+ _("User-Agent: " + ua);
+ do_check_eq(ua, expectedUA + ".mobile");
+ ua = "";
+ run_next_test();
+ });
+});
+
+add_test(function tear_down() {
+ server.stop(run_next_test);
+});
+
diff --git a/services/sync/tests/unit/test_score_triggers.js b/services/sync/tests/unit/test_score_triggers.js
new file mode 100644
index 000000000..513be685a
--- /dev/null
+++ b/services/sync/tests/unit/test_score_triggers.js
@@ -0,0 +1,149 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.clear();
+Service.engineManager.register(RotaryEngine);
+var engine = Service.engineManager.get("rotary");
+var tracker = engine._tracker;
+engine.enabled = true;
+
+// Tracking info/collections.
+var collectionsHelper = track_collections_helper();
+var upd = collectionsHelper.with_updated_collection;
+
+function sync_httpd_setup() {
+ let handlers = {};
+
+ handlers["/1.1/johndoe/storage/meta/global"] =
+ new ServerWBO("global", {}).handler();
+ handlers["/1.1/johndoe/storage/steam"] =
+ new ServerWBO("steam", {}).handler();
+
+ handlers["/1.1/johndoe/info/collections"] = collectionsHelper.handler;
+ delete collectionsHelper.collections.crypto;
+ delete collectionsHelper.collections.meta;
+
+ let cr = new ServerWBO("keys");
+ handlers["/1.1/johndoe/storage/crypto/keys"] =
+ upd("crypto", cr.handler());
+
+ let cl = new ServerCollection();
+ handlers["/1.1/johndoe/storage/clients"] =
+ upd("clients", cl.handler());
+
+ return httpd_setup(handlers);
+}
+
+function setUp(server) {
+ new SyncTestingInfrastructure(server, "johndoe", "ilovejane", "sekrit");
+}
+
+function run_test() {
+ initTestLogging("Trace");
+
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+
+ run_next_test();
+}
+
+add_test(function test_tracker_score_updated() {
+ let scoreUpdated = 0;
+
+ function onScoreUpdated() {
+ scoreUpdated++;
+ }
+
+ Svc.Obs.add("weave:engine:score:updated", onScoreUpdated());
+
+ try {
+ do_check_eq(engine.score, 0);
+
+ tracker.score += SCORE_INCREMENT_SMALL;
+ do_check_eq(engine.score, SCORE_INCREMENT_SMALL);
+
+ do_check_eq(scoreUpdated, 1);
+ } finally {
+ Svc.Obs.remove("weave:engine:score:updated", onScoreUpdated);
+ tracker.resetScore();
+ run_next_test();
+ }
+});
+
+add_test(function test_sync_triggered() {
+ let server = sync_httpd_setup();
+ setUp(server);
+
+ Service.login();
+
+ Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
+ Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ _("Sync completed!");
+ server.stop(run_next_test);
+ });
+
+ do_check_eq(Status.login, LOGIN_SUCCEEDED);
+ tracker.score += SCORE_INCREMENT_XLARGE;
+});
+
+add_test(function test_clients_engine_sync_triggered() {
+ _("Ensure that client engine score changes trigger a sync.");
+
+ // The clients engine is not registered like other engines. Therefore,
+ // it needs special treatment throughout the code. Here, we verify the
+ // global score tracker gives it that treatment. See bug 676042 for more.
+
+ let server = sync_httpd_setup();
+ setUp(server);
+ Service.login();
+
+ const TOPIC = "weave:service:sync:finish";
+ Svc.Obs.add(TOPIC, function onSyncFinish() {
+ Svc.Obs.remove(TOPIC, onSyncFinish);
+ _("Sync due to clients engine change completed.");
+ server.stop(run_next_test);
+ });
+
+ Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
+ do_check_eq(Status.login, LOGIN_SUCCEEDED);
+ Service.clientsEngine._tracker.score += SCORE_INCREMENT_XLARGE;
+});
+
+add_test(function test_incorrect_credentials_sync_not_triggered() {
+ _("Ensure that score changes don't trigger a sync if Status.login != LOGIN_SUCCEEDED.");
+ let server = sync_httpd_setup();
+ setUp(server);
+
+ // Ensure we don't actually try to sync.
+ function onSyncStart() {
+ do_throw("Should not get here!");
+ }
+ Svc.Obs.add("weave:service:sync:start", onSyncStart);
+
+ // First wait >100ms (nsITimers can take up to that much time to fire, so
+ // we can account for the timer in delayedAutoconnect) and then one event
+ // loop tick (to account for a possible call to weave:service:sync:start).
+ Utils.namedTimer(function() {
+ Utils.nextTick(function() {
+ Svc.Obs.remove("weave:service:sync:start", onSyncStart);
+
+ do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED);
+
+ Service.startOver();
+ server.stop(run_next_test);
+ });
+ }, 150, {}, "timer");
+
+ // Faking incorrect credentials to prevent score update.
+ Status.login = LOGIN_FAILED_LOGIN_REJECTED;
+ tracker.score += SCORE_INCREMENT_XLARGE;
+});
diff --git a/services/sync/tests/unit/test_sendcredentials_controller.js b/services/sync/tests/unit/test_sendcredentials_controller.js
new file mode 100644
index 000000000..42e5ec8e8
--- /dev/null
+++ b/services/sync/tests/unit/test_sendcredentials_controller.js
@@ -0,0 +1,102 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/jpakeclient.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ ensureLegacyIdentityManager();
+ setBasicCredentials("johndoe", "ilovejane", Utils.generatePassphrase());
+ Service.serverURL = "http://weave.server/";
+
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.SendCredentialsController").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
+ run_next_test();
+}
+
+function make_sendCredentials_test(topic) {
+ return function test_sendCredentials() {
+ _("Test sending credentials on " + topic + " observer notification.");
+
+ let sendAndCompleteCalled = false;
+ let jpakeclient = {
+ sendAndComplete: function sendAndComplete(data) {
+ // Verify that the controller unregisters itself as an observer
+ // when the exchange is complete by faking another notification.
+ do_check_false(sendAndCompleteCalled);
+ sendAndCompleteCalled = true;
+
+ // Verify it sends the correct data.
+ do_check_eq(data.account, Service.identity.account);
+ do_check_eq(data.password, Service.identity.basicPassword);
+ do_check_eq(data.synckey, Service.identity.syncKey);
+ do_check_eq(data.serverURL, Service.serverURL);
+
+ this.controller.onComplete();
+ // Verify it schedules a sync for the expected interval.
+ let expectedInterval = Service.scheduler.activeInterval;
+ do_check_true(Service.scheduler.nextSync - Date.now() <= expectedInterval);
+
+ // Signal the end of another sync. We shouldn't be registered anymore,
+ // so we shouldn't re-enter this method (cf sendAndCompleteCalled above)
+ Svc.Obs.notify(topic);
+
+ Service.scheduler.setDefaults();
+ Utils.nextTick(run_next_test);
+ }
+ };
+ jpakeclient.controller = new SendCredentialsController(jpakeclient, Service);
+ Svc.Obs.notify(topic);
+ };
+}
+
+add_test(make_sendCredentials_test("weave:service:sync:finish"));
+add_test(make_sendCredentials_test("weave:service:sync:error"));
+
+
+add_test(function test_abort() {
+ _("Test aborting the J-PAKE exchange.");
+
+ let jpakeclient = {
+ sendAndComplete: function sendAndComplete() {
+ do_throw("Shouldn't get here!");
+ }
+ };
+ jpakeclient.controller = new SendCredentialsController(jpakeclient, Service);
+
+ // Verify that the controller unregisters itself when the exchange
+ // was aborted.
+ jpakeclient.controller.onAbort(JPAKE_ERROR_USERABORT);
+ Svc.Obs.notify("weave:service:sync:finish");
+ Utils.nextTick(run_next_test);
+});
+
+
+add_test(function test_startOver() {
+ _("Test wiping local Sync config aborts transaction.");
+
+ let abortCalled = false;
+ let jpakeclient = {
+ abort: function abort() {
+ abortCalled = true;
+ this.controller.onAbort(JPAKE_ERROR_USERABORT);
+ },
+ sendAndComplete: function sendAndComplete() {
+ do_throw("Shouldn't get here!");
+ }
+ };
+ jpakeclient.controller = new SendCredentialsController(jpakeclient, Service);
+
+ Svc.Obs.notify("weave:service:start-over");
+ do_check_true(abortCalled);
+
+ // Ensure that the controller no longer does anything if a sync
+ // finishes now or -- more likely -- errors out.
+ Svc.Obs.notify("weave:service:sync:error");
+
+ Utils.nextTick(run_next_test);
+});
diff --git a/services/sync/tests/unit/test_service_attributes.js b/services/sync/tests/unit/test_service_attributes.js
new file mode 100644
index 000000000..931c7741a
--- /dev/null
+++ b/services/sync/tests/unit/test_service_attributes.js
@@ -0,0 +1,118 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/fakeservices.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function test_urls() {
+ _("URL related Service properties correspond to preference settings.");
+ try {
+ ensureLegacyIdentityManager();
+ do_check_true(!!Service.serverURL); // actual value may change
+ do_check_eq(Service.clusterURL, "");
+ do_check_eq(Service.userBaseURL, undefined);
+ do_check_eq(Service.infoURL, undefined);
+ do_check_eq(Service.storageURL, undefined);
+ do_check_eq(Service.metaURL, undefined);
+
+ _("The 'clusterURL' attribute updates preferences and cached URLs.");
+ Service.identity.username = "johndoe";
+
+ // Since we don't have a cluster URL yet, these will still not be defined.
+ do_check_eq(Service.infoURL, undefined);
+ do_check_eq(Service.userBaseURL, undefined);
+ do_check_eq(Service.storageURL, undefined);
+ do_check_eq(Service.metaURL, undefined);
+
+ Service.serverURL = "http://weave.server/";
+ Service.clusterURL = "http://weave.cluster/";
+
+ do_check_eq(Service.userBaseURL, "http://weave.cluster/1.1/johndoe/");
+ do_check_eq(Service.infoURL,
+ "http://weave.cluster/1.1/johndoe/info/collections");
+ do_check_eq(Service.storageURL,
+ "http://weave.cluster/1.1/johndoe/storage/");
+ do_check_eq(Service.metaURL,
+ "http://weave.cluster/1.1/johndoe/storage/meta/global");
+
+ _("The 'miscURL' and 'userURL' attributes can be relative to 'serverURL' or absolute.");
+ Svc.Prefs.set("miscURL", "relative/misc/");
+ Svc.Prefs.set("userURL", "relative/user/");
+ do_check_eq(Service.miscAPI,
+ "http://weave.server/relative/misc/1.0/");
+ do_check_eq(Service.userAPIURI,
+ "http://weave.server/relative/user/1.0/");
+
+ Svc.Prefs.set("miscURL", "http://weave.misc.services/");
+ Svc.Prefs.set("userURL", "http://weave.user.services/");
+ do_check_eq(Service.miscAPI, "http://weave.misc.services/1.0/");
+ do_check_eq(Service.userAPIURI, "http://weave.user.services/1.0/");
+
+ do_check_eq(Service.pwResetURL,
+ "http://weave.server/weave-password-reset");
+
+ _("Empty/false value for 'username' resets preference.");
+ Service.identity.username = "";
+ do_check_eq(Svc.Prefs.get("username"), undefined);
+ do_check_eq(Service.identity.username, null);
+
+ _("The 'serverURL' attributes updates/resets preferences.");
+ // Identical value doesn't do anything
+ Service.serverURL = Service.serverURL;
+ do_check_eq(Service.clusterURL, "http://weave.cluster/");
+
+ Service.serverURL = "http://different.auth.node/";
+ do_check_eq(Svc.Prefs.get("serverURL"), "http://different.auth.node/");
+ do_check_eq(Service.clusterURL, "");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+
+function test_syncID() {
+ _("Service.syncID is auto-generated, corresponds to preference.");
+ new FakeGUIDService();
+
+ try {
+ // Ensure pristine environment
+ do_check_eq(Svc.Prefs.get("client.syncID"), undefined);
+
+ // Performing the first get on the attribute will generate a new GUID.
+ do_check_eq(Service.syncID, "fake-guid-00");
+ do_check_eq(Svc.Prefs.get("client.syncID"), "fake-guid-00");
+
+ Svc.Prefs.set("client.syncID", Utils.makeGUID());
+ do_check_eq(Svc.Prefs.get("client.syncID"), "fake-guid-01");
+ do_check_eq(Service.syncID, "fake-guid-01");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ new FakeGUIDService();
+ }
+}
+
+function test_locked() {
+ _("The 'locked' attribute can be toggled with lock() and unlock()");
+
+ // Defaults to false
+ do_check_eq(Service.locked, false);
+
+ do_check_eq(Service.lock(), true);
+ do_check_eq(Service.locked, true);
+
+ // Locking again will return false
+ do_check_eq(Service.lock(), false);
+
+ Service.unlock();
+ do_check_eq(Service.locked, false);
+}
+
+function run_test() {
+ test_urls();
+ test_syncID();
+ test_locked();
+}
diff --git a/services/sync/tests/unit/test_service_changePassword.js b/services/sync/tests/unit/test_service_changePassword.js
new file mode 100644
index 000000000..12b0ad00e
--- /dev/null
+++ b/services/sync/tests/unit/test_service_changePassword.js
@@ -0,0 +1,80 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Resource").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+
+ ensureLegacyIdentityManager();
+
+ run_next_test();
+}
+
+add_test(function test_change_password() {
+ let requestBody;
+ let server;
+
+ function send(statusCode, status, body) {
+ return function(request, response) {
+ requestBody = readBytesFromInputStream(request.bodyInputStream);
+ response.setStatusLine(request.httpVersion, statusCode, status);
+ response.bodyOutputStream.write(body, body.length);
+ };
+ }
+
+ try {
+ Service.baseURI = "http://localhost:9999/";
+ Service.serverURL = "http://localhost:9999/";
+ setBasicCredentials("johndoe", "ilovejane");
+
+ _("changePassword() returns false for a network error, the password won't change.");
+ let res = Service.changePassword("ILoveJane83");
+ do_check_false(res);
+ do_check_eq(Service.identity.basicPassword, "ilovejane");
+
+ _("Let's fire up the server and actually change the password.");
+ server = httpd_setup({
+ "/user/1.0/johndoe/password": send(200, "OK", ""),
+ "/user/1.0/janedoe/password": send(401, "Unauthorized", "Forbidden!")
+ });
+
+ Service.serverURL = server.baseURI;
+ res = Service.changePassword("ILoveJane83");
+ do_check_true(res);
+ do_check_eq(Service.identity.basicPassword, "ILoveJane83");
+ do_check_eq(requestBody, "ILoveJane83");
+
+ _("Make sure the password has been persisted in the login manager.");
+ let logins = Services.logins.findLogins({}, PWDMGR_HOST, null,
+ PWDMGR_PASSWORD_REALM);
+ do_check_eq(logins.length, 1);
+ do_check_eq(logins[0].password, "ILoveJane83");
+
+ _("A non-ASCII password is UTF-8 encoded.");
+ const moneyPassword = "moneyislike$£¥";
+ res = Service.changePassword(moneyPassword);
+ do_check_true(res);
+ do_check_eq(Service.identity.basicPassword, Utils.encodeUTF8(moneyPassword));
+ do_check_eq(requestBody, Utils.encodeUTF8(moneyPassword));
+
+ _("changePassword() returns false for a server error, the password won't change.");
+ Services.logins.removeAllLogins();
+ setBasicCredentials("janedoe", "ilovejohn");
+ res = Service.changePassword("ILoveJohn86");
+ do_check_false(res);
+ do_check_eq(Service.identity.basicPassword, "ilovejohn");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Services.logins.removeAllLogins();
+ server.stop(run_next_test);
+ }
+});
diff --git a/services/sync/tests/unit/test_service_checkAccount.js b/services/sync/tests/unit/test_service_checkAccount.js
new file mode 100644
index 000000000..618348d1a
--- /dev/null
+++ b/services/sync/tests/unit/test_service_checkAccount.js
@@ -0,0 +1,41 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ do_test_pending();
+ ensureLegacyIdentityManager();
+ let server = httpd_setup({
+ "/user/1.0/johndoe": httpd_handler(200, "OK", "1"),
+ "/user/1.0/janedoe": httpd_handler(200, "OK", "0"),
+ // john@doe.com
+ "/user/1.0/7wohs32cngzuqt466q3ge7indszva4of": httpd_handler(200, "OK", "0"),
+ // jane@doe.com
+ "/user/1.0/vuuf3eqgloxpxmzph27f5a6ve7gzlrms": httpd_handler(200, "OK", "1")
+ });
+ try {
+ Service.serverURL = server.baseURI;
+
+ _("A 404 will be recorded as 'generic-server-error'");
+ do_check_eq(Service.checkAccount("jimdoe"), "generic-server-error");
+
+ _("Account that's available.");
+ do_check_eq(Service.checkAccount("john@doe.com"), "available");
+
+ _("Account that's not available.");
+ do_check_eq(Service.checkAccount("jane@doe.com"), "notAvailable");
+
+ _("Username fallback: Account that's not available.");
+ do_check_eq(Service.checkAccount("johndoe"), "notAvailable");
+
+ _("Username fallback: Account that's available.");
+ do_check_eq(Service.checkAccount("janedoe"), "available");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(do_test_finished);
+ }
+}
diff --git a/services/sync/tests/unit/test_service_cluster.js b/services/sync/tests/unit/test_service_cluster.js
new file mode 100644
index 000000000..65f0c3a95
--- /dev/null
+++ b/services/sync/tests/unit/test_service_cluster.js
@@ -0,0 +1,110 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function do_check_throws(func) {
+ var raised = false;
+ try {
+ func();
+ } catch (ex) {
+ raised = true;
+ }
+ do_check_true(raised);
+}
+
+add_test(function test_findCluster() {
+ _("Test Service._findCluster()");
+ let server;
+ ensureLegacyIdentityManager();
+ try {
+ _("_findCluster() throws on network errors (e.g. connection refused).");
+ do_check_throws(function() {
+ Service.serverURL = "http://dummy:9000/";
+ Service.identity.account = "johndoe";
+ Service._clusterManager._findCluster();
+ });
+
+ server = httpd_setup({
+ "/user/1.0/johndoe/node/weave": httpd_handler(200, "OK", "http://weave.user.node/"),
+ "/user/1.0/jimdoe/node/weave": httpd_handler(200, "OK", "null"),
+ "/user/1.0/janedoe/node/weave": httpd_handler(404, "Not Found", "Not Found"),
+ "/user/1.0/juliadoe/node/weave": httpd_handler(400, "Bad Request", "Bad Request"),
+ "/user/1.0/joedoe/node/weave": httpd_handler(500, "Server Error", "Server Error")
+ });
+
+ Service.serverURL = server.baseURI;
+ Service.identity.account = "johndoe";
+
+ _("_findCluster() returns the user's cluster node");
+ let cluster = Service._clusterManager._findCluster();
+ do_check_eq(cluster, "http://weave.user.node/");
+
+ _("A 'null' response is converted to null.");
+ Service.identity.account = "jimdoe";
+ cluster = Service._clusterManager._findCluster();
+ do_check_eq(cluster, null);
+
+ _("If a 404 is encountered, the server URL is taken as the cluster URL");
+ Service.identity.account = "janedoe";
+ cluster = Service._clusterManager._findCluster();
+ do_check_eq(cluster, Service.serverURL);
+
+ _("A 400 response will throw an error.");
+ Service.identity.account = "juliadoe";
+ do_check_throws(function() {
+ Service._clusterManager._findCluster();
+ });
+
+ _("Any other server response (e.g. 500) will throw an error.");
+ Service.identity.account = "joedoe";
+ do_check_throws(function() {
+ Service._clusterManager._findCluster();
+ });
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ if (server) {
+ server.stop(run_next_test);
+ }
+ }
+});
+
+add_test(function test_setCluster() {
+ _("Test Service._setCluster()");
+ let server = httpd_setup({
+ "/user/1.0/johndoe/node/weave": httpd_handler(200, "OK", "http://weave.user.node/"),
+ "/user/1.0/jimdoe/node/weave": httpd_handler(200, "OK", "null")
+ });
+ try {
+ Service.serverURL = server.baseURI;
+ Service.identity.account = "johndoe";
+
+ _("Check initial state.");
+ do_check_eq(Service.clusterURL, "");
+
+ _("Set the cluster URL.");
+ do_check_true(Service._clusterManager.setCluster());
+ do_check_eq(Service.clusterURL, "http://weave.user.node/");
+
+ _("Setting it again won't make a difference if it's the same one.");
+ do_check_false(Service._clusterManager.setCluster());
+ do_check_eq(Service.clusterURL, "http://weave.user.node/");
+
+ _("A 'null' response won't make a difference either.");
+ Service.identity.account = "jimdoe";
+ do_check_false(Service._clusterManager.setCluster());
+ do_check_eq(Service.clusterURL, "http://weave.user.node/");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(run_next_test);
+ }
+});
+
+function run_test() {
+ initTestLogging();
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_service_createAccount.js b/services/sync/tests/unit/test_service_createAccount.js
new file mode 100644
index 000000000..93c6f78e3
--- /dev/null
+++ b/services/sync/tests/unit/test_service_createAccount.js
@@ -0,0 +1,75 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ initTestLogging("Trace");
+
+ let requestBody;
+ let secretHeader;
+ function send(statusCode, status, body) {
+ return function(request, response) {
+ requestBody = readBytesFromInputStream(request.bodyInputStream);
+ if (request.hasHeader("X-Weave-Secret")) {
+ secretHeader = request.getHeader("X-Weave-Secret");
+ }
+
+ response.setStatusLine(request.httpVersion, statusCode, status);
+ response.bodyOutputStream.write(body, body.length);
+ };
+ }
+
+ do_test_pending();
+ let server = httpd_setup({
+ // john@doe.com
+ "/user/1.0/7wohs32cngzuqt466q3ge7indszva4of": send(200, "OK", "0"),
+ // jane@doe.com
+ "/user/1.0/vuuf3eqgloxpxmzph27f5a6ve7gzlrms": send(400, "Bad Request", "2"),
+ // jim@doe.com
+ "/user/1.0/vz6fhecgw5t3sgx3a4cektoiokyczkqd": send(500, "Server Error", "Server Error")
+ });
+ try {
+ Service.serverURL = server.baseURI;
+
+ _("Create an account.");
+ let res = Service.createAccount("john@doe.com", "mysecretpw",
+ "challenge", "response");
+ do_check_eq(res, null);
+ let payload = JSON.parse(requestBody);
+ do_check_eq(payload.password, "mysecretpw");
+ do_check_eq(payload.email, "john@doe.com");
+ do_check_eq(payload["captcha-challenge"], "challenge");
+ do_check_eq(payload["captcha-response"], "response");
+
+ _("A non-ASCII password is UTF-8 encoded.");
+ const moneyPassword = "moneyislike$£¥";
+ res = Service.createAccount("john@doe.com", moneyPassword,
+ "challenge", "response");
+ do_check_eq(res, null);
+ payload = JSON.parse(requestBody);
+ do_check_eq(payload.password, Utils.encodeUTF8(moneyPassword));
+
+ _("Invalid captcha or other user-friendly error.");
+ res = Service.createAccount("jane@doe.com", "anothersecretpw",
+ "challenge", "response");
+ do_check_eq(res, "invalid-captcha");
+
+ _("Generic server error.");
+ res = Service.createAccount("jim@doe.com", "preciousss",
+ "challenge", "response");
+ do_check_eq(res, "generic-server-error");
+
+ _("Admin secret preference is passed as HTTP header token.");
+ Svc.Prefs.set("admin-secret", "my-server-secret");
+ res = Service.createAccount("john@doe.com", "mysecretpw",
+ "challenge", "response");
+ do_check_eq(secretHeader, "my-server-secret");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(do_test_finished);
+ }
+}
diff --git a/services/sync/tests/unit/test_service_detect_upgrade.js b/services/sync/tests/unit/test_service_detect_upgrade.js
new file mode 100644
index 000000000..0f46832d9
--- /dev/null
+++ b/services/sync/tests/unit/test_service_detect_upgrade.js
@@ -0,0 +1,297 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/engines/tabs.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.register(TabEngine);
+
+add_test(function v4_upgrade() {
+ let passphrase = "abcdeabcdeabcdeabcdeabcdea";
+
+ let clients = new ServerCollection();
+ let meta_global = new ServerWBO('global');
+
+ // Tracking info/collections.
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+ let collections = collectionsHelper.collections;
+
+ let keysWBO = new ServerWBO("keys");
+ let server = httpd_setup({
+ // Special.
+ "/1.1/johndoe/info/collections": collectionsHelper.handler,
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", keysWBO.handler()),
+ "/1.1/johndoe/storage/meta/global": upd("meta", meta_global.handler()),
+
+ // Track modified times.
+ "/1.1/johndoe/storage/clients": upd("clients", clients.handler()),
+ "/1.1/johndoe/storage/tabs": upd("tabs", new ServerCollection().handler()),
+
+ // Just so we don't get 404s in the logs.
+ "/1.1/johndoe/storage/bookmarks": new ServerCollection().handler(),
+ "/1.1/johndoe/storage/forms": new ServerCollection().handler(),
+ "/1.1/johndoe/storage/history": new ServerCollection().handler(),
+ "/1.1/johndoe/storage/passwords": new ServerCollection().handler(),
+ "/1.1/johndoe/storage/prefs": new ServerCollection().handler()
+ });
+
+ ensureLegacyIdentityManager();
+
+ try {
+
+ _("Set up some tabs.");
+ let myTabs =
+ {windows: [{tabs: [{index: 1,
+ entries: [{
+ url: "http://foo.com/",
+ title: "Title"
+ }],
+ attributes: {
+ image: "image"
+ }
+ }]}]};
+ delete Svc.Session;
+ Svc.Session = {
+ getBrowserState: () => JSON.stringify(myTabs)
+ };
+
+ Service.status.resetSync();
+
+ _("Logging in.");
+ Service.serverURL = server.baseURI;
+
+ Service.login("johndoe", "ilovejane", passphrase);
+ do_check_true(Service.isLoggedIn);
+ Service.verifyAndFetchSymmetricKeys();
+ do_check_true(Service._remoteSetup());
+
+ function test_out_of_date() {
+ _("Old meta/global: " + JSON.stringify(meta_global));
+ meta_global.payload = JSON.stringify({"syncID": "foooooooooooooooooooooooooo",
+ "storageVersion": STORAGE_VERSION + 1});
+ collections.meta = Date.now() / 1000;
+ _("New meta/global: " + JSON.stringify(meta_global));
+ Service.recordManager.set(Service.metaURL, meta_global);
+ try {
+ Service.sync();
+ }
+ catch (ex) {
+ }
+ do_check_eq(Service.status.sync, VERSION_OUT_OF_DATE);
+ }
+
+ // See what happens when we bump the storage version.
+ _("Syncing after server has been upgraded.");
+ test_out_of_date();
+
+ // Same should happen after a wipe.
+ _("Syncing after server has been upgraded and wiped.");
+ Service.wipeServer();
+ test_out_of_date();
+
+ // Now's a great time to test what happens when keys get replaced.
+ _("Syncing afresh...");
+ Service.logout();
+ Service.collectionKeys.clear();
+ Service.serverURL = server.baseURI;
+ meta_global.payload = JSON.stringify({"syncID": "foooooooooooooobbbbbbbbbbbb",
+ "storageVersion": STORAGE_VERSION});
+ collections.meta = Date.now() / 1000;
+ Service.recordManager.set(Service.metaURL, meta_global);
+ Service.login("johndoe", "ilovejane", passphrase);
+ do_check_true(Service.isLoggedIn);
+ Service.sync();
+ do_check_true(Service.isLoggedIn);
+
+ let serverDecrypted;
+ let serverKeys;
+ let serverResp;
+
+
+ function retrieve_server_default() {
+ serverKeys = serverResp = serverDecrypted = null;
+
+ serverKeys = new CryptoWrapper("crypto", "keys");
+ serverResp = serverKeys.fetch(Service.resource(Service.cryptoKeysURL)).response;
+ do_check_true(serverResp.success);
+
+ serverDecrypted = serverKeys.decrypt(Service.identity.syncKeyBundle);
+ _("Retrieved WBO: " + JSON.stringify(serverDecrypted));
+ _("serverKeys: " + JSON.stringify(serverKeys));
+
+ return serverDecrypted.default;
+ }
+
+ function retrieve_and_compare_default(should_succeed) {
+ let serverDefault = retrieve_server_default();
+ let localDefault = Service.collectionKeys.keyForCollection().keyPairB64;
+
+ _("Retrieved keyBundle: " + JSON.stringify(serverDefault));
+ _("Local keyBundle: " + JSON.stringify(localDefault));
+
+ if (should_succeed)
+ do_check_eq(JSON.stringify(serverDefault), JSON.stringify(localDefault));
+ else
+ do_check_neq(JSON.stringify(serverDefault), JSON.stringify(localDefault));
+ }
+
+ // Uses the objects set above.
+ function set_server_keys(pair) {
+ serverDecrypted.default = pair;
+ serverKeys.cleartext = serverDecrypted;
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+ }
+
+ _("Checking we have the latest keys.");
+ retrieve_and_compare_default(true);
+
+ _("Update keys on server.");
+ set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
+ "aaaaaaaaaaaapxMO6TEWtLIOv9dj6kBAJdzhWDkkkis="]);
+
+ _("Checking that we no longer have the latest keys.");
+ retrieve_and_compare_default(false);
+
+ _("Indeed, they're what we set them to...");
+ do_check_eq("KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
+ retrieve_server_default()[0]);
+
+ _("Sync. Should download changed keys automatically.");
+ let oldClientsModified = collections.clients;
+ let oldTabsModified = collections.tabs;
+
+ Service.login("johndoe", "ilovejane", passphrase);
+ Service.sync();
+ _("New key should have forced upload of data.");
+ _("Tabs: " + oldTabsModified + " < " + collections.tabs);
+ _("Clients: " + oldClientsModified + " < " + collections.clients);
+ do_check_true(collections.clients > oldClientsModified);
+ do_check_true(collections.tabs > oldTabsModified);
+
+ _("... and keys will now match.");
+ retrieve_and_compare_default(true);
+
+ // Clean up.
+ Service.startOver();
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function v5_upgrade() {
+ let passphrase = "abcdeabcdeabcdeabcdeabcdea";
+
+ // Tracking info/collections.
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+ let collections = collectionsHelper.collections;
+
+ let keysWBO = new ServerWBO("keys");
+ let bulkWBO = new ServerWBO("bulk");
+ let clients = new ServerCollection();
+ let meta_global = new ServerWBO('global');
+
+ let server = httpd_setup({
+ // Special.
+ "/1.1/johndoe/storage/meta/global": upd("meta", meta_global.handler()),
+ "/1.1/johndoe/info/collections": collectionsHelper.handler,
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", keysWBO.handler()),
+ "/1.1/johndoe/storage/crypto/bulk": upd("crypto", bulkWBO.handler()),
+
+ // Track modified times.
+ "/1.1/johndoe/storage/clients": upd("clients", clients.handler()),
+ "/1.1/johndoe/storage/tabs": upd("tabs", new ServerCollection().handler()),
+ });
+
+ try {
+
+ _("Set up some tabs.");
+ let myTabs =
+ {windows: [{tabs: [{index: 1,
+ entries: [{
+ url: "http://foo.com/",
+ title: "Title"
+ }],
+ attributes: {
+ image: "image"
+ }
+ }]}]};
+ delete Svc.Session;
+ Svc.Session = {
+ getBrowserState: () => JSON.stringify(myTabs)
+ };
+
+ Service.status.resetSync();
+
+ setBasicCredentials("johndoe", "ilovejane", passphrase);
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+
+ // Test an upgrade where the contents of the server would cause us to error
+ // -- keys decrypted with a different sync key, for example.
+ _("Testing v4 -> v5 (or similar) upgrade.");
+ function update_server_keys(syncKeyBundle, wboName, collWBO) {
+ generateNewKeys(Service.collectionKeys);
+ serverKeys = Service.collectionKeys.asWBO("crypto", wboName);
+ serverKeys.encrypt(syncKeyBundle);
+ let res = Service.resource(Service.storageURL + collWBO);
+ do_check_true(serverKeys.upload(res).success);
+ }
+
+ _("Bumping version.");
+ // Bump version on the server.
+ let m = new WBORecord("meta", "global");
+ m.payload = {"syncID": "foooooooooooooooooooooooooo",
+ "storageVersion": STORAGE_VERSION + 1};
+ m.upload(Service.resource(Service.metaURL));
+
+ _("New meta/global: " + JSON.stringify(meta_global));
+
+ // Fill the keys with bad data.
+ let badKeys = new SyncKeyBundle("foobar", "aaaaaaaaaaaaaaaaaaaaaaaaaa");
+ update_server_keys(badKeys, "keys", "crypto/keys"); // v4
+ update_server_keys(badKeys, "bulk", "crypto/bulk"); // v5
+
+ _("Generating new keys.");
+ generateNewKeys(Service.collectionKeys);
+
+ // Now sync and see what happens. It should be a version fail, not a crypto
+ // fail.
+
+ _("Logging in.");
+ try {
+ Service.login("johndoe", "ilovejane", passphrase);
+ }
+ catch (e) {
+ _("Exception: " + e);
+ }
+ _("Status: " + Service.status);
+ do_check_false(Service.isLoggedIn);
+ do_check_eq(VERSION_OUT_OF_DATE, Service.status.sync);
+
+ // Clean up.
+ Service.startOver();
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(run_next_test);
+ }
+});
+
+function run_test() {
+ let logger = Log.repository.rootLogger;
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_service_getStorageInfo.js b/services/sync/tests/unit/test_service_getStorageInfo.js
new file mode 100644
index 000000000..841dceb78
--- /dev/null
+++ b/services/sync/tests/unit/test_service_getStorageInfo.js
@@ -0,0 +1,94 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+var httpProtocolHandler = Cc["@mozilla.org/network/protocol;1?name=http"]
+ .getService(Ci.nsIHttpProtocolHandler);
+
+var collections = {steam: 65.11328,
+ petrol: 82.488281,
+ diesel: 2.25488281};
+
+function run_test() {
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.StorageRequest").level = Log.Level.Trace;
+ initTestLogging();
+
+ ensureLegacyIdentityManager();
+ setBasicCredentials("johndoe", "ilovejane");
+
+ run_next_test();
+}
+
+add_test(function test_success() {
+ let handler = httpd_handler(200, "OK", JSON.stringify(collections));
+ let server = httpd_setup({"/1.1/johndoe/info/collections": handler});
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+
+ let request = Service.getStorageInfo("collections", function (error, info) {
+ do_check_eq(error, null);
+ do_check_true(Utils.deepEquals(info, collections));
+
+ // Ensure that the request is sent off with the right bits.
+ do_check_true(basic_auth_matches(handler.request,
+ Service.identity.username,
+ Service.identity.basicPassword));
+ let expectedUA = Services.appinfo.name + "/" + Services.appinfo.version +
+ " (" + httpProtocolHandler.oscpu + ")" +
+ " FxSync/" + WEAVE_VERSION + "." +
+ Services.appinfo.appBuildID + ".desktop";
+ do_check_eq(handler.request.getHeader("User-Agent"), expectedUA);
+
+ server.stop(run_next_test);
+ });
+ do_check_true(request instanceof RESTRequest);
+});
+
+add_test(function test_invalid_type() {
+ do_check_throws(function () {
+ Service.getStorageInfo("invalid", function (error, info) {
+ do_throw("Shouldn't get here!");
+ });
+ });
+ run_next_test();
+});
+
+add_test(function test_network_error() {
+ Service.getStorageInfo(INFO_COLLECTIONS, function (error, info) {
+ do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
+ do_check_eq(info, null);
+ run_next_test();
+ });
+});
+
+add_test(function test_http_error() {
+ let handler = httpd_handler(500, "Oh noez", "Something went wrong!");
+ let server = httpd_setup({"/1.1/johndoe/info/collections": handler});
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+
+ let request = Service.getStorageInfo(INFO_COLLECTIONS, function (error, info) {
+ do_check_eq(error.status, 500);
+ do_check_eq(info, null);
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_invalid_json() {
+ let handler = httpd_handler(200, "OK", "Invalid JSON");
+ let server = httpd_setup({"/1.1/johndoe/info/collections": handler});
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+
+ let request = Service.getStorageInfo(INFO_COLLECTIONS, function (error, info) {
+ do_check_eq(error.name, "SyntaxError");
+ do_check_eq(info, null);
+ server.stop(run_next_test);
+ });
+});
diff --git a/services/sync/tests/unit/test_service_login.js b/services/sync/tests/unit/test_service_login.js
new file mode 100644
index 000000000..42c163915
--- /dev/null
+++ b/services/sync/tests/unit/test_service_login.js
@@ -0,0 +1,245 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function login_handling(handler) {
+ return function (request, response) {
+ if (basic_auth_matches(request, "johndoe", "ilovejane") ||
+ basic_auth_matches(request, "janedoe", "ilovejohn")) {
+ handler(request, response);
+ } else {
+ let body = "Unauthorized";
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.setHeader("Content-Type", "text/plain");
+ response.bodyOutputStream.write(body, body.length);
+ }
+ };
+}
+
+function run_test() {
+ let logger = Log.repository.rootLogger;
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ run_next_test();
+}
+
+add_test(function test_offline() {
+ try {
+ _("The right bits are set when we're offline.");
+ Services.io.offline = true;
+ do_check_false(!!Service.login());
+ do_check_eq(Service.status.login, LOGIN_FAILED_NETWORK_ERROR);
+ Services.io.offline = false;
+ } finally {
+ Svc.Prefs.resetBranch("");
+ run_next_test();
+ }
+});
+
+function setup() {
+ let janeHelper = track_collections_helper();
+ let janeU = janeHelper.with_updated_collection;
+ let janeColls = janeHelper.collections;
+ let johnHelper = track_collections_helper();
+ let johnU = johnHelper.with_updated_collection;
+ let johnColls = johnHelper.collections;
+
+ let server = httpd_setup({
+ "/1.1/johndoe/info/collections": login_handling(johnHelper.handler),
+ "/1.1/janedoe/info/collections": login_handling(janeHelper.handler),
+
+ // We need these handlers because we test login, and login
+ // is where keys are generated or fetched.
+ // TODO: have Jane fetch her keys, not generate them...
+ "/1.1/johndoe/storage/crypto/keys": johnU("crypto", new ServerWBO("keys").handler()),
+ "/1.1/johndoe/storage/meta/global": johnU("meta", new ServerWBO("global").handler()),
+ "/1.1/janedoe/storage/crypto/keys": janeU("crypto", new ServerWBO("keys").handler()),
+ "/1.1/janedoe/storage/meta/global": janeU("meta", new ServerWBO("global").handler())
+ });
+
+ Service.serverURL = server.baseURI;
+ return server;
+}
+
+add_test(function test_login_logout() {
+ let server = setup();
+
+ try {
+ _("Force the initial state.");
+ ensureLegacyIdentityManager();
+ Service.status.service = STATUS_OK;
+ do_check_eq(Service.status.service, STATUS_OK);
+
+ _("Try logging in. It won't work because we're not configured yet.");
+ Service.login();
+ do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_NO_USERNAME);
+ do_check_false(Service.isLoggedIn);
+
+ _("Try again with username and password set.");
+ Service.identity.account = "johndoe";
+ Service.identity.basicPassword = "ilovejane";
+ Service.login();
+ do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_NO_PASSPHRASE);
+ do_check_false(Service.isLoggedIn);
+
+ _("Success if passphrase is set.");
+ Service.identity.syncKey = "foo";
+ Service.login();
+ do_check_eq(Service.status.service, STATUS_OK);
+ do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ _("We can also pass username, password and passphrase to login().");
+ Service.login("janedoe", "incorrectpassword", "bar");
+ setBasicCredentials("janedoe", "incorrectpassword", "bar");
+ do_check_eq(Service.status.service, LOGIN_FAILED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_LOGIN_REJECTED);
+ do_check_false(Service.isLoggedIn);
+
+ _("Try again with correct password.");
+ Service.login("janedoe", "ilovejohn");
+ do_check_eq(Service.status.service, STATUS_OK);
+ do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ _("Calling login() with parameters when the client is unconfigured sends notification.");
+ let notified = false;
+ Svc.Obs.add("weave:service:setup-complete", function() {
+ notified = true;
+ });
+ setBasicCredentials(null, null, null);
+ Service.login("janedoe", "ilovejohn", "bar");
+ do_check_true(notified);
+ do_check_eq(Service.status.service, STATUS_OK);
+ do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
+ do_check_true(Service.isLoggedIn);
+
+ _("Logout.");
+ Service.logout();
+ do_check_false(Service.isLoggedIn);
+
+ _("Logging out again won't do any harm.");
+ Service.logout();
+ do_check_false(Service.isLoggedIn);
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_login_on_sync() {
+ let server = setup();
+ setBasicCredentials("johndoe", "ilovejane", "bar");
+
+ try {
+ _("Sync calls login.");
+ let oldLogin = Service.login;
+ let loginCalled = false;
+ Service.login = function() {
+ loginCalled = true;
+ Service.status.login = LOGIN_SUCCEEDED;
+ this._loggedIn = false; // So that sync aborts.
+ return true;
+ };
+
+ Service.sync();
+
+ do_check_true(loginCalled);
+ Service.login = oldLogin;
+
+ // Stub mpLocked.
+ let mpLockedF = Utils.mpLocked;
+ let mpLocked = true;
+ Utils.mpLocked = () => mpLocked;
+
+ // Stub scheduleNextSync. This gets called within checkSyncStatus if we're
+ // ready to sync, so use it as an indicator.
+ let scheduleNextSyncF = Service.scheduler.scheduleNextSync;
+ let scheduleCalled = false;
+ Service.scheduler.scheduleNextSync = function(wait) {
+ scheduleCalled = true;
+ scheduleNextSyncF.call(this, wait);
+ };
+
+ // Autoconnect still tries to connect in the background (useful behavior:
+ // for non-MP users and unlocked MPs, this will detect version expiry
+ // earlier).
+ //
+ // Consequently, non-MP users will be logged in as in the pre-Bug 543784 world,
+ // and checkSyncStatus reflects that by waiting for login.
+ //
+ // This process doesn't apply if your MP is still locked, so we make
+ // checkSyncStatus accept a locked MP in place of being logged in.
+ //
+ // This test exercises these two branches.
+
+ _("We're ready to sync if locked.");
+ Service.enabled = true;
+ Services.io.offline = false;
+ Service.scheduler.checkSyncStatus();
+ do_check_true(scheduleCalled);
+
+ _("... and also if we're not locked.");
+ scheduleCalled = false;
+ mpLocked = false;
+ Service.scheduler.checkSyncStatus();
+ do_check_true(scheduleCalled);
+ Service.scheduler.scheduleNextSync = scheduleNextSyncF;
+
+ // TODO: need better tests around master password prompting. See Bug 620583.
+
+ mpLocked = true;
+
+ // Testing exception handling if master password dialog is canceled.
+ // Do this by monkeypatching.
+ let oldGetter = Service.identity.__lookupGetter__("syncKey");
+ let oldSetter = Service.identity.__lookupSetter__("syncKey");
+ _("Old passphrase function is " + oldGetter);
+ Service.identity.__defineGetter__("syncKey",
+ function() {
+ throw "User canceled Master Password entry";
+ });
+
+ let oldClearSyncTriggers = Service.scheduler.clearSyncTriggers;
+ let oldLockedSync = Service._lockedSync;
+
+ let cSTCalled = false;
+ let lockedSyncCalled = false;
+
+ Service.scheduler.clearSyncTriggers = function() { cSTCalled = true; };
+ Service._lockedSync = function() { lockedSyncCalled = true; };
+
+ _("If master password is canceled, login fails and we report lockage.");
+ do_check_false(!!Service.login());
+ do_check_eq(Service.status.login, MASTER_PASSWORD_LOCKED);
+ do_check_eq(Service.status.service, LOGIN_FAILED);
+ _("Locked? " + Utils.mpLocked());
+ _("checkSync reports the correct term.");
+ do_check_eq(Service._checkSync(), kSyncMasterPasswordLocked);
+
+ _("Sync doesn't proceed and clears triggers if MP is still locked.");
+ Service.sync();
+
+ do_check_true(cSTCalled);
+ do_check_false(lockedSyncCalled);
+
+ Service.identity.__defineGetter__("syncKey", oldGetter);
+ Service.identity.__defineSetter__("syncKey", oldSetter);
+
+ // N.B., a bunch of methods are stubbed at this point. Be careful putting
+ // new tests after this point!
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(run_next_test);
+ }
+});
diff --git a/services/sync/tests/unit/test_service_migratePrefs.js b/services/sync/tests/unit/test_service_migratePrefs.js
new file mode 100644
index 000000000..89a147c06
--- /dev/null
+++ b/services/sync/tests/unit/test_service_migratePrefs.js
@@ -0,0 +1,70 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://services-sync/util.js");
+
+function test_migrate_logging() {
+ _("Testing log pref migration.");
+ Svc.Prefs.set("log.appender.debugLog", "Warn");
+ Svc.Prefs.set("log.appender.debugLog.enabled", true);
+ do_check_true(Svc.Prefs.get("log.appender.debugLog.enabled"));
+ do_check_eq(Svc.Prefs.get("log.appender.file.level"), "Trace");
+ do_check_eq(Svc.Prefs.get("log.appender.file.logOnSuccess"), false);
+
+ Service._migratePrefs();
+
+ do_check_eq("Warn", Svc.Prefs.get("log.appender.file.level"));
+ do_check_true(Svc.Prefs.get("log.appender.file.logOnSuccess"));
+ do_check_eq(Svc.Prefs.get("log.appender.debugLog"), undefined);
+ do_check_eq(Svc.Prefs.get("log.appender.debugLog.enabled"), undefined);
+};
+
+function run_test() {
+ _("Set some prefs on the old branch");
+ let globalPref = new Preferences("");
+ globalPref.set("extensions.weave.hello", "world");
+ globalPref.set("extensions.weave.number", 42);
+ globalPref.set("extensions.weave.yes", true);
+ globalPref.set("extensions.weave.no", false);
+
+ _("Make sure the old prefs are there");
+ do_check_eq(globalPref.get("extensions.weave.hello"), "world");
+ do_check_eq(globalPref.get("extensions.weave.number"), 42);
+ do_check_eq(globalPref.get("extensions.weave.yes"), true);
+ do_check_eq(globalPref.get("extensions.weave.no"), false);
+
+ _("New prefs shouldn't exist yet");
+ do_check_eq(globalPref.get("services.sync.hello"), null);
+ do_check_eq(globalPref.get("services.sync.number"), null);
+ do_check_eq(globalPref.get("services.sync.yes"), null);
+ do_check_eq(globalPref.get("services.sync.no"), null);
+
+ _("Loading service should migrate");
+ Cu.import("resource://services-sync/service.js");
+ do_check_eq(globalPref.get("services.sync.hello"), "world");
+ do_check_eq(globalPref.get("services.sync.number"), 42);
+ do_check_eq(globalPref.get("services.sync.yes"), true);
+ do_check_eq(globalPref.get("services.sync.no"), false);
+ do_check_eq(globalPref.get("extensions.weave.hello"), null);
+ do_check_eq(globalPref.get("extensions.weave.number"), null);
+ do_check_eq(globalPref.get("extensions.weave.yes"), null);
+ do_check_eq(globalPref.get("extensions.weave.no"), null);
+
+ _("Migrating should set a pref to make sure to not re-migrate");
+ do_check_true(globalPref.get("services.sync.migrated"));
+
+ _("Make sure re-migrate doesn't happen");
+ globalPref.set("extensions.weave.tooLate", "already migrated!");
+ do_check_eq(globalPref.get("extensions.weave.tooLate"), "already migrated!");
+ do_check_eq(globalPref.get("services.sync.tooLate"), null);
+ Service._migratePrefs();
+ do_check_eq(globalPref.get("extensions.weave.tooLate"), "already migrated!");
+ do_check_eq(globalPref.get("services.sync.tooLate"), null);
+
+ _("Clearing out pref changes for other tests");
+ globalPref.resetBranch("extensions.weave.");
+ globalPref.resetBranch("services.sync.");
+
+ test_migrate_logging();
+}
diff --git a/services/sync/tests/unit/test_service_passwordUTF8.js b/services/sync/tests/unit/test_service_passwordUTF8.js
new file mode 100644
index 000000000..e781050b3
--- /dev/null
+++ b/services/sync/tests/unit/test_service_passwordUTF8.js
@@ -0,0 +1,95 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+const JAPANESE = "\u34ff\u35ff\u36ff\u37ff";
+const APPLES = "\uf8ff\uf8ff\uf8ff\uf8ff";
+const LOWBYTES = "\xff\xff\xff\xff";
+
+// Poor man's /etc/passwd. Static since there's no btoa()/atob() in xpcshell.
+var basicauth = {};
+basicauth[LOWBYTES] = "Basic am9obmRvZTr/////";
+basicauth[Utils.encodeUTF8(JAPANESE)] = "Basic am9obmRvZTrjk7/jl7/jm7/jn78=";
+
+// Global var for the server password, read by info_collections(),
+// modified by change_password().
+var server_password;
+
+function login_handling(handler) {
+ return function (request, response) {
+ let basic = basicauth[server_password];
+
+ if (basic && (request.getHeader("Authorization") == basic)) {
+ handler(request, response);
+ } else {
+ let body = "Unauthorized";
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ response.bodyOutputStream.write(body, body.length);
+ }
+ };
+}
+
+function change_password(request, response) {
+ let body, statusCode, status;
+ let basic = basicauth[server_password];
+
+ if (basic && (request.getHeader("Authorization") == basic)) {
+ server_password = readBytesFromInputStream(request.bodyInputStream);
+ body = "";
+ statusCode = 200;
+ status = "OK";
+ } else {
+ statusCode = 401;
+ body = status = "Unauthorized";
+ }
+ response.setStatusLine(request.httpVersion, statusCode, status);
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+ let collections = collectionsHelper.collections;
+
+ ensureLegacyIdentityManager();
+
+ do_test_pending();
+ let server = httpd_setup({
+ "/1.1/johndoe/info/collections": login_handling(collectionsHelper.handler),
+ "/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler()),
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
+ "/user/1.0/johndoe/password": change_password
+ });
+
+ setBasicCredentials("johndoe", JAPANESE, "irrelevant");
+ Service.serverURL = server.baseURI;
+
+ try {
+ _("Try to log in with the password.");
+ server_password = "foobar";
+ do_check_false(Service.verifyLogin());
+ do_check_eq(server_password, "foobar");
+
+ _("Make the server password the low byte version of our password.");
+ server_password = LOWBYTES;
+ do_check_false(Service.verifyLogin());
+ do_check_eq(server_password, LOWBYTES);
+
+ _("Can't use a password that has the same low bytes as ours.");
+ server_password = Utils.encodeUTF8(JAPANESE);
+ Service.identity.basicPassword = APPLES;
+ do_check_false(Service.verifyLogin());
+ do_check_eq(server_password, Utils.encodeUTF8(JAPANESE));
+
+ } finally {
+ server.stop(do_test_finished);
+ Svc.Prefs.resetBranch("");
+ }
+}
diff --git a/services/sync/tests/unit/test_service_persistLogin.js b/services/sync/tests/unit/test_service_persistLogin.js
new file mode 100644
index 000000000..9d4a1e51a
--- /dev/null
+++ b/services/sync/tests/unit/test_service_persistLogin.js
@@ -0,0 +1,46 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ try {
+ // Ensure we have a blank slate to start.
+ ensureLegacyIdentityManager();
+ Services.logins.removeAllLogins();
+
+ setBasicCredentials("johndoe", "ilovejane", "abbbbbcccccdddddeeeeefffff");
+
+ _("Confirm initial environment is empty.");
+ let logins = Services.logins.findLogins({}, PWDMGR_HOST, null,
+ PWDMGR_PASSWORD_REALM);
+ do_check_eq(logins.length, 0);
+ logins = Services.logins.findLogins({}, PWDMGR_HOST, null,
+ PWDMGR_PASSPHRASE_REALM);
+ do_check_eq(logins.length, 0);
+
+ _("Persist logins to the login service");
+ Service.persistLogin();
+
+ _("The password has been persisted in the login service.");
+ logins = Services.logins.findLogins({}, PWDMGR_HOST, null,
+ PWDMGR_PASSWORD_REALM);
+ do_check_eq(logins.length, 1);
+ do_check_eq(logins[0].username, "johndoe");
+ do_check_eq(logins[0].password, "ilovejane");
+
+ _("The passphrase has been persisted in the login service.");
+ logins = Services.logins.findLogins({}, PWDMGR_HOST, null,
+ PWDMGR_PASSPHRASE_REALM);
+ do_check_eq(logins.length, 1);
+ do_check_eq(logins[0].username, "johndoe");
+ do_check_eq(logins[0].password, "abbbbbcccccdddddeeeeefffff");
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ Services.logins.removeAllLogins();
+ }
+}
diff --git a/services/sync/tests/unit/test_service_set_serverURL.js b/services/sync/tests/unit/test_service_set_serverURL.js
new file mode 100644
index 000000000..6fef2bfaa
--- /dev/null
+++ b/services/sync/tests/unit/test_service_set_serverURL.js
@@ -0,0 +1,13 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ Service.serverURL = "http://example.com/sync";
+ do_check_eq(Service.serverURL, "http://example.com/sync/");
+
+ Service.serverURL = "http://example.com/sync/";
+ do_check_eq(Service.serverURL, "http://example.com/sync/");
+}
+
diff --git a/services/sync/tests/unit/test_service_startOver.js b/services/sync/tests/unit/test_service_startOver.js
new file mode 100644
index 000000000..899420548
--- /dev/null
+++ b/services/sync/tests/unit/test_service_startOver.js
@@ -0,0 +1,101 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function BlaEngine() {
+ SyncEngine.call(this, "Bla", Service);
+}
+BlaEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+
+ removed: false,
+ removeClientData: function() {
+ this.removed = true;
+ }
+
+};
+
+Service.engineManager.register(BlaEngine);
+
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+add_identity_test(this, function* test_resetLocalData() {
+ yield configureIdentity();
+ Service.status.enforceBackoff = true;
+ Service.status.backoffInterval = 42;
+ Service.status.minimumNextSync = 23;
+ Service.persistLogin();
+
+ // Verify set up.
+ do_check_eq(Service.status.checkSetup(), STATUS_OK);
+
+ // Verify state that the observer sees.
+ let observerCalled = false;
+ Svc.Obs.add("weave:service:start-over", function onStartOver() {
+ Svc.Obs.remove("weave:service:start-over", onStartOver);
+ observerCalled = true;
+
+ do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
+ });
+
+ Service.startOver();
+ do_check_true(observerCalled);
+
+ // Verify the site was nuked from orbit.
+ do_check_eq(Svc.Prefs.get("username"), undefined);
+ do_check_eq(Service.identity.basicPassword, null);
+ do_check_eq(Service.identity.syncKey, null);
+
+ do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
+ do_check_false(Service.status.enforceBackoff);
+ do_check_eq(Service.status.backoffInterval, 0);
+ do_check_eq(Service.status.minimumNextSync, 0);
+});
+
+add_test(function test_removeClientData() {
+ let engine = Service.engineManager.get("bla");
+
+ // No cluster URL = no removal.
+ do_check_false(engine.removed);
+ Service.startOver();
+ do_check_false(engine.removed);
+
+ Service.serverURL = "https://localhost/";
+ Service.clusterURL = Service.serverURL;
+
+ do_check_false(engine.removed);
+ Service.startOver();
+ do_check_true(engine.removed);
+
+ run_next_test();
+});
+
+add_test(function test_reset_SyncScheduler() {
+ // Some non-default values for SyncScheduler's attributes.
+ Service.scheduler.idle = true;
+ Service.scheduler.hasIncomingItems = true;
+ Service.scheduler.numClients = 42;
+ Service.scheduler.nextSync = Date.now();
+ Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
+ Service.scheduler.syncInterval = Service.scheduler.activeInterval;
+
+ Service.startOver();
+
+ do_check_false(Service.scheduler.idle);
+ do_check_false(Service.scheduler.hasIncomingItems);
+ do_check_eq(Service.scheduler.numClients, 0);
+ do_check_eq(Service.scheduler.nextSync, 0);
+ do_check_eq(Service.scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
+ do_check_eq(Service.scheduler.syncInterval, Service.scheduler.singleDeviceInterval);
+
+ run_next_test();
+});
diff --git a/services/sync/tests/unit/test_service_startup.js b/services/sync/tests/unit/test_service_startup.js
new file mode 100644
index 000000000..5148f6d13
--- /dev/null
+++ b/services/sync/tests/unit/test_service_startup.js
@@ -0,0 +1,49 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Svc.Prefs.set("registerEngines", "Tab,Bookmarks,Form,History");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ validate_all_future_pings();
+ _("When imported, Service.onStartup is called");
+ initTestLogging("Trace");
+
+ let xps = Cc["@mozilla.org/weave/service;1"]
+ .getService(Ci.nsISupports)
+ .wrappedJSObject;
+ do_check_false(xps.enabled);
+
+ // Test fixtures
+ Service.identity.username = "johndoe";
+ do_check_true(xps.enabled);
+
+ Cu.import("resource://services-sync/service.js");
+
+ _("Service is enabled.");
+ do_check_eq(Service.enabled, true);
+
+ _("Engines are registered.");
+ let engines = Service.engineManager.getAll();
+ do_check_true(Utils.deepEquals(engines.map(engine => engine.name),
+ ['tabs', 'bookmarks', 'forms', 'history']));
+
+ _("Observers are notified of startup");
+ do_test_pending();
+
+ do_check_false(Service.status.ready);
+ do_check_false(xps.ready);
+ Observers.add("weave:service:ready", function (subject, data) {
+ do_check_true(Service.status.ready);
+ do_check_true(xps.ready);
+
+ // Clean up.
+ Svc.Prefs.resetBranch("");
+ do_test_finished();
+ });
+}
diff --git a/services/sync/tests/unit/test_service_sync_401.js b/services/sync/tests/unit/test_service_sync_401.js
new file mode 100644
index 000000000..9e9db8137
--- /dev/null
+++ b/services/sync/tests/unit/test_service_sync_401.js
@@ -0,0 +1,84 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function login_handling(handler) {
+ return function (request, response) {
+ if (basic_auth_matches(request, "johndoe", "ilovejane")) {
+ handler(request, response);
+ } else {
+ let body = "Unauthorized";
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.bodyOutputStream.write(body, body.length);
+ }
+ };
+}
+
+function run_test() {
+ let logger = Log.repository.rootLogger;
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+ let collections = collectionsHelper.collections;
+
+ do_test_pending();
+ let server = httpd_setup({
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
+ "/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler()),
+ "/1.1/johndoe/info/collections": login_handling(collectionsHelper.handler)
+ });
+
+ const GLOBAL_SCORE = 42;
+
+ try {
+ _("Set up test fixtures.");
+ new SyncTestingInfrastructure(server, "johndoe", "ilovejane", "foo");
+ Service.scheduler.globalScore = GLOBAL_SCORE;
+ // Avoid daily ping
+ Svc.Prefs.set("lastPing", Math.floor(Date.now() / 1000));
+
+ let threw = false;
+ Svc.Obs.add("weave:service:sync:error", function (subject, data) {
+ threw = true;
+ });
+
+ _("Initial state: We're successfully logged in.");
+ Service.login();
+ do_check_true(Service.isLoggedIn);
+ do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
+
+ _("Simulate having changed the password somewhere else.");
+ Service.identity.basicPassword = "ilovejosephine";
+
+ _("Let's try to sync.");
+ Service.sync();
+
+ _("Verify that sync() threw an exception.");
+ do_check_true(threw);
+
+ _("We're no longer logged in.");
+ do_check_false(Service.isLoggedIn);
+
+ _("Sync status won't have changed yet, because we haven't tried again.");
+
+ _("globalScore is reset upon starting a sync.");
+ do_check_eq(Service.scheduler.globalScore, 0);
+
+ _("Our next sync will fail appropriately.");
+ try {
+ Service.sync();
+ } catch (ex) {
+ }
+ do_check_eq(Service.status.login, LOGIN_FAILED_LOGIN_REJECTED);
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(do_test_finished);
+ }
+}
diff --git a/services/sync/tests/unit/test_service_sync_locked.js b/services/sync/tests/unit/test_service_sync_locked.js
new file mode 100644
index 000000000..ee952c7ee
--- /dev/null
+++ b/services/sync/tests/unit/test_service_sync_locked.js
@@ -0,0 +1,37 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ validate_all_future_pings();
+ let debug = [];
+ let info = [];
+
+ function augmentLogger(old) {
+ let d = old.debug;
+ let i = old.info;
+ // For the purposes of this test we don't need to do full formatting
+ // of the 2nd param, as the ones we care about are always strings.
+ old.debug = function(m, p) { debug.push(p ? m + ": " + p : m); d.call(old, m, p); }
+ old.info = function(m, p) { info.push(p ? m + ": " + p : m); i.call(old, m, p); }
+ return old;
+ }
+
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ augmentLogger(Service._log);
+
+ // Avoid daily ping
+ Svc.Prefs.set("lastPing", Math.floor(Date.now() / 1000));
+
+ _("Check that sync will log appropriately if already in 'progress'.");
+ Service._locked = true;
+ Service.sync();
+ Service._locked = false;
+
+ do_check_true(debug[debug.length - 2].startsWith("Exception calling WrappedLock: Could not acquire lock. Label: \"service.js: login\"."));
+ do_check_eq(info[info.length - 1], "Cannot start sync: already syncing?");
+}
+
diff --git a/services/sync/tests/unit/test_service_sync_remoteSetup.js b/services/sync/tests/unit/test_service_sync_remoteSetup.js
new file mode 100644
index 000000000..83dbf3cd7
--- /dev/null
+++ b/services/sync/tests/unit/test_service_sync_remoteSetup.js
@@ -0,0 +1,237 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/keys.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/fakeservices.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ validate_all_future_pings();
+ let logger = Log.repository.rootLogger;
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ let guidSvc = new FakeGUIDService();
+ let clients = new ServerCollection();
+ let meta_global = new ServerWBO('global');
+
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+ let collections = collectionsHelper.collections;
+
+ function wasCalledHandler(wbo) {
+ let handler = wbo.handler();
+ return function() {
+ wbo.wasCalled = true;
+ handler.apply(this, arguments);
+ };
+ }
+
+ let keysWBO = new ServerWBO("keys");
+ let cryptoColl = new ServerCollection({keys: keysWBO});
+ let metaColl = new ServerCollection({global: meta_global});
+ do_test_pending();
+
+ /**
+ * Handle the bulk DELETE request sent by wipeServer.
+ */
+ function storageHandler(request, response) {
+ do_check_eq("DELETE", request.method);
+ do_check_true(request.hasHeader("X-Confirm-Delete"));
+
+ _("Wiping out all collections.");
+ cryptoColl.delete({});
+ clients.delete({});
+ metaColl.delete({});
+
+ let ts = new_timestamp();
+ collectionsHelper.update_collection("crypto", ts);
+ collectionsHelper.update_collection("clients", ts);
+ collectionsHelper.update_collection("meta", ts);
+ return_timestamp(request, response, ts);
+ }
+
+ const GLOBAL_PATH = "/1.1/johndoe/storage/meta/global";
+ const INFO_PATH = "/1.1/johndoe/info/collections";
+
+ let handlers = {
+ "/1.1/johndoe/storage": storageHandler,
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", keysWBO.handler()),
+ "/1.1/johndoe/storage/crypto": upd("crypto", cryptoColl.handler()),
+ "/1.1/johndoe/storage/clients": upd("clients", clients.handler()),
+ "/1.1/johndoe/storage/meta": upd("meta", wasCalledHandler(metaColl)),
+ "/1.1/johndoe/storage/meta/global": upd("meta", wasCalledHandler(meta_global)),
+ "/1.1/johndoe/info/collections": collectionsHelper.handler
+ };
+
+ function mockHandler(path, mock) {
+ server.registerPathHandler(path, mock(handlers[path]));
+ return {
+ restore() { server.registerPathHandler(path, handlers[path]); }
+ }
+ }
+
+ let server = httpd_setup(handlers);
+
+ try {
+ _("Log in.");
+ ensureLegacyIdentityManager();
+ Service.serverURL = server.baseURI;
+
+ _("Checking Status.sync with no credentials.");
+ Service.verifyAndFetchSymmetricKeys();
+ do_check_eq(Service.status.sync, CREDENTIALS_CHANGED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_NO_PASSPHRASE);
+
+ _("Log in with an old secret phrase, is upgraded to Sync Key.");
+ Service.login("johndoe", "ilovejane", "my old secret phrase!!1!");
+ _("End of login");
+ do_check_true(Service.isLoggedIn);
+ do_check_true(Utils.isPassphrase(Service.identity.syncKey));
+ let syncKey = Service.identity.syncKey;
+ Service.startOver();
+
+ Service.serverURL = server.baseURI;
+ Service.login("johndoe", "ilovejane", syncKey);
+ do_check_true(Service.isLoggedIn);
+
+ _("Checking that remoteSetup returns true when credentials have changed.");
+ Service.recordManager.get(Service.metaURL).payload.syncID = "foobar";
+ do_check_true(Service._remoteSetup());
+
+ let returnStatusCode = (method, code) => (oldMethod) => (req, res) => {
+ if (req.method === method) {
+ res.setStatusLine(req.httpVersion, code, "");
+ } else {
+ oldMethod(req, res);
+ }
+ };
+
+ let mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 401));
+ Service.recordManager.del(Service.metaURL);
+ _("Checking that remoteSetup returns false on 401 on first get /meta/global.");
+ do_check_false(Service._remoteSetup());
+ mock.restore();
+
+ Service.login("johndoe", "ilovejane", syncKey);
+ mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 503));
+ Service.recordManager.del(Service.metaURL);
+ _("Checking that remoteSetup returns false on 503 on first get /meta/global.");
+ do_check_false(Service._remoteSetup());
+ do_check_eq(Service.status.sync, METARECORD_DOWNLOAD_FAIL);
+ mock.restore();
+
+ mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 404));
+ Service.recordManager.del(Service.metaURL);
+ _("Checking that remoteSetup recovers on 404 on first get /meta/global.");
+ do_check_true(Service._remoteSetup());
+ mock.restore();
+
+ let makeOutdatedMeta = () => {
+ Service.metaModified = 0;
+ let infoResponse = Service._fetchInfo();
+ return {
+ status: infoResponse.status,
+ obj: {
+ crypto: infoResponse.obj.crypto,
+ clients: infoResponse.obj.clients,
+ meta: 1
+ }
+ };
+ }
+
+ _("Checking that remoteSetup recovers on 404 on get /meta/global after clear cached one.");
+ mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 404));
+ Service.recordManager.set(Service.metaURL, { isNew: false });
+ do_check_true(Service._remoteSetup(makeOutdatedMeta()));
+ mock.restore();
+
+ _("Checking that remoteSetup returns false on 503 on get /meta/global after clear cached one.");
+ mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 503));
+ Service.status.sync = "";
+ Service.recordManager.set(Service.metaURL, { isNew: false });
+ do_check_false(Service._remoteSetup(makeOutdatedMeta()));
+ do_check_eq(Service.status.sync, "");
+ mock.restore();
+
+ metaColl.delete({});
+
+ _("Do an initial sync.");
+ let beforeSync = Date.now()/1000;
+ Service.sync();
+
+ _("Checking that remoteSetup returns true.");
+ do_check_true(Service._remoteSetup());
+
+ _("Verify that the meta record was uploaded.");
+ do_check_eq(meta_global.data.syncID, Service.syncID);
+ do_check_eq(meta_global.data.storageVersion, STORAGE_VERSION);
+ do_check_eq(meta_global.data.engines.clients.version, Service.clientsEngine.version);
+ do_check_eq(meta_global.data.engines.clients.syncID, Service.clientsEngine.syncID);
+
+ _("Set the collection info hash so that sync() will remember the modified times for future runs.");
+ collections.meta = Service.clientsEngine.lastSync;
+ collections.clients = Service.clientsEngine.lastSync;
+ Service.sync();
+
+ _("Sync again and verify that meta/global wasn't downloaded again");
+ meta_global.wasCalled = false;
+ Service.sync();
+ do_check_false(meta_global.wasCalled);
+
+ _("Fake modified records. This will cause a redownload, but not reupload since it hasn't changed.");
+ collections.meta += 42;
+ meta_global.wasCalled = false;
+
+ let metaModified = meta_global.modified;
+
+ Service.sync();
+ do_check_true(meta_global.wasCalled);
+ do_check_eq(metaModified, meta_global.modified);
+
+ _("Checking bad passphrases.");
+ let pp = Service.identity.syncKey;
+ Service.identity.syncKey = "notvalid";
+ do_check_false(Service.verifyAndFetchSymmetricKeys());
+ do_check_eq(Service.status.sync, CREDENTIALS_CHANGED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_INVALID_PASSPHRASE);
+ Service.identity.syncKey = pp;
+ do_check_true(Service.verifyAndFetchSymmetricKeys());
+
+ // changePassphrase wipes our keys, and they're regenerated on next sync.
+ _("Checking changed passphrase.");
+ let existingDefault = Service.collectionKeys.keyForCollection();
+ let existingKeysPayload = keysWBO.payload;
+ let newPassphrase = "bbbbbabcdeabcdeabcdeabcdea";
+ Service.changePassphrase(newPassphrase);
+
+ _("Local key cache is full, but different.");
+ do_check_true(!!Service.collectionKeys._default);
+ do_check_false(Service.collectionKeys._default.equals(existingDefault));
+
+ _("Server has new keys.");
+ do_check_true(!!keysWBO.payload);
+ do_check_true(!!keysWBO.modified);
+ do_check_neq(keysWBO.payload, existingKeysPayload);
+
+ // Try to screw up HMAC calculation.
+ // Re-encrypt keys with a new random keybundle, and upload them to the
+ // server, just as might happen with a second client.
+ _("Attempting to screw up HMAC by re-encrypting keys.");
+ let keys = Service.collectionKeys.asWBO();
+ let b = new BulkKeyBundle("hmacerror");
+ b.generateRandom();
+ collections.crypto = keys.modified = 100 + (Date.now()/1000); // Future modification time.
+ keys.encrypt(b);
+ keys.upload(Service.resource(Service.cryptoKeysURL));
+
+ do_check_false(Service.verifyAndFetchSymmetricKeys());
+ do_check_eq(Service.status.login, LOGIN_FAILED_INVALID_PASSPHRASE);
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(do_test_finished);
+ }
+}
diff --git a/services/sync/tests/unit/test_service_sync_specified.js b/services/sync/tests/unit/test_service_sync_specified.js
new file mode 100644
index 000000000..7cb0f9d9c
--- /dev/null
+++ b/services/sync/tests/unit/test_service_sync_specified.js
@@ -0,0 +1,160 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+initTestLogging();
+Service.engineManager.clear();
+
+let syncedEngines = []
+
+function SteamEngine() {
+ SyncEngine.call(this, "Steam", Service);
+}
+SteamEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ _sync: function _sync() {
+ syncedEngines.push(this.name);
+ }
+};
+Service.engineManager.register(SteamEngine);
+
+function StirlingEngine() {
+ SyncEngine.call(this, "Stirling", Service);
+}
+StirlingEngine.prototype = {
+ __proto__: SteamEngine.prototype,
+ _sync: function _sync() {
+ syncedEngines.push(this.name);
+ }
+};
+Service.engineManager.register(StirlingEngine);
+
+// Tracking info/collections.
+var collectionsHelper = track_collections_helper();
+var upd = collectionsHelper.with_updated_collection;
+
+function sync_httpd_setup(handlers) {
+
+ handlers["/1.1/johndoe/info/collections"] = collectionsHelper.handler;
+ delete collectionsHelper.collections.crypto;
+ delete collectionsHelper.collections.meta;
+
+ let cr = new ServerWBO("keys");
+ handlers["/1.1/johndoe/storage/crypto/keys"] =
+ upd("crypto", cr.handler());
+
+ let cl = new ServerCollection();
+ handlers["/1.1/johndoe/storage/clients"] =
+ upd("clients", cl.handler());
+
+ return httpd_setup(handlers);
+}
+
+function setUp() {
+ syncedEngines = [];
+ let engine = Service.engineManager.get("steam");
+ engine.enabled = true;
+ engine.syncPriority = 1;
+
+ engine = Service.engineManager.get("stirling");
+ engine.enabled = true;
+ engine.syncPriority = 2;
+
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": new ServerWBO("global", {}).handler(),
+ });
+ new SyncTestingInfrastructure(server, "johndoe", "ilovejane",
+ "abcdeabcdeabcdeabcdeabcdea");
+ return server;
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ validate_all_future_pings();
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+
+ run_next_test();
+}
+
+add_test(function test_noEngines() {
+ _("Test: An empty array of engines to sync does nothing.");
+ let server = setUp();
+
+ try {
+ _("Sync with no engines specified.");
+ Service.sync([]);
+ deepEqual(syncedEngines, [], "no engines were synced");
+
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_oneEngine() {
+ _("Test: Only one engine is synced.");
+ let server = setUp();
+
+ try {
+
+ _("Sync with 1 engine specified.");
+ Service.sync(["steam"]);
+ deepEqual(syncedEngines, ["steam"])
+
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_bothEnginesSpecified() {
+ _("Test: All engines are synced when specified in the correct order (1).");
+ let server = setUp();
+
+ try {
+ _("Sync with both engines specified.");
+ Service.sync(["steam", "stirling"]);
+ deepEqual(syncedEngines, ["steam", "stirling"])
+
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_bothEnginesSpecified() {
+ _("Test: All engines are synced when specified in the correct order (2).");
+ let server = setUp();
+
+ try {
+ _("Sync with both engines specified.");
+ Service.sync(["stirling", "steam"]);
+ deepEqual(syncedEngines, ["stirling", "steam"])
+
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_bothEnginesDefault() {
+ _("Test: All engines are synced when nothing is specified.");
+ let server = setUp();
+
+ try {
+ Service.sync();
+ deepEqual(syncedEngines, ["steam", "stirling"])
+
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
diff --git a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
new file mode 100644
index 000000000..ee1800fd3
--- /dev/null
+++ b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
@@ -0,0 +1,442 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+initTestLogging();
+Service.engineManager.clear();
+
+function QuietStore() {
+ Store.call("Quiet");
+}
+QuietStore.prototype = {
+ getAllIDs: function getAllIDs() {
+ return [];
+ }
+}
+
+function SteamEngine() {
+ SyncEngine.call(this, "Steam", Service);
+}
+SteamEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ // We're not interested in engine sync but what the service does.
+ _storeObj: QuietStore,
+
+ _sync: function _sync() {
+ this._syncStartup();
+ }
+};
+Service.engineManager.register(SteamEngine);
+
+function StirlingEngine() {
+ SyncEngine.call(this, "Stirling", Service);
+}
+StirlingEngine.prototype = {
+ __proto__: SteamEngine.prototype,
+ // This engine's enabled state is the same as the SteamEngine's.
+ get prefName() {
+ return "steam";
+ }
+};
+Service.engineManager.register(StirlingEngine);
+
+// Tracking info/collections.
+var collectionsHelper = track_collections_helper();
+var upd = collectionsHelper.with_updated_collection;
+
+function sync_httpd_setup(handlers) {
+
+ handlers["/1.1/johndoe/info/collections"] = collectionsHelper.handler;
+ delete collectionsHelper.collections.crypto;
+ delete collectionsHelper.collections.meta;
+
+ let cr = new ServerWBO("keys");
+ handlers["/1.1/johndoe/storage/crypto/keys"] =
+ upd("crypto", cr.handler());
+
+ let cl = new ServerCollection();
+ handlers["/1.1/johndoe/storage/clients"] =
+ upd("clients", cl.handler());
+
+ return httpd_setup(handlers);
+}
+
+function setUp(server) {
+ new SyncTestingInfrastructure(server, "johndoe", "ilovejane",
+ "abcdeabcdeabcdeabcdeabcdea");
+ // Ensure that the server has valid keys so that logging in will work and not
+ // result in a server wipe, rendering many of these tests useless.
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+}
+
+const PAYLOAD = 42;
+
+
+function run_test() {
+ initTestLogging("Trace");
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
+ validate_all_future_pings();
+
+ run_next_test();
+}
+
+add_test(function test_newAccount() {
+ _("Test: New account does not disable locally enabled engines.");
+ let engine = Service.engineManager.get("steam");
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": new ServerWBO("global", {}).handler(),
+ "/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler()
+ });
+ setUp(server);
+
+ try {
+ _("Engine is enabled from the beginning.");
+ Service._ignorePrefObserver = true;
+ engine.enabled = true;
+ Service._ignorePrefObserver = false;
+
+ _("Sync.");
+ Service.sync();
+
+ _("Engine continues to be enabled.");
+ do_check_true(engine.enabled);
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_enabledLocally() {
+ _("Test: Engine is disabled on remote clients and enabled locally");
+ Service.syncID = "abcdefghij";
+ let engine = Service.engineManager.get("steam");
+ let metaWBO = new ServerWBO("global", {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {}});
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": metaWBO.handler(),
+ "/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler()
+ });
+ setUp(server);
+
+ try {
+ _("Enable engine locally.");
+ engine.enabled = true;
+
+ _("Sync.");
+ Service.sync();
+
+ _("Meta record now contains the new engine.");
+ do_check_true(!!metaWBO.data.engines.steam);
+
+ _("Engine continues to be enabled.");
+ do_check_true(engine.enabled);
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_disabledLocally() {
+ _("Test: Engine is enabled on remote clients and disabled locally");
+ Service.syncID = "abcdefghij";
+ let engine = Service.engineManager.get("steam");
+ let metaWBO = new ServerWBO("global", {
+ syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {steam: {syncID: engine.syncID,
+ version: engine.version}}
+ });
+ let steamCollection = new ServerWBO("steam", PAYLOAD);
+
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": metaWBO.handler(),
+ "/1.1/johndoe/storage/steam": steamCollection.handler()
+ });
+ setUp(server);
+
+ try {
+ _("Disable engine locally.");
+ Service._ignorePrefObserver = true;
+ engine.enabled = true;
+ Service._ignorePrefObserver = false;
+ engine.enabled = false;
+
+ _("Sync.");
+ Service.sync();
+
+ _("Meta record no longer contains engine.");
+ do_check_false(!!metaWBO.data.engines.steam);
+
+ _("Server records are wiped.");
+ do_check_eq(steamCollection.payload, undefined);
+
+ _("Engine continues to be disabled.");
+ do_check_false(engine.enabled);
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_disabledLocally_wipe503() {
+ _("Test: Engine is enabled on remote clients and disabled locally");
+ Service.syncID = "abcdefghij";
+ let engine = Service.engineManager.get("steam");
+ let metaWBO = new ServerWBO("global", {
+ syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {steam: {syncID: engine.syncID,
+ version: engine.version}}
+ });
+ let steamCollection = new ServerWBO("steam", PAYLOAD);
+
+ function service_unavailable(request, response) {
+ let body = "Service Unavailable";
+ response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
+ response.setHeader("Retry-After", "23");
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": metaWBO.handler(),
+ "/1.1/johndoe/storage/steam": service_unavailable
+ });
+ setUp(server);
+
+ _("Disable engine locally.");
+ Service._ignorePrefObserver = true;
+ engine.enabled = true;
+ Service._ignorePrefObserver = false;
+ engine.enabled = false;
+
+ Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
+ Svc.Obs.remove("weave:ui:sync:error", onSyncError);
+
+ do_check_eq(Service.status.sync, SERVER_MAINTENANCE);
+
+ Service.startOver();
+ server.stop(run_next_test);
+ });
+
+ _("Sync.");
+ Service.errorHandler.syncAndReportErrors();
+});
+
+add_test(function test_enabledRemotely() {
+ _("Test: Engine is disabled locally and enabled on a remote client");
+ Service.syncID = "abcdefghij";
+ let engine = Service.engineManager.get("steam");
+ let metaWBO = new ServerWBO("global", {
+ syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {steam: {syncID: engine.syncID,
+ version: engine.version}}
+ });
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global":
+ upd("meta", metaWBO.handler()),
+
+ "/1.1/johndoe/storage/steam":
+ upd("steam", new ServerWBO("steam", {}).handler())
+ });
+ setUp(server);
+
+ // We need to be very careful how we do this, so that we don't trigger a
+ // fresh start!
+ try {
+ _("Upload some keys to avoid a fresh start.");
+ let wbo = Service.collectionKeys.generateNewKeysWBO();
+ wbo.encrypt(Service.identity.syncKeyBundle);
+ do_check_eq(200, wbo.upload(Service.resource(Service.cryptoKeysURL)).status);
+
+ _("Engine is disabled.");
+ do_check_false(engine.enabled);
+
+ _("Sync.");
+ Service.sync();
+
+ _("Engine is enabled.");
+ do_check_true(engine.enabled);
+
+ _("Meta record still present.");
+ do_check_eq(metaWBO.data.engines.steam.syncID, engine.syncID);
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_disabledRemotelyTwoClients() {
+ _("Test: Engine is enabled locally and disabled on a remote client... with two clients.");
+ Service.syncID = "abcdefghij";
+ let engine = Service.engineManager.get("steam");
+ let metaWBO = new ServerWBO("global", {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {}});
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global":
+ upd("meta", metaWBO.handler()),
+
+ "/1.1/johndoe/storage/steam":
+ upd("steam", new ServerWBO("steam", {}).handler())
+ });
+ setUp(server);
+
+ try {
+ _("Enable engine locally.");
+ Service._ignorePrefObserver = true;
+ engine.enabled = true;
+ Service._ignorePrefObserver = false;
+
+ _("Sync.");
+ Service.sync();
+
+ _("Disable engine by deleting from meta/global.");
+ let d = metaWBO.data;
+ delete d.engines["steam"];
+ metaWBO.payload = JSON.stringify(d);
+ metaWBO.modified = Date.now() / 1000;
+
+ _("Add a second client and verify that the local pref is changed.");
+ Service.clientsEngine._store._remoteClients["foobar"] = {name: "foobar", type: "desktop"};
+ Service.sync();
+
+ _("Engine is disabled.");
+ do_check_false(engine.enabled);
+
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_disabledRemotely() {
+ _("Test: Engine is enabled locally and disabled on a remote client");
+ Service.syncID = "abcdefghij";
+ let engine = Service.engineManager.get("steam");
+ let metaWBO = new ServerWBO("global", {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {}});
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": metaWBO.handler(),
+ "/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler()
+ });
+ setUp(server);
+
+ try {
+ _("Enable engine locally.");
+ Service._ignorePrefObserver = true;
+ engine.enabled = true;
+ Service._ignorePrefObserver = false;
+
+ _("Sync.");
+ Service.sync();
+
+ _("Engine is not disabled: only one client.");
+ do_check_true(engine.enabled);
+
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_dependentEnginesEnabledLocally() {
+ _("Test: Engine is disabled on remote clients and enabled locally");
+ Service.syncID = "abcdefghij";
+ let steamEngine = Service.engineManager.get("steam");
+ let stirlingEngine = Service.engineManager.get("stirling");
+ let metaWBO = new ServerWBO("global", {syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {}});
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": metaWBO.handler(),
+ "/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler(),
+ "/1.1/johndoe/storage/stirling": new ServerWBO("stirling", {}).handler()
+ });
+ setUp(server);
+
+ try {
+ _("Enable engine locally. Doing it on one is enough.");
+ steamEngine.enabled = true;
+
+ _("Sync.");
+ Service.sync();
+
+ _("Meta record now contains the new engines.");
+ do_check_true(!!metaWBO.data.engines.steam);
+ do_check_true(!!metaWBO.data.engines.stirling);
+
+ _("Engines continue to be enabled.");
+ do_check_true(steamEngine.enabled);
+ do_check_true(stirlingEngine.enabled);
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
+
+add_test(function test_dependentEnginesDisabledLocally() {
+ _("Test: Two dependent engines are enabled on remote clients and disabled locally");
+ Service.syncID = "abcdefghij";
+ let steamEngine = Service.engineManager.get("steam");
+ let stirlingEngine = Service.engineManager.get("stirling");
+ let metaWBO = new ServerWBO("global", {
+ syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {steam: {syncID: steamEngine.syncID,
+ version: steamEngine.version},
+ stirling: {syncID: stirlingEngine.syncID,
+ version: stirlingEngine.version}}
+ });
+
+ let steamCollection = new ServerWBO("steam", PAYLOAD);
+ let stirlingCollection = new ServerWBO("stirling", PAYLOAD);
+
+ let server = sync_httpd_setup({
+ "/1.1/johndoe/storage/meta/global": metaWBO.handler(),
+ "/1.1/johndoe/storage/steam": steamCollection.handler(),
+ "/1.1/johndoe/storage/stirling": stirlingCollection.handler()
+ });
+ setUp(server);
+
+ try {
+ _("Disable engines locally. Doing it on one is enough.");
+ Service._ignorePrefObserver = true;
+ steamEngine.enabled = true;
+ do_check_true(stirlingEngine.enabled);
+ Service._ignorePrefObserver = false;
+ steamEngine.enabled = false;
+ do_check_false(stirlingEngine.enabled);
+
+ _("Sync.");
+ Service.sync();
+
+ _("Meta record no longer contains engines.");
+ do_check_false(!!metaWBO.data.engines.steam);
+ do_check_false(!!metaWBO.data.engines.stirling);
+
+ _("Server records are wiped.");
+ do_check_eq(steamCollection.payload, undefined);
+ do_check_eq(stirlingCollection.payload, undefined);
+
+ _("Engines continue to be disabled.");
+ do_check_false(steamEngine.enabled);
+ do_check_false(stirlingEngine.enabled);
+ } finally {
+ Service.startOver();
+ server.stop(run_next_test);
+ }
+});
diff --git a/services/sync/tests/unit/test_service_verifyLogin.js b/services/sync/tests/unit/test_service_verifyLogin.js
new file mode 100644
index 000000000..2a27fd1b0
--- /dev/null
+++ b/services/sync/tests/unit/test_service_verifyLogin.js
@@ -0,0 +1,122 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function login_handling(handler) {
+ return function (request, response) {
+ if (basic_auth_matches(request, "johndoe", "ilovejane")) {
+ handler(request, response);
+ } else {
+ let body = "Unauthorized";
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.bodyOutputStream.write(body, body.length);
+ }
+ };
+}
+
+function service_unavailable(request, response) {
+ let body = "Service Unavailable";
+ response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
+ response.setHeader("Retry-After", "42");
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function run_test() {
+ let logger = Log.repository.rootLogger;
+ Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
+ ensureLegacyIdentityManager();
+ // This test expects a clean slate -- no saved passphrase.
+ Services.logins.removeAllLogins();
+ let johnHelper = track_collections_helper();
+ let johnU = johnHelper.with_updated_collection;
+ let johnColls = johnHelper.collections;
+
+ do_test_pending();
+
+ let server;
+ function weaveHandler (request, response) {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ let body = server.baseURI + "/api/";
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ server = httpd_setup({
+ "/api/1.1/johndoe/info/collections": login_handling(johnHelper.handler),
+ "/api/1.1/janedoe/info/collections": service_unavailable,
+
+ "/api/1.1/johndoe/storage/crypto/keys": johnU("crypto", new ServerWBO("keys").handler()),
+ "/api/1.1/johndoe/storage/meta/global": johnU("meta", new ServerWBO("global").handler()),
+ "/user/1.0/johndoe/node/weave": weaveHandler,
+ });
+
+ try {
+ Service.serverURL = server.baseURI;
+
+ _("Force the initial state.");
+ Service.status.service = STATUS_OK;
+ do_check_eq(Service.status.service, STATUS_OK);
+
+ _("Credentials won't check out because we're not configured yet.");
+ Service.status.resetSync();
+ do_check_false(Service.verifyLogin());
+ do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_NO_USERNAME);
+
+ _("Try again with username and password set.");
+ Service.status.resetSync();
+ setBasicCredentials("johndoe", "ilovejane", null);
+ do_check_false(Service.verifyLogin());
+ do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_NO_PASSPHRASE);
+
+ _("verifyLogin() has found out the user's cluster URL, though.");
+ do_check_eq(Service.clusterURL, server.baseURI + "/api/");
+
+ _("Success if passphrase is set.");
+ Service.status.resetSync();
+ Service.identity.syncKey = "foo";
+ do_check_true(Service.verifyLogin());
+ do_check_eq(Service.status.service, STATUS_OK);
+ do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
+
+ _("If verifyLogin() encounters a server error, it flips on the backoff flag and notifies observers on a 503 with Retry-After.");
+ Service.status.resetSync();
+ Service.identity.account = "janedoe";
+ Service._updateCachedURLs();
+ do_check_false(Service.status.enforceBackoff);
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
+ do_check_false(Service.verifyLogin());
+ do_check_true(Service.status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Service.status.service, LOGIN_FAILED);
+ do_check_eq(Service.status.login, SERVER_MAINTENANCE);
+
+ _("Ensure a network error when finding the cluster sets the right Status bits.");
+ Service.status.resetSync();
+ Service.serverURL = "http://localhost:12345/";
+ do_check_false(Service.verifyLogin());
+ do_check_eq(Service.status.service, LOGIN_FAILED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ _("Ensure a network error when getting the collection info sets the right Status bits.");
+ Service.status.resetSync();
+ Service.clusterURL = "http://localhost:12345/";
+ do_check_false(Service.verifyLogin());
+ do_check_eq(Service.status.service, LOGIN_FAILED);
+ do_check_eq(Service.status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ server.stop(do_test_finished);
+ }
+}
diff --git a/services/sync/tests/unit/test_service_wipeClient.js b/services/sync/tests/unit/test_service_wipeClient.js
new file mode 100644
index 000000000..aab769229
--- /dev/null
+++ b/services/sync/tests/unit/test_service_wipeClient.js
@@ -0,0 +1,112 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/identity.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.clear();
+
+function CanDecryptEngine() {
+ SyncEngine.call(this, "CanDecrypt", Service);
+}
+CanDecryptEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+
+ // Override these methods with mocks for the test
+ canDecrypt: function canDecrypt() {
+ return true;
+ },
+
+ wasWiped: false,
+ wipeClient: function wipeClient() {
+ this.wasWiped = true;
+ }
+};
+Service.engineManager.register(CanDecryptEngine);
+
+
+function CannotDecryptEngine() {
+ SyncEngine.call(this, "CannotDecrypt", Service);
+}
+CannotDecryptEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+
+ // Override these methods with mocks for the test
+ canDecrypt: function canDecrypt() {
+ return false;
+ },
+
+ wasWiped: false,
+ wipeClient: function wipeClient() {
+ this.wasWiped = true;
+ }
+};
+Service.engineManager.register(CannotDecryptEngine);
+
+
+add_test(function test_withEngineList() {
+ try {
+ _("Ensure initial scenario.");
+ do_check_false(Service.engineManager.get("candecrypt").wasWiped);
+ do_check_false(Service.engineManager.get("cannotdecrypt").wasWiped);
+
+ _("Wipe local engine data.");
+ Service.wipeClient(["candecrypt", "cannotdecrypt"]);
+
+ _("Ensure only the engine that can decrypt was wiped.");
+ do_check_true(Service.engineManager.get("candecrypt").wasWiped);
+ do_check_false(Service.engineManager.get("cannotdecrypt").wasWiped);
+ } finally {
+ Service.engineManager.get("candecrypt").wasWiped = false;
+ Service.engineManager.get("cannotdecrypt").wasWiped = false;
+ Service.startOver();
+ }
+
+ run_next_test();
+});
+
+add_test(function test_startOver_clears_keys() {
+ generateNewKeys(Service.collectionKeys);
+ do_check_true(!!Service.collectionKeys.keyForCollection());
+ Service.startOver();
+ do_check_false(!!Service.collectionKeys.keyForCollection());
+
+ run_next_test();
+});
+
+add_test(function test_credentials_preserved() {
+ _("Ensure that credentials are preserved if client is wiped.");
+
+ // Required for wipeClient().
+ ensureLegacyIdentityManager();
+ Service.identity.account = "testaccount";
+ Service.identity.basicPassword = "testpassword";
+ Service.clusterURL = "http://dummy:9000/";
+ let key = Utils.generatePassphrase();
+ Service.identity.syncKey = key;
+ Service.identity.persistCredentials();
+
+ // Simulate passwords engine wipe without all the overhead. To do this
+ // properly would require extra test infrastructure.
+ Services.logins.removeAllLogins();
+ Service.wipeClient();
+
+ let id = new IdentityManager();
+ do_check_eq(id.account, "testaccount");
+ do_check_eq(id.basicPassword, "testpassword");
+ do_check_eq(id.syncKey, key);
+
+ Service.startOver();
+
+ run_next_test();
+});
+
+function run_test() {
+ initTestLogging();
+
+ run_next_test();
+}
diff --git a/services/sync/tests/unit/test_service_wipeServer.js b/services/sync/tests/unit/test_service_wipeServer.js
new file mode 100644
index 000000000..9320f4b88
--- /dev/null
+++ b/services/sync/tests/unit/test_service_wipeServer.js
@@ -0,0 +1,242 @@
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://testing-common/services/sync/fakeservices.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Svc.DefaultPrefs.set("registerEngines", "");
+Cu.import("resource://services-sync/service.js");
+
+// configure the identity we use for this test.
+identityConfig = makeIdentityConfig({username: "johndoe"});
+
+function FakeCollection() {
+ this.deleted = false;
+}
+FakeCollection.prototype = {
+ handler: function() {
+ let self = this;
+ return function(request, response) {
+ let body = "";
+ self.timestamp = new_timestamp();
+ let timestamp = "" + self.timestamp;
+ if (request.method == "DELETE") {
+ body = timestamp;
+ self.deleted = true;
+ }
+ response.setHeader("X-Weave-Timestamp", timestamp);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ };
+ }
+};
+
+function* setUpTestFixtures(server) {
+ let cryptoService = new FakeCryptoService();
+
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+
+ yield configureIdentity(identityConfig);
+}
+
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+function promiseStopServer(server) {
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ return deferred.promise;
+}
+
+add_identity_test(this, function* test_wipeServer_list_success() {
+ _("Service.wipeServer() deletes collections given as argument.");
+
+ let steam_coll = new FakeCollection();
+ let diesel_coll = new FakeCollection();
+
+ let server = httpd_setup({
+ "/1.1/johndoe/storage/steam": steam_coll.handler(),
+ "/1.1/johndoe/storage/diesel": diesel_coll.handler(),
+ "/1.1/johndoe/storage/petrol": httpd_handler(404, "Not Found")
+ });
+
+ try {
+ yield setUpTestFixtures(server);
+ new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
+
+ _("Confirm initial environment.");
+ do_check_false(steam_coll.deleted);
+ do_check_false(diesel_coll.deleted);
+
+ _("wipeServer() will happily ignore the non-existent collection and use the timestamp of the last DELETE that was successful.");
+ let timestamp = Service.wipeServer(["steam", "diesel", "petrol"]);
+ do_check_eq(timestamp, diesel_coll.timestamp);
+
+ _("wipeServer stopped deleting after encountering an error with the 'petrol' collection, thus only 'steam' has been deleted.");
+ do_check_true(steam_coll.deleted);
+ do_check_true(diesel_coll.deleted);
+
+ } finally {
+ yield promiseStopServer(server);
+ Svc.Prefs.resetBranch("");
+ }
+});
+
+add_identity_test(this, function* test_wipeServer_list_503() {
+ _("Service.wipeServer() deletes collections given as argument.");
+
+ let steam_coll = new FakeCollection();
+ let diesel_coll = new FakeCollection();
+
+ let server = httpd_setup({
+ "/1.1/johndoe/storage/steam": steam_coll.handler(),
+ "/1.1/johndoe/storage/petrol": httpd_handler(503, "Service Unavailable"),
+ "/1.1/johndoe/storage/diesel": diesel_coll.handler()
+ });
+
+ try {
+ yield setUpTestFixtures(server);
+ new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
+
+ _("Confirm initial environment.");
+ do_check_false(steam_coll.deleted);
+ do_check_false(diesel_coll.deleted);
+
+ _("wipeServer() will happily ignore the non-existent collection, delete the 'steam' collection and abort after an receiving an error on the 'petrol' collection.");
+ let error;
+ try {
+ Service.wipeServer(["non-existent", "steam", "petrol", "diesel"]);
+ do_throw("Should have thrown!");
+ } catch(ex) {
+ error = ex;
+ }
+ _("wipeServer() threw this exception: " + error);
+ do_check_eq(error.status, 503);
+
+ _("wipeServer stopped deleting after encountering an error with the 'petrol' collection, thus only 'steam' has been deleted.");
+ do_check_true(steam_coll.deleted);
+ do_check_false(diesel_coll.deleted);
+
+ } finally {
+ yield promiseStopServer(server);
+ Svc.Prefs.resetBranch("");
+ }
+});
+
+add_identity_test(this, function* test_wipeServer_all_success() {
+ _("Service.wipeServer() deletes all the things.");
+
+ /**
+ * Handle the bulk DELETE request sent by wipeServer.
+ */
+ let deleted = false;
+ let serverTimestamp;
+ function storageHandler(request, response) {
+ do_check_eq("DELETE", request.method);
+ do_check_true(request.hasHeader("X-Confirm-Delete"));
+ deleted = true;
+ serverTimestamp = return_timestamp(request, response);
+ }
+
+ let server = httpd_setup({
+ "/1.1/johndoe/storage": storageHandler
+ });
+ yield setUpTestFixtures(server);
+
+ _("Try deletion.");
+ new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
+ let returnedTimestamp = Service.wipeServer();
+ do_check_true(deleted);
+ do_check_eq(returnedTimestamp, serverTimestamp);
+
+ yield promiseStopServer(server);
+ Svc.Prefs.resetBranch("");
+});
+
+add_identity_test(this, function* test_wipeServer_all_404() {
+ _("Service.wipeServer() accepts a 404.");
+
+ /**
+ * Handle the bulk DELETE request sent by wipeServer. Returns a 404.
+ */
+ let deleted = false;
+ let serverTimestamp;
+ function storageHandler(request, response) {
+ do_check_eq("DELETE", request.method);
+ do_check_true(request.hasHeader("X-Confirm-Delete"));
+ deleted = true;
+ serverTimestamp = new_timestamp();
+ response.setHeader("X-Weave-Timestamp", "" + serverTimestamp);
+ response.setStatusLine(request.httpVersion, 404, "Not Found");
+ }
+
+ let server = httpd_setup({
+ "/1.1/johndoe/storage": storageHandler
+ });
+ yield setUpTestFixtures(server);
+
+ _("Try deletion.");
+ new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
+ let returnedTimestamp = Service.wipeServer();
+ do_check_true(deleted);
+ do_check_eq(returnedTimestamp, serverTimestamp);
+
+ yield promiseStopServer(server);
+ Svc.Prefs.resetBranch("");
+});
+
+add_identity_test(this, function* test_wipeServer_all_503() {
+ _("Service.wipeServer() throws if it encounters a non-200/404 response.");
+
+ /**
+ * Handle the bulk DELETE request sent by wipeServer. Returns a 503.
+ */
+ function storageHandler(request, response) {
+ do_check_eq("DELETE", request.method);
+ do_check_true(request.hasHeader("X-Confirm-Delete"));
+ response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
+ }
+
+ let server = httpd_setup({
+ "/1.1/johndoe/storage": storageHandler
+ });
+ yield setUpTestFixtures(server);
+
+ _("Try deletion.");
+ let error;
+ try {
+ new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
+ Service.wipeServer();
+ do_throw("Should have thrown!");
+ } catch (ex) {
+ error = ex;
+ }
+ do_check_eq(error.status, 503);
+
+ yield promiseStopServer(server);
+ Svc.Prefs.resetBranch("");
+});
+
+add_identity_test(this, function* test_wipeServer_all_connectionRefused() {
+ _("Service.wipeServer() throws if it encounters a network problem.");
+ let server = httpd_setup({});
+ yield setUpTestFixtures(server);
+
+ Service.serverURL = "http://localhost:4352/";
+ Service.clusterURL = "http://localhost:4352/";
+
+ _("Try deletion.");
+ try {
+ Service.wipeServer();
+ do_throw("Should have thrown!");
+ } catch (ex) {
+ do_check_eq(ex.result, Cr.NS_ERROR_CONNECTION_REFUSED);
+ }
+
+ Svc.Prefs.resetBranch("");
+ yield promiseStopServer(server);
+});
diff --git a/services/sync/tests/unit/test_status.js b/services/sync/tests/unit/test_status.js
new file mode 100644
index 000000000..378aafe90
--- /dev/null
+++ b/services/sync/tests/unit/test_status.js
@@ -0,0 +1,91 @@
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/status.js");
+
+function run_test() {
+
+ // Check initial states
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.backoffInterval, 0);
+ do_check_eq(Status.minimumNextSync, 0);
+
+ do_check_eq(Status.service, STATUS_OK);
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_eq(Status.login, LOGIN_SUCCEEDED);
+ for (let name in Status.engines) {
+ do_throw('Status.engines should be empty.');
+ }
+ do_check_eq(Status.partial, false);
+
+
+ // Check login status
+ for (let code of [LOGIN_FAILED_NO_USERNAME,
+ LOGIN_FAILED_NO_PASSWORD,
+ LOGIN_FAILED_NO_PASSPHRASE]) {
+ Status.login = code;
+ do_check_eq(Status.login, code);
+ do_check_eq(Status.service, CLIENT_NOT_CONFIGURED);
+ Status.resetSync();
+ }
+
+ Status.login = LOGIN_FAILED;
+ do_check_eq(Status.login, LOGIN_FAILED);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ Status.resetSync();
+
+ Status.login = LOGIN_SUCCEEDED;
+ do_check_eq(Status.login, LOGIN_SUCCEEDED);
+ do_check_eq(Status.service, STATUS_OK);
+ Status.resetSync();
+
+
+ // Check sync status
+ Status.sync = SYNC_FAILED;
+ do_check_eq(Status.sync, SYNC_FAILED);
+ do_check_eq(Status.service, SYNC_FAILED);
+
+ Status.sync = SYNC_SUCCEEDED;
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ do_check_eq(Status.service, STATUS_OK);
+
+ Status.resetSync();
+
+
+ // Check engine status
+ Status.engines = ["testEng1", ENGINE_SUCCEEDED];
+ do_check_eq(Status.engines["testEng1"], ENGINE_SUCCEEDED);
+ do_check_eq(Status.service, STATUS_OK);
+
+ Status.engines = ["testEng2", ENGINE_DOWNLOAD_FAIL];
+ do_check_eq(Status.engines["testEng1"], ENGINE_SUCCEEDED);
+ do_check_eq(Status.engines["testEng2"], ENGINE_DOWNLOAD_FAIL);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+ Status.engines = ["testEng3", ENGINE_SUCCEEDED];
+ do_check_eq(Status.engines["testEng1"], ENGINE_SUCCEEDED);
+ do_check_eq(Status.engines["testEng2"], ENGINE_DOWNLOAD_FAIL);
+ do_check_eq(Status.engines["testEng3"], ENGINE_SUCCEEDED);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+
+ // Check resetSync
+ Status.sync = SYNC_FAILED;
+ Status.resetSync();
+
+ do_check_eq(Status.service, STATUS_OK);
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ for (name in Status.engines) {
+ do_throw('Status.engines should be empty.');
+ }
+
+
+ // Check resetBackoff
+ Status.enforceBackoff = true;
+ Status.backOffInterval = 4815162342;
+ Status.backOffInterval = 42;
+ Status.resetBackoff();
+
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.backoffInterval, 0);
+ do_check_eq(Status.minimumNextSync, 0);
+
+}
diff --git a/services/sync/tests/unit/test_status_checkSetup.js b/services/sync/tests/unit/test_status_checkSetup.js
new file mode 100644
index 000000000..64a6aac93
--- /dev/null
+++ b/services/sync/tests/unit/test_status_checkSetup.js
@@ -0,0 +1,45 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ ensureLegacyIdentityManager();
+
+ try {
+ _("Ensure fresh config.");
+ Status._authManager.deleteSyncCredentials();
+
+ _("Fresh setup, we're not configured.");
+ do_check_eq(Status.checkSetup(), CLIENT_NOT_CONFIGURED);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_USERNAME);
+ Status.resetSync();
+
+ _("Let's provide a username.");
+ Status._authManager.username = "johndoe";
+ do_check_eq(Status.checkSetup(), CLIENT_NOT_CONFIGURED);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
+ Status.resetSync();
+
+ do_check_neq(Status._authManager.username, null);
+
+ _("Let's provide a password.");
+ Status._authManager.basicPassword = "carotsalad";
+ do_check_eq(Status.checkSetup(), CLIENT_NOT_CONFIGURED);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
+ Status.resetSync();
+
+ _("Let's provide a passphrase");
+ Status._authManager.syncKey = "a-bcdef-abcde-acbde-acbde-acbde";
+ _("checkSetup()");
+ do_check_eq(Status.checkSetup(), STATUS_OK);
+ Status.resetSync();
+
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
diff --git a/services/sync/tests/unit/test_syncedtabs.js b/services/sync/tests/unit/test_syncedtabs.js
new file mode 100644
index 000000000..fe2cb6d1b
--- /dev/null
+++ b/services/sync/tests/unit/test_syncedtabs.js
@@ -0,0 +1,221 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*-
+ * vim:set ts=2 sw=2 sts=2 et:
+*/
+"use strict";
+
+Cu.import("resource://services-sync/main.js");
+Cu.import("resource://services-sync/SyncedTabs.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+
+const faviconService = Cc["@mozilla.org/browser/favicon-service;1"]
+ .getService(Ci.nsIFaviconService);
+
+Log.repository.getLogger("Sync.RemoteTabs").addAppender(new Log.DumpAppender());
+
+// A mock "Tabs" engine which the SyncedTabs module will use instead of the real
+// engine. We pass a constructor that Sync creates.
+function MockTabsEngine() {
+ this.clients = {}; // We'll set this dynamically
+}
+
+MockTabsEngine.prototype = {
+ name: "tabs",
+ enabled: true,
+
+ getAllClients() {
+ return this.clients;
+ },
+
+ getOpenURLs() {
+ return new Set();
+ },
+}
+
+// A clients engine that doesn't need to be a constructor.
+let MockClientsEngine = {
+ clientSettings: null, // Set in `configureClients`.
+
+ isMobile(guid) {
+ if (!guid.endsWith("desktop") && !guid.endsWith("mobile")) {
+ throw new Error("this module expected guids to end with 'desktop' or 'mobile'");
+ }
+ return guid.endsWith("mobile");
+ },
+ remoteClientExists(id) {
+ return this.clientSettings[id] !== false;
+ },
+ getClientName(id) {
+ if (this.clientSettings[id]) {
+ return this.clientSettings[id];
+ }
+ let engine = Weave.Service.engineManager.get("tabs");
+ return engine.clients[id].clientName;
+ },
+}
+
+// Configure Sync with our mock tabs engine and force it to become initialized.
+Services.prefs.setCharPref("services.sync.username", "someone@somewhere.com");
+
+Weave.Service.engineManager.unregister("tabs");
+Weave.Service.engineManager.register(MockTabsEngine);
+Weave.Service.clientsEngine = MockClientsEngine;
+
+// Tell the Sync XPCOM service it is initialized.
+let weaveXPCService = Cc["@mozilla.org/weave/service;1"]
+ .getService(Ci.nsISupports)
+ .wrappedJSObject;
+weaveXPCService.ready = true;
+
+function configureClients(clients, clientSettings = {}) {
+ // Configure the instance Sync created.
+ let engine = Weave.Service.engineManager.get("tabs");
+ // each client record is expected to have an id.
+ for (let [guid, client] of Object.entries(clients)) {
+ client.id = guid;
+ }
+ engine.clients = clients;
+ // Apply clients collection overrides.
+ MockClientsEngine.clientSettings = clientSettings;
+ // Send an observer that pretends the engine just finished a sync.
+ Services.obs.notifyObservers(null, "weave:engine:sync:finish", "tabs");
+}
+
+// The tests.
+add_task(function* test_noClients() {
+ // no clients, can't be tabs.
+ yield configureClients({});
+
+ let tabs = yield SyncedTabs.getTabClients();
+ equal(Object.keys(tabs).length, 0);
+});
+
+add_task(function* test_clientWithTabs() {
+ yield configureClients({
+ guid_desktop: {
+ clientName: "My Desktop",
+ tabs: [
+ {
+ urlHistory: ["http://foo.com/"],
+ icon: "http://foo.com/favicon",
+ }],
+ },
+ guid_mobile: {
+ clientName: "My Phone",
+ tabs: [],
+ }
+ });
+
+ let clients = yield SyncedTabs.getTabClients();
+ equal(clients.length, 2);
+ clients.sort((a, b) => { return a.name.localeCompare(b.name);});
+ equal(clients[0].tabs.length, 1);
+ equal(clients[0].tabs[0].url, "http://foo.com/");
+ equal(clients[0].tabs[0].icon, "http://foo.com/favicon");
+ // second client has no tabs.
+ equal(clients[1].tabs.length, 0);
+});
+
+add_task(function* test_staleClientWithTabs() {
+ yield configureClients({
+ guid_desktop: {
+ clientName: "My Desktop",
+ tabs: [
+ {
+ urlHistory: ["http://foo.com/"],
+ icon: "http://foo.com/favicon",
+ }],
+ },
+ guid_mobile: {
+ clientName: "My Phone",
+ tabs: [],
+ },
+ guid_stale_mobile: {
+ clientName: "My Deleted Phone",
+ tabs: [],
+ },
+ guid_stale_desktop: {
+ clientName: "My Deleted Laptop",
+ tabs: [
+ {
+ urlHistory: ["https://bar.com/"],
+ icon: "https://bar.com/favicon",
+ }],
+ },
+ guid_stale_name_desktop: {
+ clientName: "My Generic Device",
+ tabs: [
+ {
+ urlHistory: ["https://example.edu/"],
+ icon: "https://example.edu/favicon",
+ }],
+ },
+ }, {
+ guid_stale_mobile: false,
+ guid_stale_desktop: false,
+ // We should always use the device name from the clients collection, instead
+ // of the possibly stale tabs collection.
+ guid_stale_name_desktop: "My Laptop",
+ });
+ let clients = yield SyncedTabs.getTabClients();
+ clients.sort((a, b) => { return a.name.localeCompare(b.name);});
+ equal(clients.length, 3);
+ equal(clients[0].name, "My Desktop");
+ equal(clients[0].tabs.length, 1);
+ equal(clients[0].tabs[0].url, "http://foo.com/");
+ equal(clients[1].name, "My Laptop");
+ equal(clients[1].tabs.length, 1);
+ equal(clients[1].tabs[0].url, "https://example.edu/");
+ equal(clients[2].name, "My Phone");
+ equal(clients[2].tabs.length, 0);
+});
+
+add_task(function* test_clientWithTabsIconsDisabled() {
+ Services.prefs.setBoolPref("services.sync.syncedTabs.showRemoteIcons", false);
+ yield configureClients({
+ guid_desktop: {
+ clientName: "My Desktop",
+ tabs: [
+ {
+ urlHistory: ["http://foo.com/"],
+ icon: "http://foo.com/favicon",
+ }],
+ },
+ });
+
+ let clients = yield SyncedTabs.getTabClients();
+ equal(clients.length, 1);
+ clients.sort((a, b) => { return a.name.localeCompare(b.name);});
+ equal(clients[0].tabs.length, 1);
+ equal(clients[0].tabs[0].url, "http://foo.com/");
+ // expect the default favicon (empty string) due to the pref being false.
+ equal(clients[0].tabs[0].icon, "");
+ Services.prefs.clearUserPref("services.sync.syncedTabs.showRemoteIcons");
+});
+
+add_task(function* test_filter() {
+ // Nothing matches.
+ yield configureClients({
+ guid_desktop: {
+ clientName: "My Desktop",
+ tabs: [
+ {
+ urlHistory: ["http://foo.com/"],
+ title: "A test page.",
+ },
+ {
+ urlHistory: ["http://bar.com/"],
+ title: "Another page.",
+ }],
+ },
+ });
+
+ let clients = yield SyncedTabs.getTabClients("foo");
+ equal(clients.length, 1);
+ equal(clients[0].tabs.length, 1);
+ equal(clients[0].tabs[0].url, "http://foo.com/");
+ // check it matches the title.
+ clients = yield SyncedTabs.getTabClients("test");
+ equal(clients.length, 1);
+ equal(clients[0].tabs.length, 1);
+ equal(clients[0].tabs[0].url, "http://foo.com/");
+});
diff --git a/services/sync/tests/unit/test_syncengine.js b/services/sync/tests/unit/test_syncengine.js
new file mode 100644
index 000000000..8c01ca048
--- /dev/null
+++ b/services/sync/tests/unit/test_syncengine.js
@@ -0,0 +1,204 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function makeSteamEngine() {
+ return new SyncEngine('Steam', Service);
+}
+
+var server;
+
+function test_url_attributes() {
+ _("SyncEngine url attributes");
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.clusterURL = "https://cluster/";
+ let engine = makeSteamEngine();
+ try {
+ do_check_eq(engine.storageURL, "https://cluster/1.1/foo/storage/");
+ do_check_eq(engine.engineURL, "https://cluster/1.1/foo/storage/steam");
+ do_check_eq(engine.metaURL, "https://cluster/1.1/foo/storage/meta/global");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+function test_syncID() {
+ _("SyncEngine.syncID corresponds to preference");
+ let syncTesting = new SyncTestingInfrastructure(server);
+ let engine = makeSteamEngine();
+ try {
+ // Ensure pristine environment
+ do_check_eq(Svc.Prefs.get("steam.syncID"), undefined);
+
+ // Performing the first get on the attribute will generate a new GUID.
+ do_check_eq(engine.syncID, "fake-guid-00");
+ do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-00");
+
+ Svc.Prefs.set("steam.syncID", Utils.makeGUID());
+ do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-01");
+ do_check_eq(engine.syncID, "fake-guid-01");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+function test_lastSync() {
+ _("SyncEngine.lastSync and SyncEngine.lastSyncLocal correspond to preferences");
+ let syncTesting = new SyncTestingInfrastructure(server);
+ let engine = makeSteamEngine();
+ try {
+ // Ensure pristine environment
+ do_check_eq(Svc.Prefs.get("steam.lastSync"), undefined);
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(Svc.Prefs.get("steam.lastSyncLocal"), undefined);
+ do_check_eq(engine.lastSyncLocal, 0);
+
+ // Floats are properly stored as floats and synced with the preference
+ engine.lastSync = 123.45;
+ do_check_eq(engine.lastSync, 123.45);
+ do_check_eq(Svc.Prefs.get("steam.lastSync"), "123.45");
+
+ // Integer is properly stored
+ engine.lastSyncLocal = 67890;
+ do_check_eq(engine.lastSyncLocal, 67890);
+ do_check_eq(Svc.Prefs.get("steam.lastSyncLocal"), "67890");
+
+ // resetLastSync() resets the value (and preference) to 0
+ engine.resetLastSync();
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(Svc.Prefs.get("steam.lastSync"), "0");
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+function test_toFetch() {
+ _("SyncEngine.toFetch corresponds to file on disk");
+ let syncTesting = new SyncTestingInfrastructure(server);
+ const filename = "weave/toFetch/steam.json";
+ let engine = makeSteamEngine();
+ try {
+ // Ensure pristine environment
+ do_check_eq(engine.toFetch.length, 0);
+
+ // Write file to disk
+ let toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
+ engine.toFetch = toFetch;
+ do_check_eq(engine.toFetch, toFetch);
+ // toFetch is written asynchronously
+ engine._store._sleep(0);
+ let fakefile = syncTesting.fakeFilesystem.fakeContents[filename];
+ do_check_eq(fakefile, JSON.stringify(toFetch));
+
+ // Read file from disk
+ toFetch = [Utils.makeGUID(), Utils.makeGUID()];
+ syncTesting.fakeFilesystem.fakeContents[filename] = JSON.stringify(toFetch);
+ engine.loadToFetch();
+ do_check_eq(engine.toFetch.length, 2);
+ do_check_eq(engine.toFetch[0], toFetch[0]);
+ do_check_eq(engine.toFetch[1], toFetch[1]);
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+function test_previousFailed() {
+ _("SyncEngine.previousFailed corresponds to file on disk");
+ let syncTesting = new SyncTestingInfrastructure(server);
+ const filename = "weave/failed/steam.json";
+ let engine = makeSteamEngine();
+ try {
+ // Ensure pristine environment
+ do_check_eq(engine.previousFailed.length, 0);
+
+ // Write file to disk
+ let previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
+ engine.previousFailed = previousFailed;
+ do_check_eq(engine.previousFailed, previousFailed);
+ // previousFailed is written asynchronously
+ engine._store._sleep(0);
+ let fakefile = syncTesting.fakeFilesystem.fakeContents[filename];
+ do_check_eq(fakefile, JSON.stringify(previousFailed));
+
+ // Read file from disk
+ previousFailed = [Utils.makeGUID(), Utils.makeGUID()];
+ syncTesting.fakeFilesystem.fakeContents[filename] = JSON.stringify(previousFailed);
+ engine.loadPreviousFailed();
+ do_check_eq(engine.previousFailed.length, 2);
+ do_check_eq(engine.previousFailed[0], previousFailed[0]);
+ do_check_eq(engine.previousFailed[1], previousFailed[1]);
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+function test_resetClient() {
+ _("SyncEngine.resetClient resets lastSync and toFetch");
+ let syncTesting = new SyncTestingInfrastructure(server);
+ let engine = makeSteamEngine();
+ try {
+ // Ensure pristine environment
+ do_check_eq(Svc.Prefs.get("steam.lastSync"), undefined);
+ do_check_eq(Svc.Prefs.get("steam.lastSyncLocal"), undefined);
+ do_check_eq(engine.toFetch.length, 0);
+
+ engine.lastSync = 123.45;
+ engine.lastSyncLocal = 67890;
+ engine.toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
+ engine.previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
+
+ engine.resetClient();
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(engine.lastSyncLocal, 0);
+ do_check_eq(engine.toFetch.length, 0);
+ do_check_eq(engine.previousFailed.length, 0);
+ } finally {
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+function test_wipeServer() {
+ _("SyncEngine.wipeServer deletes server data and resets the client.");
+ let engine = makeSteamEngine();
+
+ const PAYLOAD = 42;
+ let steamCollection = new ServerWBO("steam", PAYLOAD);
+ let server = httpd_setup({
+ "/1.1/foo/storage/steam": steamCollection.handler()
+ });
+ let syncTesting = new SyncTestingInfrastructure(server);
+ do_test_pending();
+
+ try {
+ // Some data to reset.
+ engine.lastSync = 123.45;
+ engine.toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
+
+ _("Wipe server data and reset client.");
+ engine.wipeServer();
+ do_check_eq(steamCollection.payload, undefined);
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(engine.toFetch.length, 0);
+
+ } finally {
+ server.stop(do_test_finished);
+ Svc.Prefs.resetBranch("");
+ }
+}
+
+function run_test() {
+ server = httpd_setup({});
+ test_url_attributes();
+ test_syncID();
+ test_lastSync();
+ test_toFetch();
+ test_previousFailed();
+ test_resetClient();
+ test_wipeServer();
+
+ server.stop(run_next_test);
+}
diff --git a/services/sync/tests/unit/test_syncengine_sync.js b/services/sync/tests/unit/test_syncengine_sync.js
new file mode 100644
index 000000000..97289962f
--- /dev/null
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -0,0 +1,1855 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function makeRotaryEngine() {
+ return new RotaryEngine(Service);
+}
+
+function clean() {
+ Svc.Prefs.resetBranch("");
+ Svc.Prefs.set("log.logger.engine.rotary", "Trace");
+ Service.recordManager.clearCache();
+}
+
+function cleanAndGo(server) {
+ clean();
+ server.stop(run_next_test);
+}
+
+function promiseClean(server) {
+ clean();
+ return new Promise(resolve => server.stop(resolve));
+}
+
+function configureService(server, username, password) {
+ Service.clusterURL = server.baseURI;
+
+ Service.identity.account = username || "foo";
+ Service.identity.basicPassword = password || "password";
+}
+
+function createServerAndConfigureClient() {
+ let engine = new RotaryEngine(Service);
+
+ let contents = {
+ meta: {global: {engines: {rotary: {version: engine.version,
+ syncID: engine.syncID}}}},
+ crypto: {},
+ rotary: {}
+ };
+
+ const USER = "foo";
+ let server = new SyncServer();
+ server.registerUser(USER, "password");
+ server.createContents(USER, contents);
+ server.start();
+
+ Service.serverURL = server.baseURI;
+ Service.clusterURL = server.baseURI;
+ Service.identity.username = USER;
+ Service._updateCachedURLs();
+
+ return [engine, server, USER];
+}
+
+function run_test() {
+ generateNewKeys(Service.collectionKeys);
+ Svc.Prefs.set("log.logger.engine.rotary", "Trace");
+ run_next_test();
+}
+
+/*
+ * Tests
+ *
+ * SyncEngine._sync() is divided into four rather independent steps:
+ *
+ * - _syncStartup()
+ * - _processIncoming()
+ * - _uploadOutgoing()
+ * - _syncFinish()
+ *
+ * In the spirit of unit testing, these are tested individually for
+ * different scenarios below.
+ */
+
+add_test(function test_syncStartup_emptyOrOutdatedGlobalsResetsSync() {
+ _("SyncEngine._syncStartup resets sync and wipes server data if there's no or an outdated global record");
+
+ // Some server side data that's going to be wiped
+ let collection = new ServerCollection();
+ collection.insert('flying',
+ encryptPayload({id: 'flying',
+ denomination: "LNER Class A3 4472"}));
+ collection.insert('scotsman',
+ encryptPayload({id: 'scotsman',
+ denomination: "Flying Scotsman"}));
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.identity.username = "foo";
+
+ let engine = makeRotaryEngine();
+ engine._store.items = {rekolok: "Rekonstruktionslokomotive"};
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine._tracker.changedIDs["rekolok"], undefined);
+ let metaGlobal = Service.recordManager.get(engine.metaURL);
+ do_check_eq(metaGlobal.payload.engines, undefined);
+ do_check_true(!!collection.payload("flying"));
+ do_check_true(!!collection.payload("scotsman"));
+
+ engine.lastSync = Date.now() / 1000;
+ engine.lastSyncLocal = Date.now();
+
+ // Trying to prompt a wipe -- we no longer track CryptoMeta per engine,
+ // so it has nothing to check.
+ engine._syncStartup();
+
+ // The meta/global WBO has been filled with data about the engine
+ let engineData = metaGlobal.payload.engines["rotary"];
+ do_check_eq(engineData.version, engine.version);
+ do_check_eq(engineData.syncID, engine.syncID);
+
+ // Sync was reset and server data was wiped
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(collection.payload("flying"), undefined);
+ do_check_eq(collection.payload("scotsman"), undefined);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+add_test(function test_syncStartup_serverHasNewerVersion() {
+ _("SyncEngine._syncStartup ");
+
+ let global = new ServerWBO('global', {engines: {rotary: {version: 23456}}});
+ let server = httpd_setup({
+ "/1.1/foo/storage/meta/global": global.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.identity.username = "foo";
+
+ let engine = makeRotaryEngine();
+ try {
+
+ // The server has a newer version of the data and our engine can
+ // handle. That should give us an exception.
+ let error;
+ try {
+ engine._syncStartup();
+ } catch (ex) {
+ error = ex;
+ }
+ do_check_eq(error.failureCode, VERSION_OUT_OF_DATE);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_syncStartup_syncIDMismatchResetsClient() {
+ _("SyncEngine._syncStartup resets sync if syncIDs don't match");
+
+ let server = sync_httpd_setup({});
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.identity.username = "foo";
+
+ // global record with a different syncID than our engine has
+ let engine = makeRotaryEngine();
+ let global = new ServerWBO('global',
+ {engines: {rotary: {version: engine.version,
+ syncID: 'foobar'}}});
+ server.registerPathHandler("/1.1/foo/storage/meta/global", global.handler());
+
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine.syncID, 'fake-guid-00');
+ do_check_eq(engine._tracker.changedIDs["rekolok"], undefined);
+
+ engine.lastSync = Date.now() / 1000;
+ engine.lastSyncLocal = Date.now();
+ engine._syncStartup();
+
+ // The engine has assumed the server's syncID
+ do_check_eq(engine.syncID, 'foobar');
+
+ // Sync was reset
+ do_check_eq(engine.lastSync, 0);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_emptyServer() {
+ _("SyncEngine._processIncoming working with an empty server backend");
+
+ let collection = new ServerCollection();
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.identity.username = "foo";
+
+ let engine = makeRotaryEngine();
+ try {
+
+ // Merely ensure that this code path is run without any errors
+ engine._processIncoming();
+ do_check_eq(engine.lastSync, 0);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_createFromServer() {
+ _("SyncEngine._processIncoming creates new records from server data");
+
+ // Some server records that will be downloaded
+ let collection = new ServerCollection();
+ collection.insert('flying',
+ encryptPayload({id: 'flying',
+ denomination: "LNER Class A3 4472"}));
+ collection.insert('scotsman',
+ encryptPayload({id: 'scotsman',
+ denomination: "Flying Scotsman"}));
+
+ // Two pathological cases involving relative URIs gone wrong.
+ let pathologicalPayload = encryptPayload({id: '../pathological',
+ denomination: "Pathological Case"});
+ collection.insert('../pathological', pathologicalPayload);
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler(),
+ "/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
+ "/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.identity.username = "foo";
+
+ generateNewKeys(Service.collectionKeys);
+
+ let engine = makeRotaryEngine();
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(engine.lastModified, null);
+ do_check_eq(engine._store.items.flying, undefined);
+ do_check_eq(engine._store.items.scotsman, undefined);
+ do_check_eq(engine._store.items['../pathological'], undefined);
+
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Timestamps of last sync and last server modification are set.
+ do_check_true(engine.lastSync > 0);
+ do_check_true(engine.lastModified > 0);
+
+ // Local records have been created from the server data.
+ do_check_eq(engine._store.items.flying, "LNER Class A3 4472");
+ do_check_eq(engine._store.items.scotsman, "Flying Scotsman");
+ do_check_eq(engine._store.items['../pathological'], "Pathological Case");
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_reconcile() {
+ _("SyncEngine._processIncoming updates local records");
+
+ let collection = new ServerCollection();
+
+ // This server record is newer than the corresponding client one,
+ // so it'll update its data.
+ collection.insert('newrecord',
+ encryptPayload({id: 'newrecord',
+ denomination: "New stuff..."}));
+
+ // This server record is newer than the corresponding client one,
+ // so it'll update its data.
+ collection.insert('newerserver',
+ encryptPayload({id: 'newerserver',
+ denomination: "New data!"}));
+
+ // This server record is 2 mins older than the client counterpart
+ // but identical to it, so we're expecting the client record's
+ // changedID to be reset.
+ collection.insert('olderidentical',
+ encryptPayload({id: 'olderidentical',
+ denomination: "Older but identical"}));
+ collection._wbos.olderidentical.modified -= 120;
+
+ // This item simply has different data than the corresponding client
+ // record (which is unmodified), so it will update the client as well
+ collection.insert('updateclient',
+ encryptPayload({id: 'updateclient',
+ denomination: "Get this!"}));
+
+ // This is a dupe of 'original'.
+ collection.insert('duplication',
+ encryptPayload({id: 'duplication',
+ denomination: "Original Entry"}));
+
+ // This record is marked as deleted, so we're expecting the client
+ // record to be removed.
+ collection.insert('nukeme',
+ encryptPayload({id: 'nukeme',
+ denomination: "Nuke me!",
+ deleted: true}));
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.identity.username = "foo";
+
+ let engine = makeRotaryEngine();
+ engine._store.items = {newerserver: "New data, but not as new as server!",
+ olderidentical: "Older but identical",
+ updateclient: "Got data?",
+ original: "Original Entry",
+ long_original: "Long Original Entry",
+ nukeme: "Nuke me!"};
+ // Make this record 1 min old, thus older than the one on the server
+ engine._tracker.addChangedID('newerserver', Date.now()/1000 - 60);
+ // This record has been changed 2 mins later than the one on the server
+ engine._tracker.addChangedID('olderidentical', Date.now()/1000);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine._store.items.newrecord, undefined);
+ do_check_eq(engine._store.items.newerserver, "New data, but not as new as server!");
+ do_check_eq(engine._store.items.olderidentical, "Older but identical");
+ do_check_eq(engine._store.items.updateclient, "Got data?");
+ do_check_eq(engine._store.items.nukeme, "Nuke me!");
+ do_check_true(engine._tracker.changedIDs['olderidentical'] > 0);
+
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Timestamps of last sync and last server modification are set.
+ do_check_true(engine.lastSync > 0);
+ do_check_true(engine.lastModified > 0);
+
+ // The new record is created.
+ do_check_eq(engine._store.items.newrecord, "New stuff...");
+
+ // The 'newerserver' record is updated since the server data is newer.
+ do_check_eq(engine._store.items.newerserver, "New data!");
+
+ // The data for 'olderidentical' is identical on the server, so
+ // it's no longer marked as changed anymore.
+ do_check_eq(engine._store.items.olderidentical, "Older but identical");
+ do_check_eq(engine._tracker.changedIDs['olderidentical'], undefined);
+
+ // Updated with server data.
+ do_check_eq(engine._store.items.updateclient, "Get this!");
+
+ // The incoming ID is preferred.
+ do_check_eq(engine._store.items.original, undefined);
+ do_check_eq(engine._store.items.duplication, "Original Entry");
+ do_check_neq(engine._delete.ids.indexOf("original"), -1);
+
+ // The 'nukeme' record marked as deleted is removed.
+ do_check_eq(engine._store.items.nukeme, undefined);
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+add_test(function test_processIncoming_reconcile_local_deleted() {
+ _("Ensure local, duplicate ID is deleted on server.");
+
+ // When a duplicate is resolved, the local ID (which is never taken) should
+ // be deleted on the server.
+ let [engine, server, user] = createServerAndConfigureClient();
+
+ let now = Date.now() / 1000 - 10;
+ engine.lastSync = now;
+ engine.lastModified = now + 1;
+
+ let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
+ let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
+ server.insertWBO(user, "rotary", wbo);
+
+ record = encryptPayload({id: "DUPE_LOCAL", denomination: "local"});
+ wbo = new ServerWBO("DUPE_LOCAL", record, now - 1);
+ server.insertWBO(user, "rotary", wbo);
+
+ engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
+ do_check_true(engine._store.itemExists("DUPE_LOCAL"));
+ do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+
+ engine._sync();
+
+ do_check_attribute_count(engine._store.items, 1);
+ do_check_true("DUPE_INCOMING" in engine._store.items);
+
+ let collection = server.getCollection(user, "rotary");
+ do_check_eq(1, collection.count());
+ do_check_neq(undefined, collection.wbo("DUPE_INCOMING"));
+
+ cleanAndGo(server);
+});
+
+add_test(function test_processIncoming_reconcile_equivalent() {
+ _("Ensure proper handling of incoming records that match local.");
+
+ let [engine, server, user] = createServerAndConfigureClient();
+
+ let now = Date.now() / 1000 - 10;
+ engine.lastSync = now;
+ engine.lastModified = now + 1;
+
+ let record = encryptPayload({id: "entry", denomination: "denomination"});
+ let wbo = new ServerWBO("entry", record, now + 2);
+ server.insertWBO(user, "rotary", wbo);
+
+ engine._store.items = {entry: "denomination"};
+ do_check_true(engine._store.itemExists("entry"));
+
+ engine._sync();
+
+ do_check_attribute_count(engine._store.items, 1);
+
+ cleanAndGo(server);
+});
+
+add_test(function test_processIncoming_reconcile_locally_deleted_dupe_new() {
+ _("Ensure locally deleted duplicate record newer than incoming is handled.");
+
+ // This is a somewhat complicated test. It ensures that if a client receives
+ // a modified record for an item that is deleted locally but with a different
+ // ID that the incoming record is ignored. This is a corner case for record
+ // handling, but it needs to be supported.
+ let [engine, server, user] = createServerAndConfigureClient();
+
+ let now = Date.now() / 1000 - 10;
+ engine.lastSync = now;
+ engine.lastModified = now + 1;
+
+ let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
+ let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
+ server.insertWBO(user, "rotary", wbo);
+
+ // Simulate a locally-deleted item.
+ engine._store.items = {};
+ engine._tracker.addChangedID("DUPE_LOCAL", now + 3);
+ do_check_false(engine._store.itemExists("DUPE_LOCAL"));
+ do_check_false(engine._store.itemExists("DUPE_INCOMING"));
+ do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+
+ engine._sync();
+
+ // After the sync, the server's payload for the original ID should be marked
+ // as deleted.
+ do_check_empty(engine._store.items);
+ let collection = server.getCollection(user, "rotary");
+ do_check_eq(1, collection.count());
+ wbo = collection.wbo("DUPE_INCOMING");
+ do_check_neq(null, wbo);
+ let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
+ do_check_true(payload.deleted);
+
+ cleanAndGo(server);
+});
+
+add_test(function test_processIncoming_reconcile_locally_deleted_dupe_old() {
+ _("Ensure locally deleted duplicate record older than incoming is restored.");
+
+ // This is similar to the above test except it tests the condition where the
+ // incoming record is newer than the local deletion, therefore overriding it.
+
+ let [engine, server, user] = createServerAndConfigureClient();
+
+ let now = Date.now() / 1000 - 10;
+ engine.lastSync = now;
+ engine.lastModified = now + 1;
+
+ let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
+ let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
+ server.insertWBO(user, "rotary", wbo);
+
+ // Simulate a locally-deleted item.
+ engine._store.items = {};
+ engine._tracker.addChangedID("DUPE_LOCAL", now + 1);
+ do_check_false(engine._store.itemExists("DUPE_LOCAL"));
+ do_check_false(engine._store.itemExists("DUPE_INCOMING"));
+ do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+
+ engine._sync();
+
+ // Since the remote change is newer, the incoming item should exist locally.
+ do_check_attribute_count(engine._store.items, 1);
+ do_check_true("DUPE_INCOMING" in engine._store.items);
+ do_check_eq("incoming", engine._store.items.DUPE_INCOMING);
+
+ let collection = server.getCollection(user, "rotary");
+ do_check_eq(1, collection.count());
+ wbo = collection.wbo("DUPE_INCOMING");
+ let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
+ do_check_eq("incoming", payload.denomination);
+
+ cleanAndGo(server);
+});
+
+add_test(function test_processIncoming_reconcile_changed_dupe() {
+ _("Ensure that locally changed duplicate record is handled properly.");
+
+ let [engine, server, user] = createServerAndConfigureClient();
+
+ let now = Date.now() / 1000 - 10;
+ engine.lastSync = now;
+ engine.lastModified = now + 1;
+
+ // The local record is newer than the incoming one, so it should be retained.
+ let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
+ let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
+ server.insertWBO(user, "rotary", wbo);
+
+ engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
+ engine._tracker.addChangedID("DUPE_LOCAL", now + 3);
+ do_check_true(engine._store.itemExists("DUPE_LOCAL"));
+ do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+
+ engine._sync();
+
+ // The ID should have been changed to incoming.
+ do_check_attribute_count(engine._store.items, 1);
+ do_check_true("DUPE_INCOMING" in engine._store.items);
+
+ // On the server, the local ID should be deleted and the incoming ID should
+ // have its payload set to what was in the local record.
+ let collection = server.getCollection(user, "rotary");
+ do_check_eq(1, collection.count());
+ wbo = collection.wbo("DUPE_INCOMING");
+ do_check_neq(undefined, wbo);
+ let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
+ do_check_eq("local", payload.denomination);
+
+ cleanAndGo(server);
+});
+
+add_test(function test_processIncoming_reconcile_changed_dupe_new() {
+ _("Ensure locally changed duplicate record older than incoming is ignored.");
+
+ // This test is similar to the above except the incoming record is younger
+ // than the local record. The incoming record should be authoritative.
+ let [engine, server, user] = createServerAndConfigureClient();
+
+ let now = Date.now() / 1000 - 10;
+ engine.lastSync = now;
+ engine.lastModified = now + 1;
+
+ let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
+ let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
+ server.insertWBO(user, "rotary", wbo);
+
+ engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
+ engine._tracker.addChangedID("DUPE_LOCAL", now + 1);
+ do_check_true(engine._store.itemExists("DUPE_LOCAL"));
+ do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+
+ engine._sync();
+
+ // The ID should have been changed to incoming.
+ do_check_attribute_count(engine._store.items, 1);
+ do_check_true("DUPE_INCOMING" in engine._store.items);
+
+ // On the server, the local ID should be deleted and the incoming ID should
+ // have its payload retained.
+ let collection = server.getCollection(user, "rotary");
+ do_check_eq(1, collection.count());
+ wbo = collection.wbo("DUPE_INCOMING");
+ do_check_neq(undefined, wbo);
+ let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
+ do_check_eq("incoming", payload.denomination);
+ cleanAndGo(server);
+});
+
+add_test(function test_processIncoming_mobile_batchSize() {
+ _("SyncEngine._processIncoming doesn't fetch everything at once on mobile clients");
+
+ Svc.Prefs.set("client.type", "mobile");
+ Service.identity.username = "foo";
+
+ // A collection that logs each GET
+ let collection = new ServerCollection();
+ collection.get_log = [];
+ collection._get = collection.get;
+ collection.get = function (options) {
+ this.get_log.push(options);
+ return this._get(options);
+ };
+
+ // Let's create some 234 server side records. They're all at least
+ // 10 minutes old.
+ for (let i = 0; i < 234; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + i});
+ let wbo = new ServerWBO(id, payload);
+ wbo.modified = Date.now()/1000 - 60*(i+10);
+ collection.insertWBO(wbo);
+ }
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let engine = makeRotaryEngine();
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ _("On a mobile client, we get new records from the server in batches of 50.");
+ engine._syncStartup();
+ engine._processIncoming();
+ do_check_attribute_count(engine._store.items, 234);
+ do_check_true('record-no-0' in engine._store.items);
+ do_check_true('record-no-49' in engine._store.items);
+ do_check_true('record-no-50' in engine._store.items);
+ do_check_true('record-no-233' in engine._store.items);
+
+ // Verify that the right number of GET requests with the right
+ // kind of parameters were made.
+ do_check_eq(collection.get_log.length,
+ Math.ceil(234 / MOBILE_BATCH_SIZE) + 1);
+ do_check_eq(collection.get_log[0].full, 1);
+ do_check_eq(collection.get_log[0].limit, MOBILE_BATCH_SIZE);
+ do_check_eq(collection.get_log[1].full, undefined);
+ do_check_eq(collection.get_log[1].limit, undefined);
+ for (let i = 1; i <= Math.floor(234 / MOBILE_BATCH_SIZE); i++) {
+ do_check_eq(collection.get_log[i+1].full, 1);
+ do_check_eq(collection.get_log[i+1].limit, undefined);
+ if (i < Math.floor(234 / MOBILE_BATCH_SIZE))
+ do_check_eq(collection.get_log[i+1].ids.length, MOBILE_BATCH_SIZE);
+ else
+ do_check_eq(collection.get_log[i+1].ids.length, 234 % MOBILE_BATCH_SIZE);
+ }
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_task(function *test_processIncoming_store_toFetch() {
+ _("If processIncoming fails in the middle of a batch on mobile, state is saved in toFetch and lastSync.");
+ Service.identity.username = "foo";
+ Svc.Prefs.set("client.type", "mobile");
+
+ // A collection that throws at the fourth get.
+ let collection = new ServerCollection();
+ collection._get_calls = 0;
+ collection._get = collection.get;
+ collection.get = function() {
+ this._get_calls += 1;
+ if (this._get_calls > 3) {
+ throw "Abort on fourth call!";
+ }
+ return this._get.apply(this, arguments);
+ };
+
+ // Let's create three batches worth of server side records.
+ for (var i = 0; i < MOBILE_BATCH_SIZE * 3; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + id});
+ let wbo = new ServerWBO(id, payload);
+ wbo.modified = Date.now()/1000 + 60 * (i - MOBILE_BATCH_SIZE * 3);
+ collection.insertWBO(wbo);
+ }
+
+ let engine = makeRotaryEngine();
+ engine.enabled = true;
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine.lastSync, 0);
+ do_check_empty(engine._store.items);
+
+ let error;
+ try {
+ yield sync_engine_and_validate_telem(engine, true);
+ } catch (ex) {
+ error = ex;
+ }
+
+ // Only the first two batches have been applied.
+ do_check_eq(Object.keys(engine._store.items).length,
+ MOBILE_BATCH_SIZE * 2);
+
+ // The third batch is stuck in toFetch. lastSync has been moved forward to
+ // the last successful item's timestamp.
+ do_check_eq(engine.toFetch.length, MOBILE_BATCH_SIZE);
+ do_check_eq(engine.lastSync, collection.wbo("record-no-99").modified);
+
+ } finally {
+ yield promiseClean(server);
+ }
+});
+
+
+add_test(function test_processIncoming_resume_toFetch() {
+ _("toFetch and previousFailed items left over from previous syncs are fetched on the next sync, along with new items.");
+ Service.identity.username = "foo";
+
+ const LASTSYNC = Date.now() / 1000;
+
+ // Server records that will be downloaded
+ let collection = new ServerCollection();
+ collection.insert('flying',
+ encryptPayload({id: 'flying',
+ denomination: "LNER Class A3 4472"}));
+ collection.insert('scotsman',
+ encryptPayload({id: 'scotsman',
+ denomination: "Flying Scotsman"}));
+ collection.insert('rekolok',
+ encryptPayload({id: 'rekolok',
+ denomination: "Rekonstruktionslokomotive"}));
+ for (let i = 0; i < 3; i++) {
+ let id = 'failed' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + i});
+ let wbo = new ServerWBO(id, payload);
+ wbo.modified = LASTSYNC - 10;
+ collection.insertWBO(wbo);
+ }
+
+ collection.wbo("flying").modified =
+ collection.wbo("scotsman").modified = LASTSYNC - 10;
+ collection._wbos.rekolok.modified = LASTSYNC + 10;
+
+ // Time travel 10 seconds into the future but still download the above WBOs.
+ let engine = makeRotaryEngine();
+ engine.lastSync = LASTSYNC;
+ engine.toFetch = ["flying", "scotsman"];
+ engine.previousFailed = ["failed0", "failed1", "failed2"];
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine._store.items.flying, undefined);
+ do_check_eq(engine._store.items.scotsman, undefined);
+ do_check_eq(engine._store.items.rekolok, undefined);
+
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Local records have been created from the server data.
+ do_check_eq(engine._store.items.flying, "LNER Class A3 4472");
+ do_check_eq(engine._store.items.scotsman, "Flying Scotsman");
+ do_check_eq(engine._store.items.rekolok, "Rekonstruktionslokomotive");
+ do_check_eq(engine._store.items.failed0, "Record No. 0");
+ do_check_eq(engine._store.items.failed1, "Record No. 1");
+ do_check_eq(engine._store.items.failed2, "Record No. 2");
+ do_check_eq(engine.previousFailed.length, 0);
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_applyIncomingBatchSize_smaller() {
+ _("Ensure that a number of incoming items less than applyIncomingBatchSize is still applied.");
+ Service.identity.username = "foo";
+
+ // Engine that doesn't like the first and last record it's given.
+ const APPLY_BATCH_SIZE = 10;
+ let engine = makeRotaryEngine();
+ engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
+ engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
+ engine._store.applyIncomingBatch = function (records) {
+ let failed1 = records.shift();
+ let failed2 = records.pop();
+ this._applyIncomingBatch(records);
+ return [failed1.id, failed2.id];
+ };
+
+ // Let's create less than a batch worth of server side records.
+ let collection = new ServerCollection();
+ for (let i = 0; i < APPLY_BATCH_SIZE - 1; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + id});
+ collection.insert(id, payload);
+ }
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ try {
+
+ // Confirm initial environment
+ do_check_empty(engine._store.items);
+
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Records have been applied and the expected failures have failed.
+ do_check_attribute_count(engine._store.items, APPLY_BATCH_SIZE - 1 - 2);
+ do_check_eq(engine.toFetch.length, 0);
+ do_check_eq(engine.previousFailed.length, 2);
+ do_check_eq(engine.previousFailed[0], "record-no-0");
+ do_check_eq(engine.previousFailed[1], "record-no-8");
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_applyIncomingBatchSize_multiple() {
+ _("Ensure that incoming items are applied according to applyIncomingBatchSize.");
+ Service.identity.username = "foo";
+
+ const APPLY_BATCH_SIZE = 10;
+
+ // Engine that applies records in batches.
+ let engine = makeRotaryEngine();
+ engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
+ let batchCalls = 0;
+ engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
+ engine._store.applyIncomingBatch = function (records) {
+ batchCalls += 1;
+ do_check_eq(records.length, APPLY_BATCH_SIZE);
+ this._applyIncomingBatch.apply(this, arguments);
+ };
+
+ // Let's create three batches worth of server side records.
+ let collection = new ServerCollection();
+ for (let i = 0; i < APPLY_BATCH_SIZE * 3; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + id});
+ collection.insert(id, payload);
+ }
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ try {
+
+ // Confirm initial environment
+ do_check_empty(engine._store.items);
+
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Records have been applied in 3 batches.
+ do_check_eq(batchCalls, 3);
+ do_check_attribute_count(engine._store.items, APPLY_BATCH_SIZE * 3);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_notify_count() {
+ _("Ensure that failed records are reported only once.");
+ Service.identity.username = "foo";
+
+ const APPLY_BATCH_SIZE = 5;
+ const NUMBER_OF_RECORDS = 15;
+
+ // Engine that fails the first record.
+ let engine = makeRotaryEngine();
+ engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
+ engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
+ engine._store.applyIncomingBatch = function (records) {
+ engine._store._applyIncomingBatch(records.slice(1));
+ return [records[0].id];
+ };
+
+ // Create a batch of server side records.
+ let collection = new ServerCollection();
+ for (var i = 0; i < NUMBER_OF_RECORDS; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + id});
+ collection.insert(id, payload);
+ }
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ try {
+ // Confirm initial environment.
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(engine.toFetch.length, 0);
+ do_check_eq(engine.previousFailed.length, 0);
+ do_check_empty(engine._store.items);
+
+ let called = 0;
+ let counts;
+ function onApplied(count) {
+ _("Called with " + JSON.stringify(counts));
+ counts = count;
+ called++;
+ }
+ Svc.Obs.add("weave:engine:sync:applied", onApplied);
+
+ // Do sync.
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Confirm failures.
+ do_check_attribute_count(engine._store.items, 12);
+ do_check_eq(engine.previousFailed.length, 3);
+ do_check_eq(engine.previousFailed[0], "record-no-0");
+ do_check_eq(engine.previousFailed[1], "record-no-5");
+ do_check_eq(engine.previousFailed[2], "record-no-10");
+
+ // There are newly failed records and they are reported.
+ do_check_eq(called, 1);
+ do_check_eq(counts.failed, 3);
+ do_check_eq(counts.applied, 15);
+ do_check_eq(counts.newFailed, 3);
+ do_check_eq(counts.succeeded, 12);
+
+ // Sync again, 1 of the failed items are the same, the rest didn't fail.
+ engine._processIncoming();
+
+ // Confirming removed failures.
+ do_check_attribute_count(engine._store.items, 14);
+ do_check_eq(engine.previousFailed.length, 1);
+ do_check_eq(engine.previousFailed[0], "record-no-0");
+
+ do_check_eq(called, 2);
+ do_check_eq(counts.failed, 1);
+ do_check_eq(counts.applied, 3);
+ do_check_eq(counts.newFailed, 0);
+ do_check_eq(counts.succeeded, 2);
+
+ Svc.Obs.remove("weave:engine:sync:applied", onApplied);
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_previousFailed() {
+ _("Ensure that failed records are retried.");
+ Service.identity.username = "foo";
+ Svc.Prefs.set("client.type", "mobile");
+
+ const APPLY_BATCH_SIZE = 4;
+ const NUMBER_OF_RECORDS = 14;
+
+ // Engine that fails the first 2 records.
+ let engine = makeRotaryEngine();
+ engine.mobileGUIDFetchBatchSize = engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
+ engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
+ engine._store.applyIncomingBatch = function (records) {
+ engine._store._applyIncomingBatch(records.slice(2));
+ return [records[0].id, records[1].id];
+ };
+
+ // Create a batch of server side records.
+ let collection = new ServerCollection();
+ for (var i = 0; i < NUMBER_OF_RECORDS; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + i});
+ collection.insert(id, payload);
+ }
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ try {
+ // Confirm initial environment.
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(engine.toFetch.length, 0);
+ do_check_eq(engine.previousFailed.length, 0);
+ do_check_empty(engine._store.items);
+
+ // Initial failed items in previousFailed to be reset.
+ let previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
+ engine.previousFailed = previousFailed;
+ do_check_eq(engine.previousFailed, previousFailed);
+
+ // Do sync.
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Expected result: 4 sync batches with 2 failures each => 8 failures
+ do_check_attribute_count(engine._store.items, 6);
+ do_check_eq(engine.previousFailed.length, 8);
+ do_check_eq(engine.previousFailed[0], "record-no-0");
+ do_check_eq(engine.previousFailed[1], "record-no-1");
+ do_check_eq(engine.previousFailed[2], "record-no-4");
+ do_check_eq(engine.previousFailed[3], "record-no-5");
+ do_check_eq(engine.previousFailed[4], "record-no-8");
+ do_check_eq(engine.previousFailed[5], "record-no-9");
+ do_check_eq(engine.previousFailed[6], "record-no-12");
+ do_check_eq(engine.previousFailed[7], "record-no-13");
+
+ // Sync again with the same failed items (records 0, 1, 8, 9).
+ engine._processIncoming();
+
+ // A second sync with the same failed items should not add the same items again.
+ // Items that did not fail a second time should no longer be in previousFailed.
+ do_check_attribute_count(engine._store.items, 10);
+ do_check_eq(engine.previousFailed.length, 4);
+ do_check_eq(engine.previousFailed[0], "record-no-0");
+ do_check_eq(engine.previousFailed[1], "record-no-1");
+ do_check_eq(engine.previousFailed[2], "record-no-8");
+ do_check_eq(engine.previousFailed[3], "record-no-9");
+
+ // Refetched items that didn't fail the second time are in engine._store.items.
+ do_check_eq(engine._store.items['record-no-4'], "Record No. 4");
+ do_check_eq(engine._store.items['record-no-5'], "Record No. 5");
+ do_check_eq(engine._store.items['record-no-12'], "Record No. 12");
+ do_check_eq(engine._store.items['record-no-13'], "Record No. 13");
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_processIncoming_failed_records() {
+ _("Ensure that failed records from _reconcile and applyIncomingBatch are refetched.");
+ Service.identity.username = "foo";
+
+ // Let's create three and a bit batches worth of server side records.
+ let collection = new ServerCollection();
+ const NUMBER_OF_RECORDS = MOBILE_BATCH_SIZE * 3 + 5;
+ for (let i = 0; i < NUMBER_OF_RECORDS; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + id});
+ let wbo = new ServerWBO(id, payload);
+ wbo.modified = Date.now()/1000 + 60 * (i - MOBILE_BATCH_SIZE * 3);
+ collection.insertWBO(wbo);
+ }
+
+ // Engine that batches but likes to throw on a couple of records,
+ // two in each batch: the even ones fail in reconcile, the odd ones
+ // in applyIncoming.
+ const BOGUS_RECORDS = ["record-no-" + 42,
+ "record-no-" + 23,
+ "record-no-" + (42 + MOBILE_BATCH_SIZE),
+ "record-no-" + (23 + MOBILE_BATCH_SIZE),
+ "record-no-" + (42 + MOBILE_BATCH_SIZE * 2),
+ "record-no-" + (23 + MOBILE_BATCH_SIZE * 2),
+ "record-no-" + (2 + MOBILE_BATCH_SIZE * 3),
+ "record-no-" + (1 + MOBILE_BATCH_SIZE * 3)];
+ let engine = makeRotaryEngine();
+ engine.applyIncomingBatchSize = MOBILE_BATCH_SIZE;
+
+ engine.__reconcile = engine._reconcile;
+ engine._reconcile = function _reconcile(record) {
+ if (BOGUS_RECORDS.indexOf(record.id) % 2 == 0) {
+ throw "I don't like this record! Baaaaaah!";
+ }
+ return this.__reconcile.apply(this, arguments);
+ };
+ engine._store._applyIncoming = engine._store.applyIncoming;
+ engine._store.applyIncoming = function (record) {
+ if (BOGUS_RECORDS.indexOf(record.id) % 2 == 1) {
+ throw "I don't like this record! Baaaaaah!";
+ }
+ return this._applyIncoming.apply(this, arguments);
+ };
+
+ // Keep track of requests made of a collection.
+ let count = 0;
+ let uris = [];
+ function recording_handler(collection) {
+ let h = collection.handler();
+ return function(req, res) {
+ ++count;
+ uris.push(req.path + "?" + req.queryString);
+ return h(req, res);
+ };
+ }
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": recording_handler(collection)
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine.lastSync, 0);
+ do_check_eq(engine.toFetch.length, 0);
+ do_check_eq(engine.previousFailed.length, 0);
+ do_check_empty(engine._store.items);
+
+ let observerSubject;
+ let observerData;
+ Svc.Obs.add("weave:engine:sync:applied", function onApplied(subject, data) {
+ Svc.Obs.remove("weave:engine:sync:applied", onApplied);
+ observerSubject = subject;
+ observerData = data;
+ });
+
+ engine._syncStartup();
+ engine._processIncoming();
+
+ // Ensure that all records but the bogus 4 have been applied.
+ do_check_attribute_count(engine._store.items,
+ NUMBER_OF_RECORDS - BOGUS_RECORDS.length);
+
+ // Ensure that the bogus records will be fetched again on the next sync.
+ do_check_eq(engine.previousFailed.length, BOGUS_RECORDS.length);
+ engine.previousFailed.sort();
+ BOGUS_RECORDS.sort();
+ for (let i = 0; i < engine.previousFailed.length; i++) {
+ do_check_eq(engine.previousFailed[i], BOGUS_RECORDS[i]);
+ }
+
+ // Ensure the observer was notified
+ do_check_eq(observerData, engine.name);
+ do_check_eq(observerSubject.failed, BOGUS_RECORDS.length);
+ do_check_eq(observerSubject.newFailed, BOGUS_RECORDS.length);
+
+ // Testing batching of failed item fetches.
+ // Try to sync again. Ensure that we split the request into chunks to avoid
+ // URI length limitations.
+ function batchDownload(batchSize) {
+ count = 0;
+ uris = [];
+ engine.guidFetchBatchSize = batchSize;
+ engine._processIncoming();
+ _("Tried again. Requests: " + count + "; URIs: " + JSON.stringify(uris));
+ return count;
+ }
+
+ // There are 8 bad records, so this needs 3 fetches.
+ _("Test batching with ID batch size 3, normal mobile batch size.");
+ do_check_eq(batchDownload(3), 3);
+
+ // Now see with a more realistic limit.
+ _("Test batching with sufficient ID batch size.");
+ do_check_eq(batchDownload(BOGUS_RECORDS.length), 1);
+
+ // If we're on mobile, that limit is used by default.
+ _("Test batching with tiny mobile batch size.");
+ Svc.Prefs.set("client.type", "mobile");
+ engine.mobileGUIDFetchBatchSize = 2;
+ do_check_eq(batchDownload(BOGUS_RECORDS.length), 4);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_task(function *test_processIncoming_decrypt_failed() {
+ _("Ensure that records failing to decrypt are either replaced or refetched.");
+
+ Service.identity.username = "foo";
+
+ // Some good and some bogus records. One doesn't contain valid JSON,
+ // the other will throw during decrypt.
+ let collection = new ServerCollection();
+ collection._wbos.flying = new ServerWBO(
+ 'flying', encryptPayload({id: 'flying',
+ denomination: "LNER Class A3 4472"}));
+ collection._wbos.nojson = new ServerWBO("nojson", "This is invalid JSON");
+ collection._wbos.nojson2 = new ServerWBO("nojson2", "This is invalid JSON");
+ collection._wbos.scotsman = new ServerWBO(
+ 'scotsman', encryptPayload({id: 'scotsman',
+ denomination: "Flying Scotsman"}));
+ collection._wbos.nodecrypt = new ServerWBO("nodecrypt", "Decrypt this!");
+ collection._wbos.nodecrypt2 = new ServerWBO("nodecrypt2", "Decrypt this!");
+
+ // Patch the fake crypto service to throw on the record above.
+ Svc.Crypto._decrypt = Svc.Crypto.decrypt;
+ Svc.Crypto.decrypt = function (ciphertext) {
+ if (ciphertext == "Decrypt this!") {
+ throw "Derp! Cipher finalized failed. Im ur crypto destroyin ur recordz.";
+ }
+ return this._decrypt.apply(this, arguments);
+ };
+
+ // Some broken records also exist locally.
+ let engine = makeRotaryEngine();
+ engine.enabled = true;
+ engine._store.items = {nojson: "Valid JSON",
+ nodecrypt: "Valid ciphertext"};
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+ try {
+
+ // Confirm initial state
+ do_check_eq(engine.toFetch.length, 0);
+ do_check_eq(engine.previousFailed.length, 0);
+
+ let observerSubject;
+ let observerData;
+ Svc.Obs.add("weave:engine:sync:applied", function onApplied(subject, data) {
+ Svc.Obs.remove("weave:engine:sync:applied", onApplied);
+ observerSubject = subject;
+ observerData = data;
+ });
+
+ engine.lastSync = collection.wbo("nojson").modified - 1;
+ let ping = yield sync_engine_and_validate_telem(engine, true);
+ do_check_eq(ping.engines[0].incoming.applied, 2);
+ do_check_eq(ping.engines[0].incoming.failed, 4);
+ do_check_eq(ping.engines[0].incoming.newFailed, 4);
+
+ do_check_eq(engine.previousFailed.length, 4);
+ do_check_eq(engine.previousFailed[0], "nojson");
+ do_check_eq(engine.previousFailed[1], "nojson2");
+ do_check_eq(engine.previousFailed[2], "nodecrypt");
+ do_check_eq(engine.previousFailed[3], "nodecrypt2");
+
+ // Ensure the observer was notified
+ do_check_eq(observerData, engine.name);
+ do_check_eq(observerSubject.applied, 2);
+ do_check_eq(observerSubject.failed, 4);
+
+ } finally {
+ yield promiseClean(server);
+ }
+});
+
+
+add_test(function test_uploadOutgoing_toEmptyServer() {
+ _("SyncEngine._uploadOutgoing uploads new records to server");
+
+ Service.identity.username = "foo";
+ let collection = new ServerCollection();
+ collection._wbos.flying = new ServerWBO('flying');
+ collection._wbos.scotsman = new ServerWBO('scotsman');
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler(),
+ "/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
+ "/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ generateNewKeys(Service.collectionKeys);
+
+ let engine = makeRotaryEngine();
+ engine.lastSync = 123; // needs to be non-zero so that tracker is queried
+ engine._store.items = {flying: "LNER Class A3 4472",
+ scotsman: "Flying Scotsman"};
+ // Mark one of these records as changed
+ engine._tracker.addChangedID('scotsman', 0);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine.lastSyncLocal, 0);
+ do_check_eq(collection.payload("flying"), undefined);
+ do_check_eq(collection.payload("scotsman"), undefined);
+
+ engine._syncStartup();
+ engine._uploadOutgoing();
+
+ // Local timestamp has been set.
+ do_check_true(engine.lastSyncLocal > 0);
+
+ // Ensure the marked record ('scotsman') has been uploaded and is
+ // no longer marked.
+ do_check_eq(collection.payload("flying"), undefined);
+ do_check_true(!!collection.payload("scotsman"));
+ do_check_eq(JSON.parse(collection.wbo("scotsman").data.ciphertext).id,
+ "scotsman");
+ do_check_eq(engine._tracker.changedIDs["scotsman"], undefined);
+
+ // The 'flying' record wasn't marked so it wasn't uploaded
+ do_check_eq(collection.payload("flying"), undefined);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_task(function *test_uploadOutgoing_failed() {
+ _("SyncEngine._uploadOutgoing doesn't clear the tracker of objects that failed to upload.");
+
+ Service.identity.username = "foo";
+ let collection = new ServerCollection();
+ // We only define the "flying" WBO on the server, not the "scotsman"
+ // and "peppercorn" ones.
+ collection._wbos.flying = new ServerWBO('flying');
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let engine = makeRotaryEngine();
+ engine.lastSync = 123; // needs to be non-zero so that tracker is queried
+ engine._store.items = {flying: "LNER Class A3 4472",
+ scotsman: "Flying Scotsman",
+ peppercorn: "Peppercorn Class"};
+ // Mark these records as changed
+ const FLYING_CHANGED = 12345;
+ const SCOTSMAN_CHANGED = 23456;
+ const PEPPERCORN_CHANGED = 34567;
+ engine._tracker.addChangedID('flying', FLYING_CHANGED);
+ engine._tracker.addChangedID('scotsman', SCOTSMAN_CHANGED);
+ engine._tracker.addChangedID('peppercorn', PEPPERCORN_CHANGED);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ // Confirm initial environment
+ do_check_eq(engine.lastSyncLocal, 0);
+ do_check_eq(collection.payload("flying"), undefined);
+ do_check_eq(engine._tracker.changedIDs['flying'], FLYING_CHANGED);
+ do_check_eq(engine._tracker.changedIDs['scotsman'], SCOTSMAN_CHANGED);
+ do_check_eq(engine._tracker.changedIDs['peppercorn'], PEPPERCORN_CHANGED);
+
+ engine.enabled = true;
+ yield sync_engine_and_validate_telem(engine, true);
+
+ // Local timestamp has been set.
+ do_check_true(engine.lastSyncLocal > 0);
+
+ // Ensure the 'flying' record has been uploaded and is no longer marked.
+ do_check_true(!!collection.payload("flying"));
+ do_check_eq(engine._tracker.changedIDs['flying'], undefined);
+
+ // The 'scotsman' and 'peppercorn' records couldn't be uploaded so
+ // they weren't cleared from the tracker.
+ do_check_eq(engine._tracker.changedIDs['scotsman'], SCOTSMAN_CHANGED);
+ do_check_eq(engine._tracker.changedIDs['peppercorn'], PEPPERCORN_CHANGED);
+
+ } finally {
+ yield promiseClean(server);
+ }
+});
+
+/* A couple of "functional" tests to ensure we split records into appropriate
+ POST requests. More comprehensive unit-tests for this "batching" are in
+ test_postqueue.js.
+*/
+add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() {
+ _("SyncEngine._uploadOutgoing uploads in batches of MAX_UPLOAD_RECORDS");
+
+ Service.identity.username = "foo";
+ let collection = new ServerCollection();
+
+ // Let's count how many times the client posts to the server
+ var noOfUploads = 0;
+ collection.post = (function(orig) {
+ return function(data, request) {
+ // This test doesn't arrange for batch semantics - so we expect the
+ // first request to come in with batch=true and the others to have no
+ // batch related headers at all (as the first response did not provide
+ // a batch ID)
+ if (noOfUploads == 0) {
+ do_check_eq(request.queryString, "batch=true");
+ } else {
+ do_check_eq(request.queryString, "");
+ }
+ noOfUploads++;
+ return orig.call(this, data, request);
+ };
+ }(collection.post));
+
+ // Create a bunch of records (and server side handlers)
+ let engine = makeRotaryEngine();
+ for (var i = 0; i < 234; i++) {
+ let id = 'record-no-' + i;
+ engine._store.items[id] = "Record No. " + i;
+ engine._tracker.addChangedID(id, 0);
+ collection.insert(id);
+ }
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ try {
+
+ // Confirm initial environment.
+ do_check_eq(noOfUploads, 0);
+
+ engine._syncStartup();
+ engine._uploadOutgoing();
+
+ // Ensure all records have been uploaded.
+ for (i = 0; i < 234; i++) {
+ do_check_true(!!collection.payload('record-no-' + i));
+ }
+
+ // Ensure that the uploads were performed in batches of MAX_UPLOAD_RECORDS.
+ do_check_eq(noOfUploads, Math.ceil(234/MAX_UPLOAD_RECORDS));
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+add_test(function test_uploadOutgoing_largeRecords() {
+ _("SyncEngine._uploadOutgoing throws on records larger than MAX_UPLOAD_BYTES");
+
+ Service.identity.username = "foo";
+ let collection = new ServerCollection();
+
+ let engine = makeRotaryEngine();
+ engine.allowSkippedRecord = false;
+ engine._store.items["large-item"] = "Y".repeat(MAX_UPLOAD_BYTES*2);
+ engine._tracker.addChangedID("large-item", 0);
+ collection.insert("large-item");
+
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ try {
+ engine._syncStartup();
+ let error = null;
+ try {
+ engine._uploadOutgoing();
+ } catch (e) {
+ error = e;
+ }
+ ok(!!error);
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_syncFinish_noDelete() {
+ _("SyncEngine._syncFinish resets tracker's score");
+
+ let server = httpd_setup({});
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ let engine = makeRotaryEngine();
+ engine._delete = {}; // Nothing to delete
+ engine._tracker.score = 100;
+
+ // _syncFinish() will reset the engine's score.
+ engine._syncFinish();
+ do_check_eq(engine.score, 0);
+ server.stop(run_next_test);
+});
+
+
+add_test(function test_syncFinish_deleteByIds() {
+ _("SyncEngine._syncFinish deletes server records slated for deletion (list of record IDs).");
+
+ Service.identity.username = "foo";
+ let collection = new ServerCollection();
+ collection._wbos.flying = new ServerWBO(
+ 'flying', encryptPayload({id: 'flying',
+ denomination: "LNER Class A3 4472"}));
+ collection._wbos.scotsman = new ServerWBO(
+ 'scotsman', encryptPayload({id: 'scotsman',
+ denomination: "Flying Scotsman"}));
+ collection._wbos.rekolok = new ServerWBO(
+ 'rekolok', encryptPayload({id: 'rekolok',
+ denomination: "Rekonstruktionslokomotive"}));
+
+ let server = httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let engine = makeRotaryEngine();
+ try {
+ engine._delete = {ids: ['flying', 'rekolok']};
+ engine._syncFinish();
+
+ // The 'flying' and 'rekolok' records were deleted while the
+ // 'scotsman' one wasn't.
+ do_check_eq(collection.payload("flying"), undefined);
+ do_check_true(!!collection.payload("scotsman"));
+ do_check_eq(collection.payload("rekolok"), undefined);
+
+ // The deletion todo list has been reset.
+ do_check_eq(engine._delete.ids, undefined);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_test(function test_syncFinish_deleteLotsInBatches() {
+ _("SyncEngine._syncFinish deletes server records in batches of 100 (list of record IDs).");
+
+ Service.identity.username = "foo";
+ let collection = new ServerCollection();
+
+ // Let's count how many times the client does a DELETE request to the server
+ var noOfUploads = 0;
+ collection.delete = (function(orig) {
+ return function() {
+ noOfUploads++;
+ return orig.apply(this, arguments);
+ };
+ }(collection.delete));
+
+ // Create a bunch of records on the server
+ let now = Date.now();
+ for (var i = 0; i < 234; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + i});
+ let wbo = new ServerWBO(id, payload);
+ wbo.modified = now / 1000 - 60 * (i + 110);
+ collection.insertWBO(wbo);
+ }
+
+ let server = httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let engine = makeRotaryEngine();
+ try {
+
+ // Confirm initial environment
+ do_check_eq(noOfUploads, 0);
+
+ // Declare what we want to have deleted: all records no. 100 and
+ // up and all records that are less than 200 mins old (which are
+ // records 0 thru 90).
+ engine._delete = {ids: [],
+ newer: now / 1000 - 60 * 200.5};
+ for (i = 100; i < 234; i++) {
+ engine._delete.ids.push('record-no-' + i);
+ }
+
+ engine._syncFinish();
+
+ // Ensure that the appropriate server data has been wiped while
+ // preserving records 90 thru 200.
+ for (i = 0; i < 234; i++) {
+ let id = 'record-no-' + i;
+ if (i <= 90 || i >= 100) {
+ do_check_eq(collection.payload(id), undefined);
+ } else {
+ do_check_true(!!collection.payload(id));
+ }
+ }
+
+ // The deletion was done in batches
+ do_check_eq(noOfUploads, 2 + 1);
+
+ // The deletion todo list has been reset.
+ do_check_eq(engine._delete.ids, undefined);
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+
+add_task(function *test_sync_partialUpload() {
+ _("SyncEngine.sync() keeps changedIDs that couldn't be uploaded.");
+
+ Service.identity.username = "foo";
+
+ let collection = new ServerCollection();
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+ let syncTesting = new SyncTestingInfrastructure(server);
+ generateNewKeys(Service.collectionKeys);
+
+ let engine = makeRotaryEngine();
+ engine.lastSync = 123; // needs to be non-zero so that tracker is queried
+ engine.lastSyncLocal = 456;
+
+ // Let the third upload fail completely
+ var noOfUploads = 0;
+ collection.post = (function(orig) {
+ return function() {
+ if (noOfUploads == 2)
+ throw "FAIL!";
+ noOfUploads++;
+ return orig.apply(this, arguments);
+ };
+ }(collection.post));
+
+ // Create a bunch of records (and server side handlers)
+ for (let i = 0; i < 234; i++) {
+ let id = 'record-no-' + i;
+ engine._store.items[id] = "Record No. " + i;
+ engine._tracker.addChangedID(id, i);
+ // Let two items in the first upload batch fail.
+ if ((i != 23) && (i != 42)) {
+ collection.insert(id);
+ }
+ }
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+
+ engine.enabled = true;
+ let error;
+ try {
+ yield sync_engine_and_validate_telem(engine, true);
+ } catch (ex) {
+ error = ex;
+ }
+
+ ok(!!error);
+
+ // The timestamp has been updated.
+ do_check_true(engine.lastSyncLocal > 456);
+
+ for (let i = 0; i < 234; i++) {
+ let id = 'record-no-' + i;
+ // Ensure failed records are back in the tracker:
+ // * records no. 23 and 42 were rejected by the server,
+ // * records no. 200 and higher couldn't be uploaded because we failed
+ // hard on the 3rd upload.
+ if ((i == 23) || (i == 42) || (i >= 200))
+ do_check_eq(engine._tracker.changedIDs[id], i);
+ else
+ do_check_false(id in engine._tracker.changedIDs);
+ }
+
+ } finally {
+ yield promiseClean(server);
+ }
+});
+
+add_test(function test_canDecrypt_noCryptoKeys() {
+ _("SyncEngine.canDecrypt returns false if the engine fails to decrypt items on the server, e.g. due to a missing crypto key collection.");
+ Service.identity.username = "foo";
+
+ // Wipe collection keys so we can test the desired scenario.
+ Service.collectionKeys.clear();
+
+ let collection = new ServerCollection();
+ collection._wbos.flying = new ServerWBO(
+ 'flying', encryptPayload({id: 'flying',
+ denomination: "LNER Class A3 4472"}));
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ let engine = makeRotaryEngine();
+ try {
+
+ do_check_false(engine.canDecrypt());
+
+ } finally {
+ cleanAndGo(server);
+ }
+});
+
+add_test(function test_canDecrypt_true() {
+ _("SyncEngine.canDecrypt returns true if the engine can decrypt the items on the server.");
+ Service.identity.username = "foo";
+
+ generateNewKeys(Service.collectionKeys);
+
+ let collection = new ServerCollection();
+ collection._wbos.flying = new ServerWBO(
+ 'flying', encryptPayload({id: 'flying',
+ denomination: "LNER Class A3 4472"}));
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ let engine = makeRotaryEngine();
+ try {
+
+ do_check_true(engine.canDecrypt());
+
+ } finally {
+ cleanAndGo(server);
+ }
+
+});
+
+add_test(function test_syncapplied_observer() {
+ Service.identity.username = "foo";
+
+ const NUMBER_OF_RECORDS = 10;
+
+ let engine = makeRotaryEngine();
+
+ // Create a batch of server side records.
+ let collection = new ServerCollection();
+ for (var i = 0; i < NUMBER_OF_RECORDS; i++) {
+ let id = 'record-no-' + i;
+ let payload = encryptPayload({id: id, denomination: "Record No. " + id});
+ collection.insert(id, payload);
+ }
+
+ let server = httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ let numApplyCalls = 0;
+ let engine_name;
+ let count;
+ function onApplied(subject, data) {
+ numApplyCalls++;
+ engine_name = data;
+ count = subject;
+ }
+
+ Svc.Obs.add("weave:engine:sync:applied", onApplied);
+
+ try {
+ Service.scheduler.hasIncomingItems = false;
+
+ // Do sync.
+ engine._syncStartup();
+ engine._processIncoming();
+
+ do_check_attribute_count(engine._store.items, 10);
+
+ do_check_eq(numApplyCalls, 1);
+ do_check_eq(engine_name, "rotary");
+ do_check_eq(count.applied, 10);
+
+ do_check_true(Service.scheduler.hasIncomingItems);
+ } finally {
+ cleanAndGo(server);
+ Service.scheduler.hasIncomingItems = false;
+ Svc.Obs.remove("weave:engine:sync:applied", onApplied);
+ }
+});
diff --git a/services/sync/tests/unit/test_syncscheduler.js b/services/sync/tests/unit/test_syncscheduler.js
new file mode 100644
index 000000000..b066eae82
--- /dev/null
+++ b/services/sync/tests/unit/test_syncscheduler.js
@@ -0,0 +1,1033 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://services-sync/policies.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/status.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+Service.engineManager.clear();
+
+function CatapultEngine() {
+ SyncEngine.call(this, "Catapult", Service);
+}
+CatapultEngine.prototype = {
+ __proto__: SyncEngine.prototype,
+ exception: null, // tests fill this in
+ _sync: function _sync() {
+ throw this.exception;
+ }
+};
+
+Service.engineManager.register(CatapultEngine);
+
+var scheduler = new SyncScheduler(Service);
+var clientsEngine = Service.clientsEngine;
+
+// Don't remove stale clients when syncing. This is a test-only workaround
+// that lets us add clients directly to the store, without losing them on
+// the next sync.
+clientsEngine._removeRemoteClient = id => {};
+
+function sync_httpd_setup() {
+ let global = new ServerWBO("global", {
+ syncID: Service.syncID,
+ storageVersion: STORAGE_VERSION,
+ engines: {clients: {version: clientsEngine.version,
+ syncID: clientsEngine.syncID}}
+ });
+ let clientsColl = new ServerCollection({}, true);
+
+ // Tracking info/collections.
+ let collectionsHelper = track_collections_helper();
+ let upd = collectionsHelper.with_updated_collection;
+
+ return httpd_setup({
+ "/1.1/johndoe/storage/meta/global": upd("meta", global.handler()),
+ "/1.1/johndoe/info/collections": collectionsHelper.handler,
+ "/1.1/johndoe/storage/crypto/keys":
+ upd("crypto", (new ServerWBO("keys")).handler()),
+ "/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler()),
+ "/user/1.0/johndoe/node/weave": httpd_handler(200, "OK", "null")
+ });
+}
+
+function setUp(server) {
+ let deferred = Promise.defer();
+ configureIdentity({username: "johndoe"}).then(() => {
+ Service.clusterURL = server.baseURI + "/";
+
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ let result = serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+ deferred.resolve(result);
+ });
+ return deferred.promise;
+}
+
+function cleanUpAndGo(server) {
+ let deferred = Promise.defer();
+ Utils.nextTick(function () {
+ clientsEngine._store.wipe();
+ Service.startOver();
+ if (server) {
+ server.stop(deferred.resolve);
+ } else {
+ deferred.resolve();
+ }
+ });
+ return deferred.promise;
+}
+
+function run_test() {
+ initTestLogging("Trace");
+
+ Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
+ Log.repository.getLogger("Sync.scheduler").level = Log.Level.Trace;
+ validate_all_future_pings();
+
+ // The scheduler checks Weave.fxaEnabled to determine whether to use
+ // FxA defaults or legacy defaults. As .fxaEnabled checks the username, we
+ // set a username here then reset the default to ensure they are used.
+ ensureLegacyIdentityManager();
+ setBasicCredentials("johndoe");
+ scheduler.setDefaults();
+
+ run_next_test();
+}
+
+add_test(function test_prefAttributes() {
+ _("Test various attributes corresponding to preferences.");
+
+ const INTERVAL = 42 * 60 * 1000; // 42 minutes
+ const THRESHOLD = 3142;
+ const SCORE = 2718;
+ const TIMESTAMP1 = 1275493471649;
+
+ _("The 'nextSync' attribute stores a millisecond timestamp rounded down to the nearest second.");
+ do_check_eq(scheduler.nextSync, 0);
+ scheduler.nextSync = TIMESTAMP1;
+ do_check_eq(scheduler.nextSync, Math.floor(TIMESTAMP1 / 1000) * 1000);
+
+ _("'syncInterval' defaults to singleDeviceInterval.");
+ do_check_eq(Svc.Prefs.get('syncInterval'), undefined);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ _("'syncInterval' corresponds to a preference setting.");
+ scheduler.syncInterval = INTERVAL;
+ do_check_eq(scheduler.syncInterval, INTERVAL);
+ do_check_eq(Svc.Prefs.get('syncInterval'), INTERVAL);
+
+ _("'syncThreshold' corresponds to preference, defaults to SINGLE_USER_THRESHOLD");
+ do_check_eq(Svc.Prefs.get('syncThreshold'), undefined);
+ do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
+ scheduler.syncThreshold = THRESHOLD;
+ do_check_eq(scheduler.syncThreshold, THRESHOLD);
+
+ _("'globalScore' corresponds to preference, defaults to zero.");
+ do_check_eq(Svc.Prefs.get('globalScore'), 0);
+ do_check_eq(scheduler.globalScore, 0);
+ scheduler.globalScore = SCORE;
+ do_check_eq(scheduler.globalScore, SCORE);
+ do_check_eq(Svc.Prefs.get('globalScore'), SCORE);
+
+ _("Intervals correspond to default preferences.");
+ do_check_eq(scheduler.singleDeviceInterval,
+ Svc.Prefs.get("scheduler.sync11.singleDeviceInterval") * 1000);
+ do_check_eq(scheduler.idleInterval,
+ Svc.Prefs.get("scheduler.idleInterval") * 1000);
+ do_check_eq(scheduler.activeInterval,
+ Svc.Prefs.get("scheduler.activeInterval") * 1000);
+ do_check_eq(scheduler.immediateInterval,
+ Svc.Prefs.get("scheduler.immediateInterval") * 1000);
+
+ _("Custom values for prefs will take effect after a restart.");
+ Svc.Prefs.set("scheduler.sync11.singleDeviceInterval", 420);
+ Svc.Prefs.set("scheduler.idleInterval", 230);
+ Svc.Prefs.set("scheduler.activeInterval", 180);
+ Svc.Prefs.set("scheduler.immediateInterval", 31415);
+ scheduler.setDefaults();
+ do_check_eq(scheduler.idleInterval, 230000);
+ do_check_eq(scheduler.singleDeviceInterval, 420000);
+ do_check_eq(scheduler.activeInterval, 180000);
+ do_check_eq(scheduler.immediateInterval, 31415000);
+
+ _("Custom values for interval prefs can't be less than 60 seconds.");
+ Svc.Prefs.set("scheduler.sync11.singleDeviceInterval", 42);
+ Svc.Prefs.set("scheduler.idleInterval", 50);
+ Svc.Prefs.set("scheduler.activeInterval", 50);
+ Svc.Prefs.set("scheduler.immediateInterval", 10);
+ scheduler.setDefaults();
+ do_check_eq(scheduler.idleInterval, 60000);
+ do_check_eq(scheduler.singleDeviceInterval, 60000);
+ do_check_eq(scheduler.activeInterval, 60000);
+ do_check_eq(scheduler.immediateInterval, 60000);
+
+ Svc.Prefs.resetBranch("");
+ scheduler.setDefaults();
+ run_next_test();
+});
+
+add_identity_test(this, function* test_updateClientMode() {
+ _("Test updateClientMode adjusts scheduling attributes based on # of clients appropriately");
+ do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_false(scheduler.numClients > 1);
+ do_check_false(scheduler.idle);
+
+ // Trigger a change in interval & threshold by adding a client.
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ scheduler.updateClientMode();
+
+ do_check_eq(scheduler.syncThreshold, MULTI_DEVICE_THRESHOLD);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.idle);
+
+ // Resets the number of clients to 0.
+ clientsEngine.resetClient();
+ scheduler.updateClientMode();
+
+ // Goes back to single user if # clients is 1.
+ do_check_eq(scheduler.numClients, 1);
+ do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_false(scheduler.numClients > 1);
+ do_check_false(scheduler.idle);
+
+ yield cleanUpAndGo();
+});
+
+add_identity_test(this, function* test_masterpassword_locked_retry_interval() {
+ _("Test Status.login = MASTER_PASSWORD_LOCKED results in reschedule at MASTER_PASSWORD interval");
+ let loginFailed = false;
+ Svc.Obs.add("weave:service:login:error", function onLoginError() {
+ Svc.Obs.remove("weave:service:login:error", onLoginError);
+ loginFailed = true;
+ });
+
+ let rescheduleInterval = false;
+
+ let oldScheduleAtInterval = SyncScheduler.prototype.scheduleAtInterval;
+ SyncScheduler.prototype.scheduleAtInterval = function (interval) {
+ rescheduleInterval = true;
+ do_check_eq(interval, MASTER_PASSWORD_LOCKED_RETRY_INTERVAL);
+ };
+
+ let oldVerifyLogin = Service.verifyLogin;
+ Service.verifyLogin = function () {
+ Status.login = MASTER_PASSWORD_LOCKED;
+ return false;
+ };
+
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ Service.sync();
+
+ do_check_true(loginFailed);
+ do_check_eq(Status.login, MASTER_PASSWORD_LOCKED);
+ do_check_true(rescheduleInterval);
+
+ Service.verifyLogin = oldVerifyLogin;
+ SyncScheduler.prototype.scheduleAtInterval = oldScheduleAtInterval;
+
+ yield cleanUpAndGo(server);
+});
+
+add_identity_test(this, function* test_calculateBackoff() {
+ do_check_eq(Status.backoffInterval, 0);
+
+ // Test no interval larger than the maximum backoff is used if
+ // Status.backoffInterval is smaller.
+ Status.backoffInterval = 5;
+ let backoffInterval = Utils.calculateBackoff(50, MAXIMUM_BACKOFF_INTERVAL,
+ Status.backoffInterval);
+
+ do_check_eq(backoffInterval, MAXIMUM_BACKOFF_INTERVAL);
+
+ // Test Status.backoffInterval is used if it is
+ // larger than MAXIMUM_BACKOFF_INTERVAL.
+ Status.backoffInterval = MAXIMUM_BACKOFF_INTERVAL + 10;
+ backoffInterval = Utils.calculateBackoff(50, MAXIMUM_BACKOFF_INTERVAL,
+ Status.backoffInterval);
+
+ do_check_eq(backoffInterval, MAXIMUM_BACKOFF_INTERVAL + 10);
+
+ yield cleanUpAndGo();
+});
+
+add_identity_test(this, function* test_scheduleNextSync_nowOrPast() {
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // We're late for a sync...
+ scheduler.scheduleNextSync(-1);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_scheduleNextSync_future_noBackoff() {
+ _("scheduleNextSync() uses the current syncInterval if no interval is provided.");
+ // Test backoffInterval is 0 as expected.
+ do_check_eq(Status.backoffInterval, 0);
+
+ _("Test setting sync interval when nextSync == 0");
+ scheduler.nextSync = 0;
+ scheduler.scheduleNextSync();
+
+ // nextSync - Date.now() might be smaller than expectedInterval
+ // since some time has passed since we called scheduleNextSync().
+ do_check_true(scheduler.nextSync - Date.now()
+ <= scheduler.syncInterval);
+ do_check_eq(scheduler.syncTimer.delay, scheduler.syncInterval);
+
+ _("Test setting sync interval when nextSync != 0");
+ scheduler.nextSync = Date.now() + scheduler.singleDeviceInterval;
+ scheduler.scheduleNextSync();
+
+ // nextSync - Date.now() might be smaller than expectedInterval
+ // since some time has passed since we called scheduleNextSync().
+ do_check_true(scheduler.nextSync - Date.now()
+ <= scheduler.syncInterval);
+ do_check_true(scheduler.syncTimer.delay <= scheduler.syncInterval);
+
+ _("Scheduling requests for intervals larger than the current one will be ignored.");
+ // Request a sync at a longer interval. The sync that's already scheduled
+ // for sooner takes precedence.
+ let nextSync = scheduler.nextSync;
+ let timerDelay = scheduler.syncTimer.delay;
+ let requestedInterval = scheduler.syncInterval * 10;
+ scheduler.scheduleNextSync(requestedInterval);
+ do_check_eq(scheduler.nextSync, nextSync);
+ do_check_eq(scheduler.syncTimer.delay, timerDelay);
+
+ // We can schedule anything we want if there isn't a sync scheduled.
+ scheduler.nextSync = 0;
+ scheduler.scheduleNextSync(requestedInterval);
+ do_check_true(scheduler.nextSync <= Date.now() + requestedInterval);
+ do_check_eq(scheduler.syncTimer.delay, requestedInterval);
+
+ // Request a sync at the smallest possible interval (0 triggers now).
+ scheduler.scheduleNextSync(1);
+ do_check_true(scheduler.nextSync <= Date.now() + 1);
+ do_check_eq(scheduler.syncTimer.delay, 1);
+
+ yield cleanUpAndGo();
+});
+
+add_identity_test(this, function* test_scheduleNextSync_future_backoff() {
+ _("scheduleNextSync() will honour backoff in all scheduling requests.");
+ // Let's take a backoff interval that's bigger than the default sync interval.
+ const BACKOFF = 7337;
+ Status.backoffInterval = scheduler.syncInterval + BACKOFF;
+
+ _("Test setting sync interval when nextSync == 0");
+ scheduler.nextSync = 0;
+ scheduler.scheduleNextSync();
+
+ // nextSync - Date.now() might be smaller than expectedInterval
+ // since some time has passed since we called scheduleNextSync().
+ do_check_true(scheduler.nextSync - Date.now()
+ <= Status.backoffInterval);
+ do_check_eq(scheduler.syncTimer.delay, Status.backoffInterval);
+
+ _("Test setting sync interval when nextSync != 0");
+ scheduler.nextSync = Date.now() + scheduler.singleDeviceInterval;
+ scheduler.scheduleNextSync();
+
+ // nextSync - Date.now() might be smaller than expectedInterval
+ // since some time has passed since we called scheduleNextSync().
+ do_check_true(scheduler.nextSync - Date.now()
+ <= Status.backoffInterval);
+ do_check_true(scheduler.syncTimer.delay <= Status.backoffInterval);
+
+ // Request a sync at a longer interval. The sync that's already scheduled
+ // for sooner takes precedence.
+ let nextSync = scheduler.nextSync;
+ let timerDelay = scheduler.syncTimer.delay;
+ let requestedInterval = scheduler.syncInterval * 10;
+ do_check_true(requestedInterval > Status.backoffInterval);
+ scheduler.scheduleNextSync(requestedInterval);
+ do_check_eq(scheduler.nextSync, nextSync);
+ do_check_eq(scheduler.syncTimer.delay, timerDelay);
+
+ // We can schedule anything we want if there isn't a sync scheduled.
+ scheduler.nextSync = 0;
+ scheduler.scheduleNextSync(requestedInterval);
+ do_check_true(scheduler.nextSync <= Date.now() + requestedInterval);
+ do_check_eq(scheduler.syncTimer.delay, requestedInterval);
+
+ // Request a sync at the smallest possible interval (0 triggers now).
+ scheduler.scheduleNextSync(1);
+ do_check_true(scheduler.nextSync <= Date.now() + Status.backoffInterval);
+ do_check_eq(scheduler.syncTimer.delay, Status.backoffInterval);
+
+ yield cleanUpAndGo();
+});
+
+add_identity_test(this, function* test_handleSyncError() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Force sync to fail.
+ Svc.Prefs.set("firstSync", "notReady");
+
+ _("Ensure expected initial environment.");
+ do_check_eq(scheduler._syncErrors, 0);
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_eq(Status.backoffInterval, 0);
+
+ // Trigger sync with an error several times & observe
+ // functionality of handleSyncError()
+ _("Test first error calls scheduleNextSync on default interval");
+ Service.sync();
+ do_check_true(scheduler.nextSync <= Date.now() + scheduler.singleDeviceInterval);
+ do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
+ do_check_eq(scheduler._syncErrors, 1);
+ do_check_false(Status.enforceBackoff);
+ scheduler.syncTimer.clear();
+
+ _("Test second error still calls scheduleNextSync on default interval");
+ Service.sync();
+ do_check_true(scheduler.nextSync <= Date.now() + scheduler.singleDeviceInterval);
+ do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
+ do_check_eq(scheduler._syncErrors, 2);
+ do_check_false(Status.enforceBackoff);
+ scheduler.syncTimer.clear();
+
+ _("Test third error sets Status.enforceBackoff and calls scheduleAtInterval");
+ Service.sync();
+ let maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
+ do_check_eq(Status.backoffInterval, 0);
+ do_check_true(scheduler.nextSync <= (Date.now() + maxInterval));
+ do_check_true(scheduler.syncTimer.delay <= maxInterval);
+ do_check_eq(scheduler._syncErrors, 3);
+ do_check_true(Status.enforceBackoff);
+
+ // Status.enforceBackoff is false but there are still errors.
+ Status.resetBackoff();
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(scheduler._syncErrors, 3);
+ scheduler.syncTimer.clear();
+
+ _("Test fourth error still calls scheduleAtInterval even if enforceBackoff was reset");
+ Service.sync();
+ maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
+ do_check_true(scheduler.nextSync <= Date.now() + maxInterval);
+ do_check_true(scheduler.syncTimer.delay <= maxInterval);
+ do_check_eq(scheduler._syncErrors, 4);
+ do_check_true(Status.enforceBackoff);
+ scheduler.syncTimer.clear();
+
+ _("Arrange for a successful sync to reset the scheduler error count");
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+ Svc.Prefs.set("firstSync", "wipeRemote");
+ scheduler.scheduleNextSync(-1);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_client_sync_finish_updateClientMode() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Confirm defaults.
+ do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_false(scheduler.idle);
+
+ // Trigger a change in interval & threshold by adding a client.
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ do_check_false(scheduler.numClients > 1);
+ scheduler.updateClientMode();
+ Service.sync();
+
+ do_check_eq(scheduler.syncThreshold, MULTI_DEVICE_THRESHOLD);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+ do_check_true(scheduler.numClients > 1);
+ do_check_false(scheduler.idle);
+
+ // Resets the number of clients to 0.
+ clientsEngine.resetClient();
+ Service.sync();
+
+ // Goes back to single user if # clients is 1.
+ do_check_eq(scheduler.numClients, 1);
+ do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+ do_check_false(scheduler.numClients > 1);
+ do_check_false(scheduler.idle);
+
+ yield cleanUpAndGo(server);
+});
+
+add_identity_test(this, function* test_autoconnect_nextSync_past() {
+ let deferred = Promise.defer();
+ // nextSync will be 0 by default, so it's way in the past.
+
+ Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ scheduler.delayedAutoConnect(0);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_autoconnect_nextSync_future() {
+ let deferred = Promise.defer();
+ let previousSync = Date.now() + scheduler.syncInterval / 2;
+ scheduler.nextSync = previousSync;
+ // nextSync rounds to the nearest second.
+ let expectedSync = scheduler.nextSync;
+ let expectedInterval = expectedSync - Date.now() - 1000;
+
+ // Ensure we don't actually try to sync (or log in for that matter).
+ function onLoginStart() {
+ do_throw("Should not get here!");
+ }
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+
+ waitForZeroTimer(function () {
+ do_check_eq(scheduler.nextSync, expectedSync);
+ do_check_true(scheduler.syncTimer.delay >= expectedInterval);
+
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ cleanUpAndGo().then(deferred.resolve);
+ });
+
+ yield configureIdentity({username: "johndoe"});
+ scheduler.delayedAutoConnect(0);
+ yield deferred.promise;
+});
+
+// XXX - this test can't be run with the browserid identity as it relies
+// on the syncKey getter behaving in a certain way...
+add_task(function* test_autoconnect_mp_locked() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Pretend user did not unlock master password.
+ let origLocked = Utils.mpLocked;
+ Utils.mpLocked = () => true;
+
+ let origGetter = Service.identity.__lookupGetter__("syncKey");
+ let origSetter = Service.identity.__lookupSetter__("syncKey");
+ delete Service.identity.syncKey;
+ Service.identity.__defineGetter__("syncKey", function() {
+ _("Faking Master Password entry cancelation.");
+ throw "User canceled Master Password entry";
+ });
+
+ let deferred = Promise.defer();
+ // A locked master password will still trigger a sync, but then we'll hit
+ // MASTER_PASSWORD_LOCKED and hence MASTER_PASSWORD_LOCKED_RETRY_INTERVAL.
+ Svc.Obs.add("weave:service:login:error", function onLoginError() {
+ Svc.Obs.remove("weave:service:login:error", onLoginError);
+ Utils.nextTick(function aLittleBitAfterLoginError() {
+ do_check_eq(Status.login, MASTER_PASSWORD_LOCKED);
+
+ Utils.mpLocked = origLocked;
+ delete Service.identity.syncKey;
+ Service.identity.__defineGetter__("syncKey", origGetter);
+ Service.identity.__defineSetter__("syncKey", origSetter);
+
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+ });
+
+ scheduler.delayedAutoConnect(0);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_no_autoconnect_during_wizard() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Simulate the Sync setup wizard.
+ Svc.Prefs.set("firstSync", "notReady");
+
+ // Ensure we don't actually try to sync (or log in for that matter).
+ function onLoginStart() {
+ do_throw("Should not get here!");
+ }
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+
+ let deferred = Promise.defer();
+ waitForZeroTimer(function () {
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+
+ scheduler.delayedAutoConnect(0);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_no_autoconnect_status_not_ok() {
+ let server = sync_httpd_setup();
+
+ // Ensure we don't actually try to sync (or log in for that matter).
+ function onLoginStart() {
+ do_throw("Should not get here!");
+ }
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+
+ let deferred = Promise.defer();
+ waitForZeroTimer(function () {
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+
+ do_check_eq(Status.service, CLIENT_NOT_CONFIGURED);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_USERNAME);
+
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+
+ scheduler.delayedAutoConnect(0);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_autoconnectDelay_pref() {
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+
+ Svc.Prefs.set("autoconnectDelay", 1);
+
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ Svc.Obs.notify("weave:service:ready");
+
+ // autoconnectDelay pref is multiplied by 1000.
+ do_check_eq(scheduler._autoTimer.delay, 1000);
+ do_check_eq(Status.service, STATUS_OK);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_idle_adjustSyncInterval() {
+ // Confirm defaults.
+ do_check_eq(scheduler.idle, false);
+
+ // Single device: nothing changes.
+ scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
+ do_check_eq(scheduler.idle, true);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ // Multiple devices: switch to idle interval.
+ scheduler.idle = false;
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ scheduler.updateClientMode();
+ scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
+ do_check_eq(scheduler.idle, true);
+ do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
+
+ yield cleanUpAndGo();
+});
+
+add_identity_test(this, function* test_back_triggersSync() {
+ // Confirm defaults.
+ do_check_false(scheduler.idle);
+ do_check_eq(Status.backoffInterval, 0);
+
+ // Set up: Define 2 clients and put the system in idle.
+ scheduler.numClients = 2;
+ scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
+ do_check_true(scheduler.idle);
+
+ let deferred = Promise.defer();
+ // We don't actually expect the sync (or the login, for that matter) to
+ // succeed. We just want to ensure that it was attempted.
+ Svc.Obs.add("weave:service:login:error", function onLoginError() {
+ Svc.Obs.remove("weave:service:login:error", onLoginError);
+ cleanUpAndGo().then(deferred.resolve);
+ });
+
+ // Send an 'active' event to trigger sync soonish.
+ scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_active_triggersSync_observesBackoff() {
+ // Confirm defaults.
+ do_check_false(scheduler.idle);
+
+ // Set up: Set backoff, define 2 clients and put the system in idle.
+ const BACKOFF = 7337;
+ Status.backoffInterval = scheduler.idleInterval + BACKOFF;
+ scheduler.numClients = 2;
+ scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
+ do_check_eq(scheduler.idle, true);
+
+ function onLoginStart() {
+ do_throw("Shouldn't have kicked off a sync!");
+ }
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+
+ let deferred = Promise.defer();
+ timer = Utils.namedTimer(function () {
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+
+ do_check_true(scheduler.nextSync <= Date.now() + Status.backoffInterval);
+ do_check_eq(scheduler.syncTimer.delay, Status.backoffInterval);
+
+ cleanUpAndGo().then(deferred.resolve);
+ }, IDLE_OBSERVER_BACK_DELAY * 1.5, {}, "timer");
+
+ // Send an 'active' event to try to trigger sync soonish.
+ scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_back_debouncing() {
+ _("Ensure spurious back-then-idle events, as observed on OS X, don't trigger a sync.");
+
+ // Confirm defaults.
+ do_check_eq(scheduler.idle, false);
+
+ // Set up: Define 2 clients and put the system in idle.
+ scheduler.numClients = 2;
+ scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
+ do_check_eq(scheduler.idle, true);
+
+ function onLoginStart() {
+ do_throw("Shouldn't have kicked off a sync!");
+ }
+ Svc.Obs.add("weave:service:login:start", onLoginStart);
+
+ // Create spurious back-then-idle events as observed on OS X:
+ scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
+ scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
+
+ let deferred = Promise.defer();
+ timer = Utils.namedTimer(function () {
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ cleanUpAndGo().then(deferred.resolve);
+ }, IDLE_OBSERVER_BACK_DELAY * 1.5, {}, "timer");
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_no_sync_node() {
+ // Test when Status.sync == NO_SYNC_NODE_FOUND
+ // it is not overwritten on sync:finish
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ Service.serverURL = server.baseURI + "/";
+
+ Service.sync();
+ do_check_eq(Status.sync, NO_SYNC_NODE_FOUND);
+ do_check_eq(scheduler.syncTimer.delay, NO_SYNC_NODE_INTERVAL);
+
+ yield cleanUpAndGo(server);
+});
+
+add_identity_test(this, function* test_sync_failed_partial_500s() {
+ _("Test a 5xx status calls handleSyncError.");
+ scheduler._syncErrors = MAX_ERROR_COUNT_BEFORE_BACKOFF;
+ let server = sync_httpd_setup();
+
+ let engine = Service.engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 500};
+
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+
+ do_check_true(yield setUp(server));
+
+ Service.sync();
+
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+ let maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
+ do_check_eq(Status.backoffInterval, 0);
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(scheduler._syncErrors, 4);
+ do_check_true(scheduler.nextSync <= (Date.now() + maxInterval));
+ do_check_true(scheduler.syncTimer.delay <= maxInterval);
+
+ yield cleanUpAndGo(server);
+});
+
+add_identity_test(this, function* test_sync_failed_partial_400s() {
+ _("Test a non-5xx status doesn't call handleSyncError.");
+ scheduler._syncErrors = MAX_ERROR_COUNT_BEFORE_BACKOFF;
+ let server = sync_httpd_setup();
+
+ let engine = Service.engineManager.get("catapult");
+ engine.enabled = true;
+ engine.exception = {status: 400};
+
+ // Have multiple devices for an active interval.
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
+
+ do_check_true(yield setUp(server));
+
+ Service.sync();
+
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+
+ do_check_eq(Status.backoffInterval, 0);
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(scheduler._syncErrors, 0);
+ do_check_true(scheduler.nextSync <= (Date.now() + scheduler.activeInterval));
+ do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
+
+ yield cleanUpAndGo(server);
+});
+
+add_identity_test(this, function* test_sync_X_Weave_Backoff() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Use an odd value on purpose so that it doesn't happen to coincide with one
+ // of the sync intervals.
+ const BACKOFF = 7337;
+
+ // Extend info/collections so that we can put it into server maintenance mode.
+ const INFO_COLLECTIONS = "/1.1/johndoe/info/collections";
+ let infoColl = server._handler._overridePaths[INFO_COLLECTIONS];
+ let serverBackoff = false;
+ function infoCollWithBackoff(request, response) {
+ if (serverBackoff) {
+ response.setHeader("X-Weave-Backoff", "" + BACKOFF);
+ }
+ infoColl(request, response);
+ }
+ server.registerPathHandler(INFO_COLLECTIONS, infoCollWithBackoff);
+
+ // Pretend we have two clients so that the regular sync interval is
+ // sufficiently low.
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ let rec = clientsEngine._store.createRecord("foo", "clients");
+ rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
+ rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
+
+ // Sync once to log in and get everything set up. Let's verify our initial
+ // values.
+ Service.sync();
+ do_check_eq(Status.backoffInterval, 0);
+ do_check_eq(Status.minimumNextSync, 0);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+ do_check_true(scheduler.nextSync <=
+ Date.now() + scheduler.syncInterval);
+ // Sanity check that we picked the right value for BACKOFF:
+ do_check_true(scheduler.syncInterval < BACKOFF * 1000);
+
+ // Turn on server maintenance and sync again.
+ serverBackoff = true;
+ Service.sync();
+
+ do_check_true(Status.backoffInterval >= BACKOFF * 1000);
+ // Allowing 3 seconds worth of of leeway between when Status.minimumNextSync
+ // was set and when this line gets executed.
+ let minimumExpectedDelay = (BACKOFF - 3) * 1000;
+ do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay);
+
+ // Verify that the next sync is actually going to wait that long.
+ do_check_true(scheduler.nextSync >= Date.now() + minimumExpectedDelay);
+ do_check_true(scheduler.syncTimer.delay >= minimumExpectedDelay);
+
+ yield cleanUpAndGo(server);
+});
+
+add_identity_test(this, function* test_sync_503_Retry_After() {
+ let server = sync_httpd_setup();
+ yield setUp(server);
+
+ // Use an odd value on purpose so that it doesn't happen to coincide with one
+ // of the sync intervals.
+ const BACKOFF = 7337;
+
+ // Extend info/collections so that we can put it into server maintenance mode.
+ const INFO_COLLECTIONS = "/1.1/johndoe/info/collections";
+ let infoColl = server._handler._overridePaths[INFO_COLLECTIONS];
+ let serverMaintenance = false;
+ function infoCollWithMaintenance(request, response) {
+ if (!serverMaintenance) {
+ infoColl(request, response);
+ return;
+ }
+ response.setHeader("Retry-After", "" + BACKOFF);
+ response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
+ }
+ server.registerPathHandler(INFO_COLLECTIONS, infoCollWithMaintenance);
+
+ // Pretend we have two clients so that the regular sync interval is
+ // sufficiently low.
+ clientsEngine._store.create({id: "foo", cleartext: "bar"});
+ let rec = clientsEngine._store.createRecord("foo", "clients");
+ rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
+ rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
+
+ // Sync once to log in and get everything set up. Let's verify our initial
+ // values.
+ Service.sync();
+ do_check_false(Status.enforceBackoff);
+ do_check_eq(Status.backoffInterval, 0);
+ do_check_eq(Status.minimumNextSync, 0);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+ do_check_true(scheduler.nextSync <=
+ Date.now() + scheduler.syncInterval);
+ // Sanity check that we picked the right value for BACKOFF:
+ do_check_true(scheduler.syncInterval < BACKOFF * 1000);
+
+ // Turn on server maintenance and sync again.
+ serverMaintenance = true;
+ Service.sync();
+
+ do_check_true(Status.enforceBackoff);
+ do_check_true(Status.backoffInterval >= BACKOFF * 1000);
+ // Allowing 3 seconds worth of of leeway between when Status.minimumNextSync
+ // was set and when this line gets executed.
+ let minimumExpectedDelay = (BACKOFF - 3) * 1000;
+ do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay);
+
+ // Verify that the next sync is actually going to wait that long.
+ do_check_true(scheduler.nextSync >= Date.now() + minimumExpectedDelay);
+ do_check_true(scheduler.syncTimer.delay >= minimumExpectedDelay);
+
+ yield cleanUpAndGo(server);
+});
+
+add_identity_test(this, function* test_loginError_recoverable_reschedules() {
+ _("Verify that a recoverable login error schedules a new sync.");
+ yield configureIdentity({username: "johndoe"});
+ Service.serverURL = "http://localhost:1234/";
+ Service.clusterURL = Service.serverURL;
+ Service.persistLogin();
+ Status.resetSync(); // reset Status.login
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:login:error", function onLoginError() {
+ Svc.Obs.remove("weave:service:login:error", onLoginError);
+ Utils.nextTick(function aLittleBitAfterLoginError() {
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ let expectedNextSync = Date.now() + scheduler.syncInterval;
+ do_check_true(scheduler.nextSync > Date.now());
+ do_check_true(scheduler.nextSync <= expectedNextSync);
+ do_check_true(scheduler.syncTimer.delay > 0);
+ do_check_true(scheduler.syncTimer.delay <= scheduler.syncInterval);
+
+ Svc.Obs.remove("weave:service:sync:start", onSyncStart);
+ cleanUpAndGo().then(deferred.resolve);
+ });
+ });
+
+ // Let's set it up so that a sync is overdue, both in terms of previously
+ // scheduled syncs and the global score. We still do not expect an immediate
+ // sync because we just tried (duh).
+ scheduler.nextSync = Date.now() - 100000;
+ scheduler.globalScore = SINGLE_USER_THRESHOLD + 1;
+ function onSyncStart() {
+ do_throw("Shouldn't have started a sync!");
+ }
+ Svc.Obs.add("weave:service:sync:start", onSyncStart);
+
+ // Sanity check.
+ do_check_eq(scheduler.syncTimer, null);
+ do_check_eq(Status.checkSetup(), STATUS_OK);
+ do_check_eq(Status.login, LOGIN_SUCCEEDED);
+
+ scheduler.scheduleNextSync(0);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_loginError_fatal_clearsTriggers() {
+ _("Verify that a fatal login error clears sync triggers.");
+ yield configureIdentity({username: "johndoe"});
+
+ let server = httpd_setup({
+ "/1.1/johndoe/info/collections": httpd_handler(401, "Unauthorized")
+ });
+
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = Service.serverURL;
+ Service.persistLogin();
+ Status.resetSync(); // reset Status.login
+
+ let deferred = Promise.defer();
+ Svc.Obs.add("weave:service:login:error", function onLoginError() {
+ Svc.Obs.remove("weave:service:login:error", onLoginError);
+ Utils.nextTick(function aLittleBitAfterLoginError() {
+
+ if (isConfiguredWithLegacyIdentity()) {
+ // for the "legacy" identity, a 401 on info/collections means the
+ // password is wrong, so we enter a "login rejected" state.
+ do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED);
+
+ do_check_eq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncTimer, null);
+ } else {
+ // For the FxA identity, a 401 on info/collections means a transient
+ // error, probably due to an inability to fetch a token.
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+ // syncs should still be scheduled.
+ do_check_true(scheduler.nextSync > Date.now());
+ do_check_true(scheduler.syncTimer.delay > 0);
+ }
+ cleanUpAndGo(server).then(deferred.resolve);
+ });
+ });
+
+ // Sanity check.
+ do_check_eq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncTimer, null);
+ do_check_eq(Status.checkSetup(), STATUS_OK);
+ do_check_eq(Status.login, LOGIN_SUCCEEDED);
+
+ scheduler.scheduleNextSync(0);
+ yield deferred.promise;
+});
+
+add_identity_test(this, function* test_proper_interval_on_only_failing() {
+ _("Ensure proper behavior when only failed records are applied.");
+
+ // If an engine reports that no records succeeded, we shouldn't decrease the
+ // sync interval.
+ do_check_false(scheduler.hasIncomingItems);
+ const INTERVAL = 10000000;
+ scheduler.syncInterval = INTERVAL;
+
+ Svc.Obs.notify("weave:service:sync:applied", {
+ applied: 2,
+ succeeded: 0,
+ failed: 2,
+ newFailed: 2,
+ reconciled: 0
+ });
+
+ let deferred = Promise.defer();
+ Utils.nextTick(function() {
+ scheduler.adjustSyncInterval();
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
+
+ deferred.resolve();
+ });
+ yield deferred.promise;
+});
diff --git a/services/sync/tests/unit/test_syncstoragerequest.js b/services/sync/tests/unit/test_syncstoragerequest.js
new file mode 100644
index 000000000..14e5daade
--- /dev/null
+++ b/services/sync/tests/unit/test_syncstoragerequest.js
@@ -0,0 +1,220 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/rest.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+var httpProtocolHandler = Cc["@mozilla.org/network/protocol;1?name=http"]
+ .getService(Ci.nsIHttpProtocolHandler);
+
+function run_test() {
+ Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace;
+ initTestLogging();
+
+ ensureLegacyIdentityManager();
+
+ run_next_test();
+}
+
+add_test(function test_user_agent_desktop() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let expectedUA = Services.appinfo.name + "/" + Services.appinfo.version +
+ " (" + httpProtocolHandler.oscpu + ")" +
+ " FxSync/" + WEAVE_VERSION + "." +
+ Services.appinfo.appBuildID + ".desktop";
+
+ let request = new SyncStorageRequest(server.baseURI + "/resource");
+ request.onComplete = function onComplete(error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(handler.request.getHeader("User-Agent"), expectedUA);
+ server.stop(run_next_test);
+ };
+ do_check_eq(request.get(), request);
+});
+
+add_test(function test_user_agent_mobile() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ Svc.Prefs.set("client.type", "mobile");
+ let expectedUA = Services.appinfo.name + "/" + Services.appinfo.version +
+ " (" + httpProtocolHandler.oscpu + ")" +
+ " FxSync/" + WEAVE_VERSION + "." +
+ Services.appinfo.appBuildID + ".mobile";
+
+ let request = new SyncStorageRequest(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(handler.request.getHeader("User-Agent"), expectedUA);
+ Svc.Prefs.resetBranch("");
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_auth() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ setBasicCredentials("johndoe", "ilovejane", "XXXXXXXXX");
+
+ let request = Service.getStorageRequest(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 200);
+ do_check_true(basic_auth_matches(handler.request, "johndoe", "ilovejane"));
+
+ Svc.Prefs.reset("");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * The X-Weave-Timestamp header updates SyncStorageRequest.serverTime.
+ */
+add_test(function test_weave_timestamp() {
+ const TIMESTAMP = 1274380461;
+ function handler(request, response) {
+ response.setHeader("X-Weave-Timestamp", "" + TIMESTAMP, false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ }
+ let server = httpd_setup({"/resource": handler});
+
+ do_check_eq(SyncStorageRequest.serverTime, undefined);
+ let request = new SyncStorageRequest(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(SyncStorageRequest.serverTime, TIMESTAMP);
+ delete SyncStorageRequest.serverTime;
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * The X-Weave-Backoff header notifies an observer.
+ */
+add_test(function test_weave_backoff() {
+ function handler(request, response) {
+ response.setHeader("X-Weave-Backoff", '600', false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ }
+ let server = httpd_setup({"/resource": handler});
+
+ let backoffInterval;
+ Svc.Obs.add("weave:service:backoff:interval", function onBackoff(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", onBackoff);
+ backoffInterval = subject;
+ });
+
+ let request = new SyncStorageRequest(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(backoffInterval, 600);
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * X-Weave-Quota-Remaining header notifies observer on successful requests.
+ */
+add_test(function test_weave_quota_notice() {
+ function handler(request, response) {
+ response.setHeader("X-Weave-Quota-Remaining", '1048576', false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ }
+ let server = httpd_setup({"/resource": handler});
+
+ let quotaValue;
+ Svc.Obs.add("weave:service:quota:remaining", function onQuota(subject) {
+ Svc.Obs.remove("weave:service:quota:remaining", onQuota);
+ quotaValue = subject;
+ });
+
+ let request = new SyncStorageRequest(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(quotaValue, 1048576);
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * X-Weave-Quota-Remaining header doesn't notify observer on failed requests.
+ */
+add_test(function test_weave_quota_error() {
+ function handler(request, response) {
+ response.setHeader("X-Weave-Quota-Remaining", '1048576', false);
+ response.setStatusLine(request.httpVersion, 400, "Bad Request");
+ }
+ let server = httpd_setup({"/resource": handler});
+
+ let quotaValue;
+ function onQuota(subject) {
+ quotaValue = subject;
+ }
+ Svc.Obs.add("weave:service:quota:remaining", onQuota);
+
+ let request = new SyncStorageRequest(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 400);
+ do_check_eq(quotaValue, undefined);
+ Svc.Obs.remove("weave:service:quota:remaining", onQuota);
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_abort() {
+ function handler(request, response) {
+ response.setHeader("X-Weave-Timestamp", "" + TIMESTAMP, false);
+ response.setHeader("X-Weave-Quota-Remaining", '1048576', false);
+ response.setHeader("X-Weave-Backoff", '600', false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ }
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new SyncStorageRequest(server.baseURI + "/resource");
+
+ // Aborting a request that hasn't been sent yet is pointless and will throw.
+ do_check_throws(function () {
+ request.abort();
+ });
+
+ function throwy() {
+ do_throw("Shouldn't have gotten here!");
+ }
+
+ Svc.Obs.add("weave:service:backoff:interval", throwy);
+ Svc.Obs.add("weave:service:quota:remaining", throwy);
+ request.onProgress = request.onComplete = throwy;
+
+ request.get();
+ request.abort();
+ do_check_eq(request.status, request.ABORTED);
+
+ // Aborting an already aborted request is pointless and will throw.
+ do_check_throws(function () {
+ request.abort();
+ });
+
+ Utils.nextTick(function () {
+ // Verify that we didn't try to process any of the values.
+ do_check_eq(SyncStorageRequest.serverTime, undefined);
+
+ Svc.Obs.remove("weave:service:backoff:interval", throwy);
+ Svc.Obs.remove("weave:service:quota:remaining", throwy);
+
+ server.stop(run_next_test);
+ });
+});
diff --git a/services/sync/tests/unit/test_tab_engine.js b/services/sync/tests/unit/test_tab_engine.js
new file mode 100644
index 000000000..049250230
--- /dev/null
+++ b/services/sync/tests/unit/test_tab_engine.js
@@ -0,0 +1,141 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines/tabs.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+function getMocks() {
+ let engine = new TabEngine(Service);
+ let store = engine._store;
+ store.getTabState = mockGetTabState;
+ store.shouldSkipWindow = mockShouldSkipWindow;
+ return [engine, store];
+}
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_getOpenURLs() {
+ _("Test getOpenURLs.");
+ let [engine, store] = getMocks();
+
+ let superLongURL = "http://" + (new Array(MAX_UPLOAD_BYTES).join("w")) + ".com/";
+ let urls = ["http://bar.com", "http://foo.com", "http://foobar.com", superLongURL];
+ function fourURLs() {
+ return urls.pop();
+ }
+ store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, fourURLs, 1, 4);
+
+ let matches;
+
+ _(" test matching works (true)");
+ let openurlsset = engine.getOpenURLs();
+ matches = openurlsset.has("http://foo.com");
+ ok(matches);
+
+ _(" test matching works (false)");
+ matches = openurlsset.has("http://barfoo.com");
+ ok(!matches);
+
+ _(" test matching works (too long)");
+ matches = openurlsset.has(superLongURL);
+ ok(!matches);
+
+ run_next_test();
+});
+
+add_test(function test_tab_engine_skips_incoming_local_record() {
+ _("Ensure incoming records that match local client ID are never applied.");
+ let [engine, store] = getMocks();
+ let localID = engine.service.clientsEngine.localID;
+ let apply = store.applyIncoming;
+ let applied = [];
+
+ store.applyIncoming = function (record) {
+ notEqual(record.id, localID, "Only apply tab records from remote clients");
+ applied.push(record);
+ apply.call(store, record);
+ }
+
+ let collection = new ServerCollection();
+
+ _("Creating remote tab record with local client ID");
+ let localRecord = encryptPayload({id: localID, clientName: "local"});
+ collection.insert(localID, localRecord);
+
+ _("Creating remote tab record with a different client ID");
+ let remoteID = "different";
+ let remoteRecord = encryptPayload({id: remoteID, clientName: "not local"});
+ collection.insert(remoteID, remoteRecord);
+
+ _("Setting up Sync server");
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/tabs": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+ Service.identity.username = "foo";
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {tabs: {version: engine.version,
+ syncID: engine.syncID}};
+
+ generateNewKeys(Service.collectionKeys);
+
+ let syncFinish = engine._syncFinish;
+ engine._syncFinish = function () {
+ equal(applied.length, 1, "Remote client record was applied");
+ equal(applied[0].id, remoteID, "Remote client ID matches");
+
+ syncFinish.call(engine);
+ run_next_test();
+ }
+
+ _("Start sync");
+ engine._sync();
+});
+
+add_test(function test_reconcile() {
+ let [engine, store] = getMocks();
+
+ _("Setup engine for reconciling");
+ engine._syncStartup();
+
+ _("Create an incoming remote record");
+ let remoteRecord = {id: "remote id",
+ cleartext: "stuff and things!",
+ modified: 1000};
+
+ ok(engine._reconcile(remoteRecord), "Apply a recently modified remote record");
+
+ remoteRecord.modified = 0;
+ ok(engine._reconcile(remoteRecord), "Apply a remote record modified long ago");
+
+ // Remote tab records are never tracked locally, so the only
+ // time they're skipped is when they're marked as deleted.
+ remoteRecord.deleted = true;
+ ok(!engine._reconcile(remoteRecord), "Skip a deleted remote record");
+
+ _("Create an incoming local record");
+ // The locally tracked tab record always takes precedence over its
+ // remote counterparts.
+ let localRecord = {id: engine.service.clientsEngine.localID,
+ cleartext: "this should always be skipped",
+ modified: 2000};
+
+ ok(!engine._reconcile(localRecord), "Skip incoming local if recently modified");
+
+ localRecord.modified = 0;
+ ok(!engine._reconcile(localRecord), "Skip incoming local if modified long ago");
+
+ localRecord.deleted = true;
+ ok(!engine._reconcile(localRecord), "Skip incoming local if deleted");
+
+ run_next_test();
+});
diff --git a/services/sync/tests/unit/test_tab_store.js b/services/sync/tests/unit/test_tab_store.js
new file mode 100644
index 000000000..93b60f0c7
--- /dev/null
+++ b/services/sync/tests/unit/test_tab_store.js
@@ -0,0 +1,116 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines/tabs.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/common/utils.js");
+
+function getMockStore() {
+ let engine = new TabEngine(Service);
+ let store = engine._store;
+ store.getTabState = mockGetTabState;
+ store.shouldSkipWindow = mockShouldSkipWindow;
+ return store;
+}
+
+function test_create() {
+ let store = new TabEngine(Service)._store;
+
+ _("Create a first record");
+ let rec = {id: "id1",
+ clientName: "clientName1",
+ cleartext: { "foo": "bar" },
+ modified: 1000};
+ store.applyIncoming(rec);
+ deepEqual(store._remoteClients["id1"], { lastModified: 1000, foo: "bar" });
+
+ _("Create a second record");
+ rec = {id: "id2",
+ clientName: "clientName2",
+ cleartext: { "foo2": "bar2" },
+ modified: 2000};
+ store.applyIncoming(rec);
+ deepEqual(store._remoteClients["id2"], { lastModified: 2000, foo2: "bar2" });
+
+ _("Create a third record");
+ rec = {id: "id3",
+ clientName: "clientName3",
+ cleartext: { "foo3": "bar3" },
+ modified: 3000};
+ store.applyIncoming(rec);
+ deepEqual(store._remoteClients["id3"], { lastModified: 3000, foo3: "bar3" });
+}
+
+function test_getAllTabs() {
+ let store = getMockStore();
+ let tabs;
+
+ let threeUrls = ["http://foo.com", "http://fuubar.com", "http://barbar.com"];
+
+ store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://bar.com", 1, 1, () => 2, () => threeUrls);
+
+ _("Get all tabs.");
+ tabs = store.getAllTabs();
+ _("Tabs: " + JSON.stringify(tabs));
+ equal(tabs.length, 1);
+ equal(tabs[0].title, "title");
+ equal(tabs[0].urlHistory.length, 2);
+ equal(tabs[0].urlHistory[0], "http://foo.com");
+ equal(tabs[0].urlHistory[1], "http://bar.com");
+ equal(tabs[0].icon, "image");
+ equal(tabs[0].lastUsed, 1);
+
+ _("Get all tabs, and check that filtering works.");
+ let twoUrls = ["about:foo", "http://fuubar.com"];
+ store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1, () => 2, () => twoUrls);
+ tabs = store.getAllTabs(true);
+ _("Filtered: " + JSON.stringify(tabs));
+ equal(tabs.length, 0);
+
+ _("Get all tabs, and check that the entries safety limit works.");
+ let allURLs = [];
+ for (let i = 0; i < 50; i++) {
+ allURLs.push("http://foo" + i + ".bar");
+ }
+ allURLs.splice(35, 0, "about:foo", "about:bar", "about:foobar");
+
+ store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://bar.com", 1, 1, () => 45, () => allURLs);
+ tabs = store.getAllTabs((url) => url.startsWith("about"));
+
+ _("Sliced: " + JSON.stringify(tabs));
+ equal(tabs.length, 1);
+ equal(tabs[0].urlHistory.length, 25);
+ equal(tabs[0].urlHistory[0], "http://foo40.bar");
+ equal(tabs[0].urlHistory[24], "http://foo16.bar");
+}
+
+function test_createRecord() {
+ let store = getMockStore();
+ let record;
+
+ store.getTabState = mockGetTabState;
+ store.shouldSkipWindow = mockShouldSkipWindow;
+ store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1);
+
+ let tabs = store.getAllTabs();
+ let tabsize = JSON.stringify(tabs[0]).length;
+ let numtabs = Math.ceil(20000./77.);
+
+ store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1);
+ record = store.createRecord("fake-guid");
+ ok(record instanceof TabSetRecord);
+ equal(record.tabs.length, 1);
+
+ _("create a big record");
+ store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, numtabs);
+ record = store.createRecord("fake-guid");
+ ok(record instanceof TabSetRecord);
+ equal(record.tabs.length, 256);
+}
+
+function run_test() {
+ test_create();
+ test_getAllTabs();
+ test_createRecord();
+}
diff --git a/services/sync/tests/unit/test_tab_tracker.js b/services/sync/tests/unit/test_tab_tracker.js
new file mode 100644
index 000000000..f98920a44
--- /dev/null
+++ b/services/sync/tests/unit/test_tab_tracker.js
@@ -0,0 +1,127 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines/tabs.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+var clientsEngine = Service.clientsEngine;
+
+function fakeSvcWinMediator() {
+ // actions on windows are captured in logs
+ let logs = [];
+ delete Services.wm;
+ Services.wm = {
+ getEnumerator: function() {
+ return {
+ cnt: 2,
+ hasMoreElements: function() {
+ return this.cnt-- > 0;
+ },
+ getNext: function() {
+ let elt = {addTopics: [], remTopics: [], numAPL: 0, numRPL: 0};
+ logs.push(elt);
+ return {
+ addEventListener: function(topic) {
+ elt.addTopics.push(topic);
+ },
+ removeEventListener: function(topic) {
+ elt.remTopics.push(topic);
+ },
+ gBrowser: {
+ addProgressListener() {
+ elt.numAPL++;
+ },
+ removeProgressListener() {
+ elt.numRPL++;
+ },
+ },
+ };
+ }
+ };
+ }
+ };
+ return logs;
+}
+
+function run_test() {
+ let engine = Service.engineManager.get("tabs");
+
+ _("We assume that tabs have changed at startup.");
+ let tracker = engine._tracker;
+ tracker.persistChangedIDs = false;
+
+ do_check_true(tracker.modified);
+ do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+ [clientsEngine.localID]));
+
+ let logs;
+
+ _("Test listeners are registered on windows");
+ logs = fakeSvcWinMediator();
+ Svc.Obs.notify("weave:engine:start-tracking");
+ do_check_eq(logs.length, 2);
+ for (let log of logs) {
+ do_check_eq(log.addTopics.length, 5);
+ do_check_true(log.addTopics.indexOf("pageshow") >= 0);
+ do_check_true(log.addTopics.indexOf("TabOpen") >= 0);
+ do_check_true(log.addTopics.indexOf("TabClose") >= 0);
+ do_check_true(log.addTopics.indexOf("TabSelect") >= 0);
+ do_check_true(log.addTopics.indexOf("unload") >= 0);
+ do_check_eq(log.remTopics.length, 0);
+ do_check_eq(log.numAPL, 1, "Added 1 progress listener");
+ do_check_eq(log.numRPL, 0, "Didn't remove a progress listener");
+ }
+
+ _("Test listeners are unregistered on windows");
+ logs = fakeSvcWinMediator();
+ Svc.Obs.notify("weave:engine:stop-tracking");
+ do_check_eq(logs.length, 2);
+ for (let log of logs) {
+ do_check_eq(log.addTopics.length, 0);
+ do_check_eq(log.remTopics.length, 5);
+ do_check_true(log.remTopics.indexOf("pageshow") >= 0);
+ do_check_true(log.remTopics.indexOf("TabOpen") >= 0);
+ do_check_true(log.remTopics.indexOf("TabClose") >= 0);
+ do_check_true(log.remTopics.indexOf("TabSelect") >= 0);
+ do_check_true(log.remTopics.indexOf("unload") >= 0);
+ do_check_eq(log.numAPL, 0, "Didn't add a progress listener");
+ do_check_eq(log.numRPL, 1, "Removed 1 progress listener");
+ }
+
+ _("Test tab listener");
+ for (let evttype of ["TabOpen", "TabClose", "TabSelect"]) {
+ // Pretend we just synced.
+ tracker.clearChangedIDs();
+ do_check_false(tracker.modified);
+
+ // Send a fake tab event
+ tracker.onTab({type: evttype , originalTarget: evttype});
+ do_check_true(tracker.modified);
+ do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+ [clientsEngine.localID]));
+ }
+
+ // Pretend we just synced.
+ tracker.clearChangedIDs();
+ do_check_false(tracker.modified);
+
+ tracker.onTab({type: "pageshow", originalTarget: "pageshow"});
+ do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+ [clientsEngine.localID]));
+
+ // Pretend we just synced and saw some progress listeners.
+ tracker.clearChangedIDs();
+ do_check_false(tracker.modified);
+ tracker.onLocationChange({ isTopLevel: false }, undefined, undefined, 0);
+ do_check_false(tracker.modified, "non-toplevel request didn't flag as modified");
+
+ tracker.onLocationChange({ isTopLevel: true }, undefined, undefined,
+ Ci.nsIWebProgressListener.LOCATION_CHANGE_SAME_DOCUMENT);
+ do_check_false(tracker.modified, "location change within the same document request didn't flag as modified");
+
+ tracker.onLocationChange({ isTopLevel: true }, undefined, undefined, 0);
+ do_check_true(tracker.modified, "location change for a new top-level document flagged as modified");
+ do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+ [clientsEngine.localID]));
+}
diff --git a/services/sync/tests/unit/test_telemetry.js b/services/sync/tests/unit/test_telemetry.js
new file mode 100644
index 000000000..50a3d136b
--- /dev/null
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -0,0 +1,564 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://services-sync/telemetry.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/record.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/engines/bookmarks.js");
+Cu.import("resource://services-sync/engines/clients.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://testing-common/services/sync/fxa_utils.js");
+Cu.import("resource://testing-common/services/sync/rotaryengine.js");
+Cu.import("resource://gre/modules/osfile.jsm", this);
+
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://services-sync/util.js");
+
+initTestLogging("Trace");
+
+function SteamStore(engine) {
+ Store.call(this, "Steam", engine);
+}
+
+SteamStore.prototype = {
+ __proto__: Store.prototype,
+};
+
+function SteamTracker(name, engine) {
+ Tracker.call(this, name || "Steam", engine);
+}
+
+SteamTracker.prototype = {
+ __proto__: Tracker.prototype
+};
+
+function SteamEngine(service) {
+ Engine.call(this, "steam", service);
+}
+
+SteamEngine.prototype = {
+ __proto__: Engine.prototype,
+ _storeObj: SteamStore,
+ _trackerObj: SteamTracker,
+ _errToThrow: null,
+ _sync() {
+ if (this._errToThrow) {
+ throw this._errToThrow;
+ }
+ }
+};
+
+function BogusEngine(service) {
+ Engine.call(this, "bogus", service);
+}
+
+BogusEngine.prototype = Object.create(SteamEngine.prototype);
+
+function cleanAndGo(server) {
+ Svc.Prefs.resetBranch("");
+ Svc.Prefs.set("log.logger.engine.rotary", "Trace");
+ Service.recordManager.clearCache();
+ return new Promise(resolve => server.stop(resolve));
+}
+
+// Avoid addon manager complaining about not being initialized
+Service.engineManager.unregister("addons");
+
+add_identity_test(this, function *test_basic() {
+ let helper = track_collections_helper();
+ let upd = helper.with_updated_collection;
+
+ yield configureIdentity({ username: "johndoe" });
+ let handlers = {
+ "/1.1/johndoe/info/collections": helper.handler,
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
+ "/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler())
+ };
+
+ let collections = ["clients", "bookmarks", "forms", "history", "passwords", "prefs", "tabs"];
+
+ for (let coll of collections) {
+ handlers["/1.1/johndoe/storage/" + coll] = upd(coll, new ServerCollection({}, true).handler());
+ }
+
+ let server = httpd_setup(handlers);
+ Service.serverURL = server.baseURI;
+
+ yield sync_and_validate_telem(true);
+
+ yield new Promise(resolve => server.stop(resolve));
+});
+
+add_task(function* test_processIncoming_error() {
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {bookmarks: {version: engine.version,
+ syncID: engine.syncID}}}},
+ bookmarks: {}
+ });
+ new SyncTestingInfrastructure(server.server);
+ let collection = server.user("foo").collection("bookmarks");
+ try {
+ // Create a bogus record that when synced down will provoke a
+ // network error which in turn provokes an exception in _processIncoming.
+ const BOGUS_GUID = "zzzzzzzzzzzz";
+ let bogus_record = collection.insert(BOGUS_GUID, "I'm a bogus record!");
+ bogus_record.get = function get() {
+ throw "Sync this!";
+ };
+ // Make the 10 minutes old so it will only be synced in the toFetch phase.
+ bogus_record.modified = Date.now() / 1000 - 60 * 10;
+ engine.lastSync = Date.now() / 1000 - 60;
+ engine.toFetch = [BOGUS_GUID];
+
+ let error, ping;
+ try {
+ yield sync_engine_and_validate_telem(engine, true, errPing => ping = errPing);
+ } catch(ex) {
+ error = ex;
+ }
+ ok(!!error);
+ ok(!!ping);
+ equal(ping.uid, "0".repeat(32));
+ deepEqual(ping.failureReason, {
+ name: "othererror",
+ error: "error.engine.reason.record_download_fail"
+ });
+
+ equal(ping.engines.length, 1);
+ equal(ping.engines[0].name, "bookmarks");
+ deepEqual(ping.engines[0].failureReason, {
+ name: "othererror",
+ error: "error.engine.reason.record_download_fail"
+ });
+
+ } finally {
+ store.wipe();
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function *test_uploading() {
+ let engine = new BookmarksEngine(Service);
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {bookmarks: {version: engine.version,
+ syncID: engine.syncID}}}},
+ bookmarks: {}
+ });
+ new SyncTestingInfrastructure(server.server);
+
+ let parent = PlacesUtils.toolbarFolderId;
+ let uri = Utils.makeURI("http://getfirefox.com/");
+ let title = "Get Firefox";
+
+ let bmk_id = PlacesUtils.bookmarks.insertBookmark(parent, uri,
+ PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
+
+ let guid = store.GUIDForId(bmk_id);
+ let record = store.createRecord(guid);
+
+ let collection = server.user("foo").collection("bookmarks");
+ try {
+ let ping = yield sync_engine_and_validate_telem(engine, false);
+ ok(!!ping);
+ equal(ping.engines.length, 1);
+ equal(ping.engines[0].name, "bookmarks");
+ ok(!!ping.engines[0].outgoing);
+ greater(ping.engines[0].outgoing[0].sent, 0)
+ ok(!ping.engines[0].incoming);
+
+ PlacesUtils.bookmarks.setItemTitle(bmk_id, "New Title");
+
+ store.wipe();
+ engine.resetClient();
+
+ ping = yield sync_engine_and_validate_telem(engine, false);
+ equal(ping.engines.length, 1);
+ equal(ping.engines[0].name, "bookmarks");
+ equal(ping.engines[0].outgoing.length, 1);
+ ok(!!ping.engines[0].incoming);
+
+ } finally {
+ // Clean up.
+ store.wipe();
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function *test_upload_failed() {
+ Service.identity.username = "foo";
+ let collection = new ServerCollection();
+ collection._wbos.flying = new ServerWBO('flying');
+
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+
+ let syncTesting = new SyncTestingInfrastructure(server);
+
+ let engine = new RotaryEngine(Service);
+ engine.lastSync = 123; // needs to be non-zero so that tracker is queried
+ engine.lastSyncLocal = 456;
+ engine._store.items = {
+ flying: "LNER Class A3 4472",
+ scotsman: "Flying Scotsman",
+ peppercorn: "Peppercorn Class"
+ };
+ const FLYING_CHANGED = 12345;
+ const SCOTSMAN_CHANGED = 23456;
+ const PEPPERCORN_CHANGED = 34567;
+ engine._tracker.addChangedID("flying", FLYING_CHANGED);
+ engine._tracker.addChangedID("scotsman", SCOTSMAN_CHANGED);
+ engine._tracker.addChangedID("peppercorn", PEPPERCORN_CHANGED);
+
+ let meta_global = Service.recordManager.set(engine.metaURL, new WBORecord(engine.metaURL));
+ meta_global.payload.engines = { rotary: { version: engine.version, syncID: engine.syncID } };
+
+ try {
+ engine.enabled = true;
+ let ping = yield sync_engine_and_validate_telem(engine, true);
+ ok(!!ping);
+ equal(ping.engines.length, 1);
+ equal(ping.engines[0].incoming, null);
+ deepEqual(ping.engines[0].outgoing, [{ sent: 3, failed: 2 }]);
+ engine.lastSync = 123;
+ engine.lastSyncLocal = 456;
+
+ ping = yield sync_engine_and_validate_telem(engine, true);
+ ok(!!ping);
+ equal(ping.engines.length, 1);
+ equal(ping.engines[0].incoming.reconciled, 1);
+ deepEqual(ping.engines[0].outgoing, [{ sent: 2, failed: 2 }]);
+
+ } finally {
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function *test_sync_partialUpload() {
+ Service.identity.username = "foo";
+
+ let collection = new ServerCollection();
+ let server = sync_httpd_setup({
+ "/1.1/foo/storage/rotary": collection.handler()
+ });
+ let syncTesting = new SyncTestingInfrastructure(server);
+ generateNewKeys(Service.collectionKeys);
+
+ let engine = new RotaryEngine(Service);
+ engine.lastSync = 123;
+ engine.lastSyncLocal = 456;
+
+
+ // Create a bunch of records (and server side handlers)
+ for (let i = 0; i < 234; i++) {
+ let id = 'record-no-' + i;
+ engine._store.items[id] = "Record No. " + i;
+ engine._tracker.addChangedID(id, i);
+ // Let two items in the first upload batch fail.
+ if (i != 23 && i != 42) {
+ collection.insert(id);
+ }
+ }
+
+ let meta_global = Service.recordManager.set(engine.metaURL,
+ new WBORecord(engine.metaURL));
+ meta_global.payload.engines = {rotary: {version: engine.version,
+ syncID: engine.syncID}};
+
+ try {
+ engine.enabled = true;
+ let ping = yield sync_engine_and_validate_telem(engine, true);
+
+ ok(!!ping);
+ ok(!ping.failureReason);
+ equal(ping.engines.length, 1);
+ equal(ping.engines[0].name, "rotary");
+ ok(!ping.engines[0].incoming);
+ ok(!ping.engines[0].failureReason);
+ deepEqual(ping.engines[0].outgoing, [{ sent: 234, failed: 2 }]);
+
+ collection.post = function() { throw "Failure"; }
+
+ engine._store.items["record-no-1000"] = "Record No. 1000";
+ engine._tracker.addChangedID("record-no-1000", 1000);
+ collection.insert("record-no-1000", 1000);
+
+ engine.lastSync = 123;
+ engine.lastSyncLocal = 456;
+ ping = null;
+
+ try {
+ // should throw
+ yield sync_engine_and_validate_telem(engine, true, errPing => ping = errPing);
+ } catch (e) {}
+ // It would be nice if we had a more descriptive error for this...
+ let uploadFailureError = {
+ name: "othererror",
+ error: "error.engine.reason.record_upload_fail"
+ };
+
+ ok(!!ping);
+ deepEqual(ping.failureReason, uploadFailureError);
+ equal(ping.engines.length, 1);
+ equal(ping.engines[0].name, "rotary");
+ deepEqual(ping.engines[0].incoming, {
+ failed: 1,
+ newFailed: 1,
+ reconciled: 232
+ });
+ ok(!ping.engines[0].outgoing);
+ deepEqual(ping.engines[0].failureReason, uploadFailureError);
+
+ } finally {
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function* test_generic_engine_fail() {
+ Service.engineManager.register(SteamEngine);
+ let engine = Service.engineManager.get("steam");
+ engine.enabled = true;
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {steam: {version: engine.version,
+ syncID: engine.syncID}}}},
+ steam: {}
+ });
+ new SyncTestingInfrastructure(server.server);
+ let e = new Error("generic failure message")
+ engine._errToThrow = e;
+
+ try {
+ let ping = yield sync_and_validate_telem(true);
+ equal(ping.status.service, SYNC_FAILED_PARTIAL);
+ deepEqual(ping.engines.find(e => e.name === "steam").failureReason, {
+ name: "unexpectederror",
+ error: String(e)
+ });
+ } finally {
+ Service.engineManager.unregister(engine);
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function* test_engine_fail_ioerror() {
+ Service.engineManager.register(SteamEngine);
+ let engine = Service.engineManager.get("steam");
+ engine.enabled = true;
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {steam: {version: engine.version,
+ syncID: engine.syncID}}}},
+ steam: {}
+ });
+ new SyncTestingInfrastructure(server.server);
+ // create an IOError to re-throw as part of Sync.
+ try {
+ // (Note that fakeservices.js has replaced Utils.jsonMove etc, but for
+ // this test we need the real one so we get real exceptions from the
+ // filesystem.)
+ yield Utils._real_jsonMove("file-does-not-exist", "anything", {});
+ } catch (ex) {
+ engine._errToThrow = ex;
+ }
+ ok(engine._errToThrow, "expecting exception");
+
+ try {
+ let ping = yield sync_and_validate_telem(true);
+ equal(ping.status.service, SYNC_FAILED_PARTIAL);
+ let failureReason = ping.engines.find(e => e.name === "steam").failureReason;
+ equal(failureReason.name, "unexpectederror");
+ // ensure the profile dir in the exception message has been stripped.
+ ok(!failureReason.error.includes(OS.Constants.Path.profileDir), failureReason.error);
+ ok(failureReason.error.includes("[profileDir]"), failureReason.error);
+ } finally {
+ Service.engineManager.unregister(engine);
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function* test_initial_sync_engines() {
+ Service.engineManager.register(SteamEngine);
+ let engine = Service.engineManager.get("steam");
+ engine.enabled = true;
+ let store = engine._store;
+ let engines = {};
+ // These are the only ones who actually have things to sync at startup.
+ let engineNames = ["clients", "bookmarks", "prefs", "tabs"];
+ let conf = { meta: { global: { engines } } };
+ for (let e of engineNames) {
+ engines[e] = { version: engine.version, syncID: engine.syncID };
+ conf[e] = {};
+ }
+ let server = serverForUsers({"foo": "password"}, conf);
+ new SyncTestingInfrastructure(server.server);
+ try {
+ let ping = yield wait_for_ping(() => Service.sync(), true);
+
+ equal(ping.engines.find(e => e.name === "clients").outgoing[0].sent, 1);
+ equal(ping.engines.find(e => e.name === "tabs").outgoing[0].sent, 1);
+
+ // for the rest we don't care about specifics
+ for (let e of ping.engines) {
+ if (!engineNames.includes(engine.name)) {
+ continue;
+ }
+ greaterOrEqual(e.took, 1);
+ ok(!!e.outgoing)
+ equal(e.outgoing.length, 1);
+ notEqual(e.outgoing[0].sent, undefined);
+ equal(e.outgoing[0].failed, undefined);
+ }
+ } finally {
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function* test_nserror() {
+ Service.engineManager.register(SteamEngine);
+ let engine = Service.engineManager.get("steam");
+ engine.enabled = true;
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {steam: {version: engine.version,
+ syncID: engine.syncID}}}},
+ steam: {}
+ });
+ new SyncTestingInfrastructure(server.server);
+ engine._errToThrow = Components.Exception("NS_ERROR_UNKNOWN_HOST", Cr.NS_ERROR_UNKNOWN_HOST);
+ try {
+ let ping = yield sync_and_validate_telem(true);
+ deepEqual(ping.status, {
+ service: SYNC_FAILED_PARTIAL,
+ sync: LOGIN_FAILED_NETWORK_ERROR
+ });
+ let enginePing = ping.engines.find(e => e.name === "steam");
+ deepEqual(enginePing.failureReason, {
+ name: "nserror",
+ code: Cr.NS_ERROR_UNKNOWN_HOST
+ });
+ } finally {
+ Service.engineManager.unregister(engine);
+ yield cleanAndGo(server);
+ }
+});
+
+add_identity_test(this, function *test_discarding() {
+ let helper = track_collections_helper();
+ let upd = helper.with_updated_collection;
+ let telem = get_sync_test_telemetry();
+ telem.maxPayloadCount = 2;
+ telem.submissionInterval = Infinity;
+ let oldSubmit = telem.submit;
+
+ let server;
+ try {
+
+ yield configureIdentity({ username: "johndoe" });
+ let handlers = {
+ "/1.1/johndoe/info/collections": helper.handler,
+ "/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
+ "/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler())
+ };
+
+ let collections = ["clients", "bookmarks", "forms", "history", "passwords", "prefs", "tabs"];
+
+ for (let coll of collections) {
+ handlers["/1.1/johndoe/storage/" + coll] = upd(coll, new ServerCollection({}, true).handler());
+ }
+
+ server = httpd_setup(handlers);
+ Service.serverURL = server.baseURI;
+ telem.submit = () => ok(false, "Submitted telemetry ping when we should not have");
+
+ for (let i = 0; i < 5; ++i) {
+ Service.sync();
+ }
+ telem.submit = oldSubmit;
+ telem.submissionInterval = -1;
+ let ping = yield sync_and_validate_telem(true, true); // with this we've synced 6 times
+ equal(ping.syncs.length, 2);
+ equal(ping.discarded, 4);
+ } finally {
+ telem.maxPayloadCount = 500;
+ telem.submissionInterval = -1;
+ telem.submit = oldSubmit;
+ if (server) {
+ yield new Promise(resolve => server.stop(resolve));
+ }
+ }
+})
+
+add_task(function* test_no_foreign_engines_in_error_ping() {
+ Service.engineManager.register(BogusEngine);
+ let engine = Service.engineManager.get("bogus");
+ engine.enabled = true;
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}},
+ steam: {}
+ });
+ engine._errToThrow = new Error("Oh no!");
+ new SyncTestingInfrastructure(server.server);
+ try {
+ let ping = yield sync_and_validate_telem(true);
+ equal(ping.status.service, SYNC_FAILED_PARTIAL);
+ ok(ping.engines.every(e => e.name !== "bogus"));
+ } finally {
+ Service.engineManager.unregister(engine);
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function* test_sql_error() {
+ Service.engineManager.register(SteamEngine);
+ let engine = Service.engineManager.get("steam");
+ engine.enabled = true;
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {steam: {version: engine.version,
+ syncID: engine.syncID}}}},
+ steam: {}
+ });
+ new SyncTestingInfrastructure(server.server);
+ engine._sync = function() {
+ // Just grab a DB connection and issue a bogus SQL statement synchronously.
+ let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase).DBConnection;
+ Async.querySpinningly(db.createAsyncStatement("select bar from foo"));
+ };
+ try {
+ let ping = yield sync_and_validate_telem(true);
+ let enginePing = ping.engines.find(e => e.name === "steam");
+ deepEqual(enginePing.failureReason, { name: "sqlerror", code: 1 });
+ } finally {
+ Service.engineManager.unregister(engine);
+ yield cleanAndGo(server);
+ }
+});
+
+add_task(function* test_no_foreign_engines_in_success_ping() {
+ Service.engineManager.register(BogusEngine);
+ let engine = Service.engineManager.get("bogus");
+ engine.enabled = true;
+ let store = engine._store;
+ let server = serverForUsers({"foo": "password"}, {
+ meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}},
+ steam: {}
+ });
+
+ new SyncTestingInfrastructure(server.server);
+ try {
+ let ping = yield sync_and_validate_telem();
+ ok(ping.engines.every(e => e.name !== "bogus"));
+ } finally {
+ Service.engineManager.unregister(engine);
+ yield cleanAndGo(server);
+ }
+}); \ No newline at end of file
diff --git a/services/sync/tests/unit/test_tracker_addChanged.js b/services/sync/tests/unit/test_tracker_addChanged.js
new file mode 100644
index 000000000..e73bd1162
--- /dev/null
+++ b/services/sync/tests/unit/test_tracker_addChanged.js
@@ -0,0 +1,59 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/engines.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_tracker_basics() {
+ let tracker = new Tracker("Tracker", Service);
+ tracker.persistChangedIDs = false;
+
+ let id = "the_id!";
+
+ _("Make sure nothing exists yet..");
+ do_check_eq(tracker.changedIDs[id], null);
+
+ _("Make sure adding of time 0 works");
+ tracker.addChangedID(id, 0);
+ do_check_eq(tracker.changedIDs[id], 0);
+
+ _("A newer time will replace the old 0");
+ tracker.addChangedID(id, 10);
+ do_check_eq(tracker.changedIDs[id], 10);
+
+ _("An older time will not replace the newer 10");
+ tracker.addChangedID(id, 5);
+ do_check_eq(tracker.changedIDs[id], 10);
+
+ _("Adding without time defaults to current time");
+ tracker.addChangedID(id);
+ do_check_true(tracker.changedIDs[id] > 10);
+
+ run_next_test();
+});
+
+add_test(function test_tracker_persistence() {
+ let tracker = new Tracker("Tracker", Service);
+ let id = "abcdef";
+
+ tracker.persistChangedIDs = true;
+ tracker.onSavedChangedIDs = function () {
+ _("IDs saved.");
+ do_check_eq(5, tracker.changedIDs[id]);
+
+ // Verify the write by reading the file back.
+ Utils.jsonLoad("changes/tracker", this, function (json) {
+ do_check_eq(5, json[id]);
+ tracker.persistChangedIDs = false;
+ delete tracker.onSavedChangedIDs;
+ run_next_test();
+ });
+ };
+
+ tracker.addChangedID(id, 5);
+});
diff --git a/services/sync/tests/unit/test_upgrade_old_sync_key.js b/services/sync/tests/unit/test_upgrade_old_sync_key.js
new file mode 100644
index 000000000..ff75a435a
--- /dev/null
+++ b/services/sync/tests/unit/test_upgrade_old_sync_key.js
@@ -0,0 +1,49 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/service.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://testing-common/services/sync/utils.js");
+
+// Test upgrade of a dashed old-style sync key.
+function run_test() {
+ const PBKDF2_KEY_BYTES = 16;
+ initTestLogging("Trace");
+ ensureLegacyIdentityManager();
+
+ let passphrase = "abcde-abcde-abcde-abcde";
+ do_check_false(Utils.isPassphrase(passphrase));
+
+ let normalized = Utils.normalizePassphrase(passphrase);
+ _("Normalized: " + normalized);
+
+ // Still not a modern passphrase...
+ do_check_false(Utils.isPassphrase(normalized));
+
+ // ... but different.
+ do_check_neq(normalized, passphrase);
+ do_check_eq(normalized, "abcdeabcdeabcdeabcde");
+
+ // Now run through the upgrade.
+ Service.identity.account = "johndoe";
+ Service.syncID = "1234567890";
+ Service.identity.syncKey = normalized; // UI normalizes.
+ do_check_false(Utils.isPassphrase(Service.identity.syncKey));
+ Service.upgradeSyncKey(Service.syncID);
+ let upgraded = Service.identity.syncKey;
+ _("Upgraded: " + upgraded);
+ do_check_true(Utils.isPassphrase(upgraded));
+
+ // Now let's verify that it's been derived correctly, from the normalized
+ // version, and the encoded sync ID.
+ _("Sync ID: " + Service.syncID);
+ let derivedKeyStr =
+ Utils.derivePresentableKeyFromPassphrase(normalized,
+ btoa(Service.syncID),
+ PBKDF2_KEY_BYTES, true);
+ _("Derived: " + derivedKeyStr);
+
+ // Success!
+ do_check_eq(derivedKeyStr, upgraded);
+}
diff --git a/services/sync/tests/unit/test_utils_catch.js b/services/sync/tests/unit/test_utils_catch.js
new file mode 100644
index 000000000..5f50bf7e4
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_catch.js
@@ -0,0 +1,94 @@
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/service.js");
+
+function run_test() {
+ _("Make sure catch when copied to an object will correctly catch stuff");
+ let ret, rightThis, didCall, didThrow, wasTen, wasLocked;
+ let obj = {
+ catch: Utils.catch,
+ _log: {
+ debug: function(str) {
+ didThrow = str.search(/^Exception/) == 0;
+ },
+ info: function(str) {
+ wasLocked = str.indexOf("Cannot start sync: already syncing?") == 0;
+ }
+ },
+
+ func: function() {
+ return this.catch(function() {
+ rightThis = this == obj;
+ didCall = true;
+ return 5;
+ })();
+ },
+
+ throwy: function() {
+ return this.catch(function() {
+ rightThis = this == obj;
+ didCall = true;
+ throw 10;
+ })();
+ },
+
+ callbacky: function() {
+ return this.catch(function() {
+ rightThis = this == obj;
+ didCall = true;
+ throw 10;
+ }, function(ex) {
+ wasTen = (ex == 10)
+ })();
+ },
+
+ lockedy: function() {
+ return this.catch(function() {
+ rightThis = this == obj;
+ didCall = true;
+ throw("Could not acquire lock.");
+ })();
+ }
+ };
+
+ _("Make sure a normal call will call and return");
+ rightThis = didCall = didThrow = wasLocked = false;
+ ret = obj.func();
+ do_check_eq(ret, 5);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+ do_check_false(didThrow);
+ do_check_eq(wasTen, undefined);
+ do_check_false(wasLocked);
+
+ _("Make sure catch/throw results in debug call and caller doesn't need to handle exception");
+ rightThis = didCall = didThrow = wasLocked = false;
+ ret = obj.throwy();
+ do_check_eq(ret, undefined);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+ do_check_true(didThrow);
+ do_check_eq(wasTen, undefined);
+ do_check_false(wasLocked);
+
+ _("Test callback for exception testing.");
+ rightThis = didCall = didThrow = wasLocked = false;
+ ret = obj.callbacky();
+ do_check_eq(ret, undefined);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+ do_check_true(didThrow);
+ do_check_true(wasTen);
+ do_check_false(wasLocked);
+
+ _("Test the lock-aware catch that Service uses.");
+ obj.catch = Service._catch;
+ rightThis = didCall = didThrow = wasLocked = false;
+ wasTen = undefined;
+ ret = obj.lockedy();
+ do_check_eq(ret, undefined);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+ do_check_true(didThrow);
+ do_check_eq(wasTen, undefined);
+ do_check_true(wasLocked);
+}
diff --git a/services/sync/tests/unit/test_utils_deepEquals.js b/services/sync/tests/unit/test_utils_deepEquals.js
new file mode 100644
index 000000000..c75fa0cfa
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_deepEquals.js
@@ -0,0 +1,44 @@
+_("Make sure Utils.deepEquals correctly finds items that are deeply equal");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ let data = '[NaN, undefined, null, true, false, Infinity, 0, 1, "a", "b", {a: 1}, {a: "a"}, [{a: 1}], [{a: true}], {a: 1, b: 2}, [1, 2], [1, 2, 3]]';
+ _("Generating two copies of data:", data);
+ let d1 = eval(data);
+ let d2 = eval(data);
+
+ d1.forEach(function(a) {
+ _("Testing", a, typeof a, JSON.stringify([a]));
+ let numMatch = 0;
+
+ d2.forEach(function(b) {
+ if (Utils.deepEquals(a, b)) {
+ numMatch++;
+ _("Found a match", b, typeof b, JSON.stringify([b]));
+ }
+ });
+
+ let expect = 1;
+ if (isNaN(a) && typeof a == "number") {
+ expect = 0;
+ _("Checking NaN should result in no matches");
+ }
+
+ _("Making sure we found the correct # match:", expect);
+ _("Actual matches:", numMatch);
+ do_check_eq(numMatch, expect);
+ });
+
+ _("Make sure adding undefined properties doesn't affect equalness");
+ let a = {};
+ let b = { a: undefined };
+ do_check_true(Utils.deepEquals(a, b));
+ a.b = 5;
+ do_check_false(Utils.deepEquals(a, b));
+ b.b = 5;
+ do_check_true(Utils.deepEquals(a, b));
+ a.c = undefined;
+ do_check_true(Utils.deepEquals(a, b));
+ b.d = undefined;
+ do_check_true(Utils.deepEquals(a, b));
+}
diff --git a/services/sync/tests/unit/test_utils_deferGetSet.js b/services/sync/tests/unit/test_utils_deferGetSet.js
new file mode 100644
index 000000000..9d58a9873
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_deferGetSet.js
@@ -0,0 +1,49 @@
+_("Make sure various combinations of deferGetSet arguments correctly defer getting/setting properties to another object");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ let base = function() {};
+ base.prototype = {
+ dst: {},
+
+ get a() {
+ return "a";
+ },
+ set b(val) {
+ this.dst.b = val + "!!!";
+ }
+ };
+ let src = new base();
+
+ _("get/set a single property");
+ Utils.deferGetSet(base, "dst", "foo");
+ src.foo = "bar";
+ do_check_eq(src.dst.foo, "bar");
+ do_check_eq(src.foo, "bar");
+
+ _("editing the target also updates the source");
+ src.dst.foo = "baz";
+ do_check_eq(src.dst.foo, "baz");
+ do_check_eq(src.foo, "baz");
+
+ _("handle multiple properties");
+ Utils.deferGetSet(base, "dst", ["p1", "p2"]);
+ src.p1 = "v1";
+ src.p2 = "v2";
+ do_check_eq(src.p1, "v1");
+ do_check_eq(src.dst.p1, "v1");
+ do_check_eq(src.p2, "v2");
+ do_check_eq(src.dst.p2, "v2");
+
+ _("make sure existing getter keeps its functionality");
+ Utils.deferGetSet(base, "dst", "a");
+ src.a = "not a";
+ do_check_eq(src.dst.a, "not a");
+ do_check_eq(src.a, "a");
+
+ _("make sure existing setter keeps its functionality");
+ Utils.deferGetSet(base, "dst", "b");
+ src.b = "b";
+ do_check_eq(src.dst.b, "b!!!");
+ do_check_eq(src.b, "b!!!");
+}
diff --git a/services/sync/tests/unit/test_utils_deriveKey.js b/services/sync/tests/unit/test_utils_deriveKey.js
new file mode 100644
index 000000000..17dd889c7
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_deriveKey.js
@@ -0,0 +1,66 @@
+Cu.import("resource://services-crypto/WeaveCrypto.js");
+Cu.import("resource://services-sync/util.js");
+
+var cryptoSvc = new WeaveCrypto();
+
+function run_test() {
+ if (this.gczeal) {
+ _("Running deriveKey tests with gczeal(2).");
+ gczeal(2);
+ } else {
+ _("Running deriveKey tests with default gczeal.");
+ }
+
+ var iv = cryptoSvc.generateRandomIV();
+ var der_passphrase = "secret phrase";
+ var der_salt = "RE5YUHpQcGl3bg=="; // btoa("DNXPzPpiwn")
+
+ _("Testing deriveKeyFromPassphrase. Input is \"" + der_passphrase + "\", \"" + der_salt + "\" (base64-encoded).");
+
+ // Test friendly-ing.
+ do_check_eq("abcdefghijk8mn9pqrstuvwxyz234567",
+ Utils.base32ToFriendly("ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"));
+ do_check_eq("ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
+ Utils.base32FromFriendly(
+ Utils.base32ToFriendly("ABCDEFGHIJKLMNOPQRSTUVWXYZ234567")));
+
+ // Test translation.
+ do_check_false(Utils.isPassphrase("o-5wmnu-o5tqc-7lz2h-amkbw-izqzi")); // Wrong charset.
+ do_check_false(Utils.isPassphrase("O-5WMNU-O5TQC-7LZ2H-AMKBW-IZQZI")); // Wrong charset.
+ do_check_true(Utils.isPassphrase("9-5wmnu-95tqc-78z2h-amkbw-izqzi"));
+ do_check_true(Utils.isPassphrase("9-5WMNU-95TQC-78Z2H-AMKBW-IZQZI")); // isPassphrase normalizes.
+ do_check_true(Utils.isPassphrase(
+ Utils.normalizePassphrase("9-5WMNU-95TQC-78Z2H-AMKBW-IZQZI")));
+
+ // Base64. We don't actually use this in anger, particularly not with a 32-byte key.
+ var der_key = Utils.deriveEncodedKeyFromPassphrase(der_passphrase, der_salt);
+ _("Derived key in base64: " + der_key);
+ do_check_eq(cryptoSvc.decrypt(cryptoSvc.encrypt("bacon", der_key, iv), der_key, iv), "bacon");
+
+ // Base64, 16-byte output.
+ var der_key = Utils.deriveEncodedKeyFromPassphrase(der_passphrase, der_salt, 16);
+ _("Derived key in base64: " + der_key);
+ do_check_eq("d2zG0d2cBfXnRwMUGyMwyg==", der_key);
+ do_check_eq(cryptoSvc.decrypt(cryptoSvc.encrypt("bacon", der_key, iv), der_key, iv), "bacon");
+
+ // Base32. Again, specify '16' to avoid it generating a 256-bit key string.
+ var b32key = Utils.derivePresentableKeyFromPassphrase(der_passphrase, der_salt, 16);
+ var hyphenated = Utils.hyphenatePassphrase(b32key);
+ do_check_true(Utils.isPassphrase(b32key));
+
+ _("Derived key in base32: " + b32key);
+ do_check_eq(b32key.length, 26);
+ do_check_eq(hyphenated.length, 31); // 1 char, plus 5 groups of 5, hyphenated = 5 + (5*5) + 1 = 31.
+ do_check_eq(hyphenated, "9-5wmnu-95tqc-78z2h-amkbw-izqzi");
+
+ if (this.gczeal)
+ gczeal(0);
+
+ // Test the equivalence of our NSS and JS versions.
+ // Will only work on FF4, of course.
+ // Note that we don't add gczeal here: the pure-JS implementation is
+ // astonishingly slow, and this check takes five minutes to run.
+ do_check_eq(
+ Utils.deriveEncodedKeyFromPassphrase(der_passphrase, der_salt, 16, false),
+ Utils.deriveEncodedKeyFromPassphrase(der_passphrase, der_salt, 16, true));
+}
diff --git a/services/sync/tests/unit/test_utils_getErrorString.js b/services/sync/tests/unit/test_utils_getErrorString.js
new file mode 100644
index 000000000..d64e43540
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_getErrorString.js
@@ -0,0 +1,14 @@
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ let str;
+
+ // we just test whether the returned string includes the
+ // string "unknown", should be good enough
+
+ str = Utils.getErrorString("error.login.reason.account");
+ do_check_true(str.match(/unknown/i) == null);
+
+ str = Utils.getErrorString("foobar");
+ do_check_true(str.match(/unknown/i) != null);
+}
diff --git a/services/sync/tests/unit/test_utils_json.js b/services/sync/tests/unit/test_utils_json.js
new file mode 100644
index 000000000..efa7d9b4d
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_json.js
@@ -0,0 +1,114 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/FileUtils.jsm");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ initTestLogging();
+ run_next_test();
+}
+
+add_test(function test_roundtrip() {
+ _("Do a simple write of an array to json and read");
+ Utils.jsonSave("foo", {}, ["v1", "v2"], ensureThrows(function(error) {
+ do_check_eq(error, null);
+
+ Utils.jsonLoad("foo", {}, ensureThrows(function(val) {
+ let foo = val;
+ do_check_eq(typeof foo, "object");
+ do_check_eq(foo.length, 2);
+ do_check_eq(foo[0], "v1");
+ do_check_eq(foo[1], "v2");
+ run_next_test();
+ }));
+ }));
+});
+
+add_test(function test_string() {
+ _("Try saving simple strings");
+ Utils.jsonSave("str", {}, "hi", ensureThrows(function(error) {
+ do_check_eq(error, null);
+
+ Utils.jsonLoad("str", {}, ensureThrows(function(val) {
+ let str = val;
+ do_check_eq(typeof str, "string");
+ do_check_eq(str.length, 2);
+ do_check_eq(str[0], "h");
+ do_check_eq(str[1], "i");
+ run_next_test();
+ }));
+ }));
+});
+
+add_test(function test_number() {
+ _("Try saving a number");
+ Utils.jsonSave("num", {}, 42, ensureThrows(function(error) {
+ do_check_eq(error, null);
+
+ Utils.jsonLoad("num", {}, ensureThrows(function(val) {
+ let num = val;
+ do_check_eq(typeof num, "number");
+ do_check_eq(num, 42);
+ run_next_test();
+ }));
+ }));
+});
+
+add_test(function test_nonexistent_file() {
+ _("Try loading a non-existent file.");
+ Utils.jsonLoad("non-existent", {}, ensureThrows(function(val) {
+ do_check_eq(val, undefined);
+ run_next_test();
+ }));
+});
+
+add_test(function test_save_logging() {
+ _("Verify that writes are logged.");
+ let trace;
+ Utils.jsonSave("log", {_log: {trace: function(msg) { trace = msg; }}},
+ "hi", ensureThrows(function () {
+ do_check_true(!!trace);
+ run_next_test();
+ }));
+});
+
+add_test(function test_load_logging() {
+ _("Verify that reads and read errors are logged.");
+
+ // Write a file with some invalid JSON
+ let filePath = "weave/log.json";
+ let file = FileUtils.getFile("ProfD", filePath.split("/"), true);
+ let fos = Cc["@mozilla.org/network/file-output-stream;1"]
+ .createInstance(Ci.nsIFileOutputStream);
+ let flags = FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE
+ | FileUtils.MODE_TRUNCATE;
+ fos.init(file, flags, FileUtils.PERMS_FILE, fos.DEFER_OPEN);
+ let stream = Cc["@mozilla.org/intl/converter-output-stream;1"]
+ .createInstance(Ci.nsIConverterOutputStream);
+ stream.init(fos, "UTF-8", 4096, 0x0000);
+ stream.writeString("invalid json!");
+ stream.close();
+
+ let trace, debug;
+ let obj = {
+ _log: {
+ trace: function(msg) {
+ trace = msg;
+ },
+ debug: function(msg) {
+ debug = msg;
+ }
+ }
+ };
+ Utils.jsonLoad("log", obj, ensureThrows(function(val) {
+ do_check_true(!val);
+ do_check_true(!!trace);
+ do_check_true(!!debug);
+ run_next_test();
+ }));
+});
+
+add_task(function* test_undefined_callback() {
+ yield Utils.jsonSave("foo", {}, ["v1", "v2"]);
+});
diff --git a/services/sync/tests/unit/test_utils_keyEncoding.js b/services/sync/tests/unit/test_utils_keyEncoding.js
new file mode 100644
index 000000000..0b39c1575
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_keyEncoding.js
@@ -0,0 +1,15 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ do_check_eq(Utils.encodeKeyBase32("foobarbafoobarba"), "mzxw6ytb9jrgcztpn5rgc4tcme");
+ do_check_eq(Utils.decodeKeyBase32("mzxw6ytb9jrgcztpn5rgc4tcme"), "foobarbafoobarba");
+ do_check_eq(
+ Utils.encodeKeyBase32("\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"),
+ "aeaqcaibaeaqcaibaeaqcaibae");
+ do_check_eq(
+ Utils.decodeKeyBase32("aeaqcaibaeaqcaibaeaqcaibae"),
+ "\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01");
+}
diff --git a/services/sync/tests/unit/test_utils_lazyStrings.js b/services/sync/tests/unit/test_utils_lazyStrings.js
new file mode 100644
index 000000000..68f9b3574
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_lazyStrings.js
@@ -0,0 +1,14 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/stringbundle.js");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ let fn = Utils.lazyStrings("sync");
+ do_check_eq(typeof fn, "function");
+ let bundle = fn();
+ do_check_true(bundle instanceof StringBundle);
+ let url = bundle.url;
+ do_check_eq(url, "chrome://weave/locale/services/sync.properties");
+}
diff --git a/services/sync/tests/unit/test_utils_lock.js b/services/sync/tests/unit/test_utils_lock.js
new file mode 100644
index 000000000..d1830787e
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_lock.js
@@ -0,0 +1,79 @@
+_("Make sure lock prevents calling with a shared lock");
+Cu.import("resource://services-sync/util.js");
+
+// Utility that we only use here.
+
+function do_check_begins(thing, startsWith) {
+ if (!(thing && thing.indexOf && (thing.indexOf(startsWith) == 0)))
+ do_throw(thing + " doesn't begin with " + startsWith);
+}
+
+function run_test() {
+ let ret, rightThis, didCall;
+ let state, lockState, lockedState, unlockState;
+ let obj = {
+ _lock: Utils.lock,
+ lock: function() {
+ lockState = ++state;
+ if (this._locked) {
+ lockedState = ++state;
+ return false;
+ }
+ this._locked = true;
+ return true;
+ },
+ unlock: function() {
+ unlockState = ++state;
+ this._locked = false;
+ },
+
+ func: function() {
+ return this._lock("Test utils lock",
+ function() {
+ rightThis = this == obj;
+ didCall = true;
+ return 5;
+ })();
+ },
+
+ throwy: function() {
+ return this._lock("Test utils lock throwy",
+ function() {
+ rightThis = this == obj;
+ didCall = true;
+ this.throwy();
+ })();
+ }
+ };
+
+ _("Make sure a normal call will call and return");
+ rightThis = didCall = false;
+ state = 0;
+ ret = obj.func();
+ do_check_eq(ret, 5);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+ do_check_eq(lockState, 1);
+ do_check_eq(unlockState, 2);
+ do_check_eq(state, 2);
+
+ _("Make sure code that calls locked code throws");
+ ret = null;
+ rightThis = didCall = false;
+ try {
+ ret = obj.throwy();
+ do_throw("throwy internal call should have thrown!");
+ }
+ catch(ex) {
+ // Should throw an Error, not a string.
+ do_check_begins(ex, "Could not acquire lock");
+ }
+ do_check_eq(ret, null);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+ _("Lock should be called twice so state 3 is skipped");
+ do_check_eq(lockState, 4);
+ do_check_eq(lockedState, 5);
+ do_check_eq(unlockState, 6);
+ do_check_eq(state, 6);
+}
diff --git a/services/sync/tests/unit/test_utils_makeGUID.js b/services/sync/tests/unit/test_utils_makeGUID.js
new file mode 100644
index 000000000..7ce6728b7
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_makeGUID.js
@@ -0,0 +1,40 @@
+Cu.import("resource://services-sync/util.js");
+
+const base64url =
+ "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_";
+
+function run_test() {
+ _("Make sure makeGUID makes guids of the right length/characters");
+ _("Create a bunch of guids to make sure they don't conflict");
+ let guids = [];
+ for (let i = 0; i < 1000; i++) {
+ let newGuid = Utils.makeGUID();
+ _("Generated " + newGuid);
+
+ // Verify that the GUID's length is correct, even when it's URL encoded.
+ do_check_eq(newGuid.length, 12);
+ do_check_eq(encodeURIComponent(newGuid).length, 12);
+
+ // Verify that the GUID only contains base64url characters
+ do_check_true(Array.every(newGuid, function(chr) {
+ return base64url.indexOf(chr) != -1;
+ }));
+
+ // Verify that Utils.checkGUID() correctly identifies them as valid.
+ do_check_true(Utils.checkGUID(newGuid));
+
+ // Verify uniqueness within our sample of 1000. This could cause random
+ // failures, but they should be extremely rare. Otherwise we'd have a
+ // problem with GUID collisions.
+ do_check_true(guids.every(function(g) { return g != newGuid; }));
+ guids.push(newGuid);
+ }
+
+ _("Make sure checkGUID fails for invalid GUIDs");
+ do_check_false(Utils.checkGUID(undefined));
+ do_check_false(Utils.checkGUID(null));
+ do_check_false(Utils.checkGUID(""));
+ do_check_false(Utils.checkGUID("blergh"));
+ do_check_false(Utils.checkGUID("ThisGUIDisWayTooLong"));
+ do_check_false(Utils.checkGUID("Invalid!!!!!"));
+}
diff --git a/services/sync/tests/unit/test_utils_notify.js b/services/sync/tests/unit/test_utils_notify.js
new file mode 100644
index 000000000..5bd38da5f
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_notify.js
@@ -0,0 +1,100 @@
+_("Make sure notify sends out the right notifications");
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ let ret, rightThis, didCall;
+ let obj = {
+ notify: Utils.notify("foo:"),
+ _log: {
+ trace: function() {}
+ },
+
+ func: function() {
+ return this.notify("bar", "baz", function() {
+ rightThis = this == obj;
+ didCall = true;
+ return 5;
+ })();
+ },
+
+ throwy: function() {
+ return this.notify("bad", "one", function() {
+ rightThis = this == obj;
+ didCall = true;
+ throw 10;
+ })();
+ }
+ };
+
+ let state = 0;
+ let makeObs = function(topic) {
+ let obj = {
+ observe: function(subject, topic, data) {
+ this.state = ++state;
+ this.subject = subject;
+ this.topic = topic;
+ this.data = data;
+ }
+ };
+
+ Svc.Obs.add(topic, obj);
+ return obj;
+ };
+
+ _("Make sure a normal call will call and return with notifications");
+ rightThis = didCall = false;
+ let fs = makeObs("foo:bar:start");
+ let ff = makeObs("foo:bar:finish");
+ let fe = makeObs("foo:bar:error");
+ ret = obj.func();
+ do_check_eq(ret, 5);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+
+ do_check_eq(fs.state, 1);
+ do_check_eq(fs.subject, undefined);
+ do_check_eq(fs.topic, "foo:bar:start");
+ do_check_eq(fs.data, "baz");
+
+ do_check_eq(ff.state, 2);
+ do_check_eq(ff.subject, 5);
+ do_check_eq(ff.topic, "foo:bar:finish");
+ do_check_eq(ff.data, "baz");
+
+ do_check_eq(fe.state, undefined);
+ do_check_eq(fe.subject, undefined);
+ do_check_eq(fe.topic, undefined);
+ do_check_eq(fe.data, undefined);
+
+ _("Make sure a throwy call will call and throw with notifications");
+ ret = null;
+ rightThis = didCall = false;
+ let ts = makeObs("foo:bad:start");
+ let tf = makeObs("foo:bad:finish");
+ let te = makeObs("foo:bad:error");
+ try {
+ ret = obj.throwy();
+ do_throw("throwy should have thrown!");
+ }
+ catch(ex) {
+ do_check_eq(ex, 10);
+ }
+ do_check_eq(ret, null);
+ do_check_true(rightThis);
+ do_check_true(didCall);
+
+ do_check_eq(ts.state, 3);
+ do_check_eq(ts.subject, undefined);
+ do_check_eq(ts.topic, "foo:bad:start");
+ do_check_eq(ts.data, "one");
+
+ do_check_eq(tf.state, undefined);
+ do_check_eq(tf.subject, undefined);
+ do_check_eq(tf.topic, undefined);
+ do_check_eq(tf.data, undefined);
+
+ do_check_eq(te.state, 4);
+ do_check_eq(te.subject, 10);
+ do_check_eq(te.topic, "foo:bad:error");
+ do_check_eq(te.data, "one");
+}
diff --git a/services/sync/tests/unit/test_utils_passphrase.js b/services/sync/tests/unit/test_utils_passphrase.js
new file mode 100644
index 000000000..6d34697be
--- /dev/null
+++ b/services/sync/tests/unit/test_utils_passphrase.js
@@ -0,0 +1,73 @@
+Cu.import("resource://services-sync/util.js");
+
+function run_test() {
+ _("Generated passphrase has length 26.");
+ let pp = Utils.generatePassphrase();
+ do_check_eq(pp.length, 26);
+
+ const key = "abcdefghijkmnpqrstuvwxyz23456789";
+ _("Passphrase only contains [" + key + "].");
+ do_check_true(pp.split('').every(chr => key.indexOf(chr) != -1));
+
+ _("Hyphenated passphrase has 5 hyphens.");
+ let hyphenated = Utils.hyphenatePassphrase(pp);
+ _("H: " + hyphenated);
+ do_check_eq(hyphenated.length, 31);
+ do_check_eq(hyphenated[1], '-');
+ do_check_eq(hyphenated[7], '-');
+ do_check_eq(hyphenated[13], '-');
+ do_check_eq(hyphenated[19], '-');
+ do_check_eq(hyphenated[25], '-');
+ do_check_eq(pp,
+ hyphenated.slice(0, 1) + hyphenated.slice(2, 7)
+ + hyphenated.slice(8, 13) + hyphenated.slice(14, 19)
+ + hyphenated.slice(20, 25) + hyphenated.slice(26, 31));
+
+ _("Arbitrary hyphenation.");
+ // We don't allow invalid characters for our base32 character set.
+ do_check_eq(Utils.hyphenatePassphrase("1234567"), "2-34567"); // Not partial, so no trailing dash.
+ do_check_eq(Utils.hyphenatePassphrase("1234567890"), "2-34567-89");
+ do_check_eq(Utils.hyphenatePassphrase("abcdeabcdeabcdeabcdeabcde"), "a-bcdea-bcdea-bcdea-bcdea-bcde");
+ do_check_eq(Utils.hyphenatePartialPassphrase("1234567"), "2-34567-");
+ do_check_eq(Utils.hyphenatePartialPassphrase("1234567890"), "2-34567-89");
+ do_check_eq(Utils.hyphenatePartialPassphrase("abcdeabcdeabcdeabcdeabcde"), "a-bcdea-bcdea-bcdea-bcdea-bcde");
+
+ do_check_eq(Utils.hyphenatePartialPassphrase("a"), "a-");
+ do_check_eq(Utils.hyphenatePartialPassphrase("1234567"), "2-34567-");
+ do_check_eq(Utils.hyphenatePartialPassphrase("a-bcdef-g"),
+ "a-bcdef-g");
+ do_check_eq(Utils.hyphenatePartialPassphrase("abcdefghijklmnop"),
+ "a-bcdef-ghijk-mnp");
+ do_check_eq(Utils.hyphenatePartialPassphrase("abcdefghijklmnopabcde"),
+ "a-bcdef-ghijk-mnpab-cde");
+ do_check_eq(Utils.hyphenatePartialPassphrase("a-bcdef-ghijk-LMNOP-ABCDE-Fg"),
+ "a-bcdef-ghijk-mnpab-cdefg-");
+ // Cuts off.
+ do_check_eq(Utils.hyphenatePartialPassphrase("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa").length, 31);
+
+ _("Normalize passphrase recognizes hyphens.");
+ do_check_eq(Utils.normalizePassphrase(hyphenated), pp);
+
+ _("Skip whitespace.");
+ do_check_eq("aaaaaaaaaaaaaaaaaaaaaaaaaa", Utils.normalizePassphrase("aaaaaaaaaaaaaaaaaaaaaaaaaa "));
+ do_check_eq("aaaaaaaaaaaaaaaaaaaaaaaaaa", Utils.normalizePassphrase(" aaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ do_check_eq("aaaaaaaaaaaaaaaaaaaaaaaaaa", Utils.normalizePassphrase(" aaaaaaaaaaaaaaaaaaaaaaaaaa "));
+ do_check_eq("aaaaaaaaaaaaaaaaaaaaaaaaaa", Utils.normalizePassphrase(" a-aaaaa-aaaaa-aaaaa-aaaaa-aaaaa "));
+ do_check_true(Utils.isPassphrase("aaaaaaaaaaaaaaaaaaaaaaaaaa "));
+ do_check_true(Utils.isPassphrase(" aaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ do_check_true(Utils.isPassphrase(" aaaaaaaaaaaaaaaaaaaaaaaaaa "));
+ do_check_true(Utils.isPassphrase(" a-aaaaa-aaaaa-aaaaa-aaaaa-aaaaa "));
+ do_check_false(Utils.isPassphrase(" -aaaaa-aaaaa-aaaaa-aaaaa-aaaaa "));
+
+ _("Normalizing 20-char passphrases.");
+ do_check_eq(Utils.normalizePassphrase("abcde-abcde-abcde-abcde"),
+ "abcdeabcdeabcdeabcde");
+ do_check_eq(Utils.normalizePassphrase("a-bcde-abcde-abcde-abcde"),
+ "a-bcde-abcde-abcde-abcde");
+ do_check_eq(Utils.normalizePassphrase(" abcde-abcde-abcde-abcde "),
+ "abcdeabcdeabcdeabcde");
+
+ _("Normalizing username.");
+ do_check_eq(Utils.normalizeAccount(" QA1234+boo@mozilla.com "), "QA1234+boo@mozilla.com");
+ do_check_eq(Utils.normalizeAccount("QA1234+boo@mozilla.com"), "QA1234+boo@mozilla.com");
+}
diff --git a/services/sync/tests/unit/test_warn_on_truncated_response.js b/services/sync/tests/unit/test_warn_on_truncated_response.js
new file mode 100644
index 000000000..1f0d87ba9
--- /dev/null
+++ b/services/sync/tests/unit/test_warn_on_truncated_response.js
@@ -0,0 +1,95 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://testing-common/httpd.js");
+Cu.import("resource://services-sync/resource.js");
+Cu.import("resource://services-sync/rest.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+var BODY = "response body";
+// contentLength needs to be longer than the response body
+// length in order to get a mismatch between what is sent in
+// the response and the content-length header value.
+var contentLength = BODY.length + 1;
+
+function contentHandler(request, response) {
+ _("Handling request.");
+ response.setHeader("Content-Type", "text/plain");
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(BODY, contentLength);
+}
+
+function getWarningMessages(log) {
+ let warnMessages = [];
+ let warn = log.warn;
+ log.warn = function (message) {
+ let regEx = /The response body\'s length of: \d+ doesn\'t match the header\'s content-length of: \d+/i
+ if (message.match(regEx)) {
+ warnMessages.push(message);
+ }
+ warn.call(log, message);
+ }
+ return warnMessages;
+}
+
+add_test(function test_resource_logs_content_length_mismatch() {
+ _("Issuing request.");
+ let httpServer = httpd_setup({"/content": contentHandler});
+ let resource = new Resource(httpServer.baseURI + "/content");
+
+ let warnMessages = getWarningMessages(resource._log);
+ let result = resource.get();
+
+ notEqual(warnMessages.length, 0, "test that a warning was logged");
+ notEqual(result.length, contentLength);
+ equal(result, BODY);
+
+ httpServer.stop(run_next_test);
+});
+
+add_test(function test_async_resource_logs_content_length_mismatch() {
+ _("Issuing request.");
+ let httpServer = httpd_setup({"/content": contentHandler});
+ let asyncResource = new AsyncResource(httpServer.baseURI + "/content");
+
+ let warnMessages = getWarningMessages(asyncResource._log);
+
+ asyncResource.get(function (error, content) {
+ equal(error, null);
+ equal(content, BODY);
+ notEqual(warnMessages.length, 0, "test that warning was logged");
+ notEqual(content.length, contentLength);
+ httpServer.stop(run_next_test);
+ });
+});
+
+add_test(function test_sync_storage_request_logs_content_length_mismatch() {
+ _("Issuing request.");
+ let httpServer = httpd_setup({"/content": contentHandler});
+ let request = new SyncStorageRequest(httpServer.baseURI + "/content");
+ let warnMessages = getWarningMessages(request._log);
+
+ // Setting this affects how received data is read from the underlying
+ // nsIHttpChannel in rest.js. If it's left as UTF-8 (the default) an
+ // nsIConverterInputStream is used and the data read from channel's stream
+ // isn't truncated at the null byte mark (\u0000). Therefore the
+ // content-length mismatch being tested for doesn't occur. Setting it to
+ // a falsy value results in an nsIScriptableInputStream being used to read
+ // the stream, which stops reading at the null byte mark resulting in a
+ // content-length mismatch.
+ request.charset = "";
+
+ request.get(function (error) {
+ equal(error, null);
+ equal(this.response.body, BODY);
+ notEqual(warnMessages.length, 0, "test that a warning was logged");
+ notEqual(BODY.length, contentLength);
+ httpServer.stop(run_next_test);
+ });
+});
diff --git a/services/sync/tests/unit/xpcshell.ini b/services/sync/tests/unit/xpcshell.ini
new file mode 100644
index 000000000..e5b32e7b1
--- /dev/null
+++ b/services/sync/tests/unit/xpcshell.ini
@@ -0,0 +1,200 @@
+[DEFAULT]
+head = head_appinfo.js ../../../common/tests/unit/head_helpers.js head_helpers.js head_http_server.js head_errorhandler_common.js
+tail =
+firefox-appdir = browser
+support-files =
+ addon1-search.xml
+ bootstrap1-search.xml
+ fake_login_manager.js
+ missing-sourceuri.xml
+ missing-xpi-search.xml
+ places_v10_from_v11.sqlite
+ rewrite-search.xml
+ sync_ping_schema.json
+ systemaddon-search.xml
+ !/services/common/tests/unit/head_helpers.js
+ !/toolkit/mozapps/extensions/test/xpcshell/head_addons.js
+ !/toolkit/components/extensions/test/xpcshell/head_sync.js
+
+# The manifest is roughly ordered from low-level to high-level. When making
+# systemic sweeping changes, this makes it easier to identify errors closer to
+# the source.
+
+# Ensure we can import everything.
+[test_load_modules.js]
+
+# util contains a bunch of functionality used throughout.
+[test_utils_catch.js]
+[test_utils_deepEquals.js]
+[test_utils_deferGetSet.js]
+[test_utils_deriveKey.js]
+[test_utils_keyEncoding.js]
+[test_utils_getErrorString.js]
+[test_utils_json.js]
+[test_utils_lazyStrings.js]
+[test_utils_lock.js]
+[test_utils_makeGUID.js]
+[test_utils_notify.js]
+[test_utils_passphrase.js]
+
+# We have a number of other libraries that are pretty much standalone.
+[test_addon_utils.js]
+run-sequentially = Restarts server, can't change pref.
+tags = addons
+[test_httpd_sync_server.js]
+[test_jpakeclient.js]
+# Bug 618233: this test produces random failures on Windows 7.
+# Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini)
+skip-if = os == "win" || os == "android"
+
+# HTTP layers.
+[test_resource.js]
+[test_resource_async.js]
+[test_resource_header.js]
+[test_resource_ua.js]
+[test_syncstoragerequest.js]
+
+# Generic Sync types.
+[test_browserid_identity.js]
+[test_collection_inc_get.js]
+[test_collection_getBatched.js]
+[test_collections_recovery.js]
+[test_identity_manager.js]
+[test_keys.js]
+[test_records_crypto.js]
+[test_records_wbo.js]
+
+# Engine APIs.
+[test_engine.js]
+[test_engine_abort.js]
+[test_enginemanager.js]
+[test_syncengine.js]
+[test_syncengine_sync.js]
+# Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini)
+skip-if = os == "android"
+[test_tracker_addChanged.js]
+
+# Service semantics.
+[test_service_attributes.js]
+[test_service_changePassword.js]
+# Bug 752243: Profile cleanup frequently fails
+skip-if = os == "mac" || os == "linux"
+[test_service_checkAccount.js]
+[test_service_cluster.js]
+[test_service_createAccount.js]
+# Bug 752243: Profile cleanup frequently fails
+skip-if = os == "mac" || os == "linux"
+[test_service_detect_upgrade.js]
+[test_service_getStorageInfo.js]
+[test_service_login.js]
+[test_service_migratePrefs.js]
+[test_service_passwordUTF8.js]
+[test_service_persistLogin.js]
+[test_service_set_serverURL.js]
+[test_service_startOver.js]
+[test_service_startup.js]
+[test_service_sync_401.js]
+[test_service_sync_locked.js]
+[test_service_sync_remoteSetup.js]
+# Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini)
+skip-if = os == "android"
+[test_service_sync_specified.js]
+[test_service_sync_updateEnabledEngines.js]
+# Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini)
+skip-if = os == "android"
+[test_service_verifyLogin.js]
+[test_service_wipeClient.js]
+[test_service_wipeServer.js]
+# Bug 752243: Profile cleanup frequently fails
+skip-if = os == "mac" || os == "linux"
+
+[test_corrupt_keys.js]
+[test_declined.js]
+[test_errorhandler_1.js]
+[test_errorhandler_2.js]
+[test_errorhandler_filelog.js]
+# Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini)
+skip-if = os == "android"
+[test_errorhandler_sync_checkServerError.js]
+# Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini)
+skip-if = os == "android"
+[test_errorhandler_eol.js]
+[test_hmac_error.js]
+[test_interval_triggers.js]
+[test_node_reassignment.js]
+[test_score_triggers.js]
+[test_sendcredentials_controller.js]
+[test_status.js]
+[test_status_checkSetup.js]
+[test_syncscheduler.js]
+[test_upgrade_old_sync_key.js]
+
+# Firefox Accounts specific tests
+[test_fxa_startOver.js]
+[test_fxa_service_cluster.js]
+[test_fxa_node_reassignment.js]
+
+# Finally, we test each engine.
+[test_addons_engine.js]
+run-sequentially = Hardcoded port in static files.
+tags = addons
+[test_addons_reconciler.js]
+tags = addons
+[test_addons_store.js]
+run-sequentially = Hardcoded port in static files.
+tags = addons
+[test_addons_tracker.js]
+tags = addons
+[test_bookmark_batch_fail.js]
+[test_bookmark_duping.js]
+[test_bookmark_engine.js]
+[test_bookmark_invalid.js]
+[test_bookmark_legacy_microsummaries_support.js]
+[test_bookmark_livemarks.js]
+[test_bookmark_order.js]
+[test_bookmark_places_query_rewriting.js]
+[test_bookmark_record.js]
+[test_bookmark_smart_bookmarks.js]
+[test_bookmark_store.js]
+# Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479)
+skip-if = debug
+[test_bookmark_tracker.js]
+requesttimeoutfactor = 4
+[test_bookmark_validator.js]
+[test_clients_engine.js]
+[test_clients_escape.js]
+[test_extension_storage_crypto.js]
+[test_extension_storage_engine.js]
+[test_extension_storage_tracker.js]
+[test_forms_store.js]
+[test_forms_tracker.js]
+# Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479)
+skip-if = debug
+[test_history_engine.js]
+[test_history_store.js]
+[test_history_tracker.js]
+# Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479)
+skip-if = debug
+[test_places_guid_downgrade.js]
+[test_password_store.js]
+[test_password_validator.js]
+[test_password_tracker.js]
+# Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479)
+skip-if = debug
+[test_prefs_store.js]
+support-files = prefs_test_prefs_store.js
+[test_prefs_tracker.js]
+[test_tab_engine.js]
+[test_tab_store.js]
+[test_tab_tracker.js]
+
+[test_warn_on_truncated_response.js]
+[test_postqueue.js]
+
+# FxA migration
+[test_fxa_migration.js]
+
+# Synced tabs.
+[test_syncedtabs.js]
+
+[test_telemetry.js]
diff --git a/services/sync/tps/extensions/mozmill/chrome.manifest b/services/sync/tps/extensions/mozmill/chrome.manifest
new file mode 100755
index 000000000..dfb370321
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/chrome.manifest
@@ -0,0 +1,2 @@
+resource mozmill resource/
+
diff --git a/services/sync/tps/extensions/mozmill/install.rdf b/services/sync/tps/extensions/mozmill/install.rdf
new file mode 100755
index 000000000..bbc759cf1
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/install.rdf
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>mozmill@mozilla.com</em:id>
+ <em:name>Mozmill</em:name>
+ <em:version>2.0.8</em:version>
+ <em:description>UI Automation tool for Mozilla applications</em:description>
+ <em:unpack>true</em:unpack>
+
+ <em:creator>Mozilla Automation and Testing Team</em:creator>
+ <em:contributor>Adam Christian</em:contributor>
+ <em:contributor>Mikeal Rogers</em:contributor>
+
+ <em:targetApplication>
+ <Description>
+ <em:id>toolkit@mozilla.org</em:id>
+ <em:minVersion>10.0</em:minVersion>
+ <em:maxVersion>38.*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/services/sync/tps/extensions/mozmill/resource/driver/controller.js b/services/sync/tps/extensions/mozmill/resource/driver/controller.js
new file mode 100644
index 000000000..a378ce51f
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/driver/controller.js
@@ -0,0 +1,1141 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ["MozMillController", "globalEventRegistry",
+ "sleep", "windowMap"];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+var EventUtils = {}; Cu.import('resource://mozmill/stdlib/EventUtils.js', EventUtils);
+
+var assertions = {}; Cu.import('resource://mozmill/modules/assertions.js', assertions);
+var broker = {}; Cu.import('resource://mozmill/driver/msgbroker.js', broker);
+var elementslib = {}; Cu.import('resource://mozmill/driver/elementslib.js', elementslib);
+var errors = {}; Cu.import('resource://mozmill/modules/errors.js', errors);
+var mozelement = {}; Cu.import('resource://mozmill/driver/mozelement.js', mozelement);
+var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils);
+var windows = {}; Cu.import('resource://mozmill/modules/windows.js', windows);
+
+// Declare most used utils functions in the controller namespace
+var assert = new assertions.Assert();
+var waitFor = assert.waitFor;
+
+var sleep = utils.sleep;
+
+// For Mozmill 1.5 backward compatibility
+var windowMap = windows.map;
+
+waitForEvents = function () {
+}
+
+waitForEvents.prototype = {
+ /**
+ * Initialize list of events for given node
+ */
+ init: function waitForEvents_init(node, events) {
+ if (node.getNode != undefined)
+ node = node.getNode();
+
+ this.events = events;
+ this.node = node;
+ node.firedEvents = {};
+ this.registry = {};
+
+ if (!events) {
+ return;
+ }
+ for (var key in events) {
+ var e = events[key];
+ var listener = function (event) {
+ this.firedEvents[event.type] = true;
+ }
+
+ this.registry[e] = listener;
+ this.registry[e].result = false;
+ this.node.addEventListener(e, this.registry[e], true);
+ }
+ },
+
+ /**
+ * Wait until all assigned events have been fired
+ */
+ wait: function waitForEvents_wait(timeout, interval) {
+ for (var e in this.registry) {
+ assert.waitFor(function () {
+ return this.node.firedEvents[e] == true;
+ }, "waitForEvents.wait(): Event '" + ex + "' has been fired.", timeout, interval);
+
+ this.node.removeEventListener(e, this.registry[e], true);
+ }
+ }
+}
+
+/**
+ * Class to handle menus and context menus
+ *
+ * @constructor
+ * @param {MozMillController} controller
+ * Mozmill controller of the window under test
+ * @param {string} menuSelector
+ * jQuery like selector string of the element
+ * @param {object} document
+ * Document to use for finding the menu
+ * [optional - default: aController.window.document]
+ */
+var Menu = function (controller, menuSelector, document) {
+ this._controller = controller;
+ this._menu = null;
+
+ document = document || controller.window.document;
+ var node = document.querySelector(menuSelector);
+ if (node) {
+ // We don't unwrap nodes automatically yet (Bug 573185)
+ node = node.wrappedJSObject || node;
+ this._menu = new mozelement.Elem(node);
+ } else {
+ throw new Error("Menu element '" + menuSelector + "' not found.");
+ }
+}
+
+Menu.prototype = {
+
+ /**
+ * Open and populate the menu
+ *
+ * @param {ElemBase} contextElement
+ * Element whose context menu has to be opened
+ * @returns {Menu} The Menu instance
+ */
+ open: function Menu_open(contextElement) {
+ // We have to open the context menu
+ var menu = this._menu.getNode();
+ if ((menu.localName == "popup" || menu.localName == "menupopup") &&
+ contextElement && contextElement.exists()) {
+ this._controller.rightClick(contextElement);
+ assert.waitFor(function () {
+ return menu.state == "open";
+ }, "Context menu has been opened.");
+ }
+
+ // Run through the entire menu and populate with dynamic entries
+ this._buildMenu(menu);
+
+ return this;
+ },
+
+ /**
+ * Close the menu
+ *
+ * @returns {Menu} The Menu instance
+ */
+ close: function Menu_close() {
+ var menu = this._menu.getNode();
+
+ this._controller.keypress(this._menu, "VK_ESCAPE", {});
+ assert.waitFor(function () {
+ return menu.state == "closed";
+ }, "Context menu has been closed.");
+
+ return this;
+ },
+
+ /**
+ * Retrieve the specified menu entry
+ *
+ * @param {string} itemSelector
+ * jQuery like selector string of the menu item
+ * @returns {ElemBase} Menu element
+ * @throws Error If menu element has not been found
+ */
+ getItem: function Menu_getItem(itemSelector) {
+ // Run through the entire menu and populate with dynamic entries
+ this._buildMenu(this._menu.getNode());
+
+ var node = this._menu.getNode().querySelector(itemSelector);
+
+ if (!node) {
+ throw new Error("Menu entry '" + itemSelector + "' not found.");
+ }
+
+ return new mozelement.Elem(node);
+ },
+
+ /**
+ * Click the specified menu entry
+ *
+ * @param {string} itemSelector
+ * jQuery like selector string of the menu item
+ *
+ * @returns {Menu} The Menu instance
+ */
+ click: function Menu_click(itemSelector) {
+ this._controller.click(this.getItem(itemSelector));
+
+ return this;
+ },
+
+ /**
+ * Synthesize a keypress against the menu
+ *
+ * @param {string} key
+ * Key to press
+ * @param {object} modifier
+ * Key modifiers
+ * @see MozMillController#keypress
+ *
+ * @returns {Menu} The Menu instance
+ */
+ keypress: function Menu_keypress(key, modifier) {
+ this._controller.keypress(this._menu, key, modifier);
+
+ return this;
+ },
+
+ /**
+ * Opens the context menu, click the specified entry and
+ * make sure that the menu has been closed.
+ *
+ * @param {string} itemSelector
+ * jQuery like selector string of the element
+ * @param {ElemBase} contextElement
+ * Element whose context menu has to be opened
+ *
+ * @returns {Menu} The Menu instance
+ */
+ select: function Menu_select(itemSelector, contextElement) {
+ this.open(contextElement);
+ this.click(itemSelector);
+ this.close();
+ },
+
+ /**
+ * Recursive function which iterates through all menu elements and
+ * populates the menus with dynamic menu entries.
+ *
+ * @param {node} menu
+ * Top menu node whose elements have to be populated
+ */
+ _buildMenu: function Menu__buildMenu(menu) {
+ var items = menu ? menu.childNodes : null;
+
+ Array.forEach(items, function (item) {
+ // When we have a menu node, fake a click onto it to populate
+ // the sub menu with dynamic entries
+ if (item.tagName == "menu") {
+ var popup = item.querySelector("menupopup");
+
+ if (popup) {
+ var popupEvent = this._controller.window.document.createEvent("MouseEvent");
+ popupEvent.initMouseEvent("popupshowing", true, true,
+ this._controller.window, 0, 0, 0, 0, 0,
+ false, false, false, false, 0, null);
+ popup.dispatchEvent(popupEvent);
+
+ this._buildMenu(popup);
+ }
+ }
+ }, this);
+ }
+};
+
+var MozMillController = function (window) {
+ this.window = window;
+
+ this.mozmillModule = {};
+ Cu.import('resource://mozmill/driver/mozmill.js', this.mozmillModule);
+
+ var self = this;
+ assert.waitFor(function () {
+ return window != null && self.isLoaded();
+ }, "controller(): Window has been initialized.");
+
+ // Ensure to focus the window which will move it virtually into the foreground
+ // when focusmanager.testmode is set enabled.
+ this.window.focus();
+
+ var windowType = window.document.documentElement.getAttribute('windowtype');
+ if (controllerAdditions[windowType] != undefined ) {
+ this.prototype = new utils.Copy(this.prototype);
+ controllerAdditions[windowType](this);
+ this.windowtype = windowType;
+ }
+}
+
+/**
+ * Returns the global browser object of the window
+ *
+ * @returns {Object} The browser object
+ */
+MozMillController.prototype.__defineGetter__("browserObject", function () {
+ return utils.getBrowserObject(this.window);
+});
+
+// constructs a MozMillElement from the controller's window
+MozMillController.prototype.__defineGetter__("rootElement", function () {
+ if (this._rootElement == undefined) {
+ let docElement = this.window.document.documentElement;
+ this._rootElement = new mozelement.MozMillElement("Elem", docElement);
+ }
+
+ return this._rootElement;
+});
+
+MozMillController.prototype.sleep = utils.sleep;
+MozMillController.prototype.waitFor = assert.waitFor;
+
+// Open the specified url in the current tab
+MozMillController.prototype.open = function (url) {
+ switch (this.mozmillModule.Application) {
+ case "Firefox":
+ // Stop a running page load to not overlap requests
+ if (this.browserObject.selectedBrowser) {
+ this.browserObject.selectedBrowser.stop();
+ }
+
+ this.browserObject.loadURI(url);
+ break;
+
+ default:
+ throw new Error("MozMillController.open not supported.");
+ }
+
+ broker.pass({'function':'Controller.open()'});
+}
+
+/**
+ * Take a screenshot of specified node
+ *
+ * @param {Element} node
+ * The window or DOM element to capture
+ * @param {String} name
+ * The name of the screenshot used in reporting and as filename
+ * @param {Boolean} save
+ * If true saves the screenshot as 'name.jpg' in tempdir,
+ * otherwise returns a dataURL
+ * @param {Element[]} highlights
+ * A list of DOM elements to highlight by drawing a red rectangle around them
+ *
+ * @returns {Object} Object which contains properties like filename, dataURL,
+ * name and timestamp of the screenshot
+ */
+MozMillController.prototype.screenshot = function (node, name, save, highlights) {
+ if (!node) {
+ throw new Error("node is undefined");
+ }
+
+ // Unwrap the node and highlights
+ if ("getNode" in node) {
+ node = node.getNode();
+ }
+
+ if (highlights) {
+ for (var i = 0; i < highlights.length; ++i) {
+ if ("getNode" in highlights[i]) {
+ highlights[i] = highlights[i].getNode();
+ }
+ }
+ }
+
+ // If save is false, a dataURL is used
+ // Include both in the report anyway to avoid confusion and make the report easier to parse
+ var screenshot = {"filename": undefined,
+ "dataURL": utils.takeScreenshot(node, highlights),
+ "name": name,
+ "timestamp": new Date().toLocaleString()};
+
+ if (!save) {
+ return screenshot;
+ }
+
+ // Save the screenshot to disk
+
+ let {filename, failure} = utils.saveDataURL(screenshot.dataURL, name);
+ screenshot.filename = filename;
+ screenshot.failure = failure;
+
+ if (failure) {
+ broker.log({'function': 'controller.screenshot()',
+ 'message': 'Error writing to file: ' + screenshot.filename});
+ } else {
+ // Send the screenshot object to python over jsbridge
+ broker.sendMessage("screenshot", screenshot);
+ broker.pass({'function': 'controller.screenshot()'});
+ }
+
+ return screenshot;
+}
+
+/**
+ * Checks if the specified window has been loaded
+ *
+ * @param {DOMWindow} [aWindow=this.window] Window object to check for loaded state
+ */
+MozMillController.prototype.isLoaded = function (aWindow) {
+ var win = aWindow || this.window;
+
+ return windows.map.getValue(utils.getWindowId(win), "loaded") || false;
+};
+
+MozMillController.prototype.__defineGetter__("waitForEvents", function () {
+ if (this._waitForEvents == undefined) {
+ this._waitForEvents = new waitForEvents();
+ }
+
+ return this._waitForEvents;
+});
+
+/**
+ * Wrapper function to create a new instance of a menu
+ * @see Menu
+ */
+MozMillController.prototype.getMenu = function (menuSelector, document) {
+ return new Menu(this, menuSelector, document);
+};
+
+MozMillController.prototype.__defineGetter__("mainMenu", function () {
+ return this.getMenu("menubar");
+});
+
+MozMillController.prototype.__defineGetter__("menus", function () {
+ logDeprecated('controller.menus', 'Use controller.mainMenu instead');
+});
+
+MozMillController.prototype.waitForImage = function (aElement, timeout, interval) {
+ this.waitFor(function () {
+ return aElement.getNode().complete == true;
+ }, "timeout exceeded for waitForImage " + aElement.getInfo(), timeout, interval);
+
+ broker.pass({'function':'Controller.waitForImage()'});
+}
+
+MozMillController.prototype.startUserShutdown = function (timeout, restart, next, resetProfile) {
+ if (restart && resetProfile) {
+ throw new Error("You can't have a user-restart and reset the profile; there is a race condition");
+ }
+
+ let shutdownObj = {
+ 'user': true,
+ 'restart': Boolean(restart),
+ 'next': next,
+ 'resetProfile': Boolean(resetProfile),
+ 'timeout': timeout
+ };
+
+ broker.sendMessage('shutdown', shutdownObj);
+}
+
+/**
+ * Restart the application
+ *
+ * @param {string} aNext
+ * Name of the next test function to run after restart
+ * @param {boolean} [aFlags=undefined]
+ * Additional flags how to handle the shutdown or restart. The attributes
+ * eRestarti386 (0x20) and eRestartx86_64 (0x30) have not been documented yet.
+ * @see https://developer.mozilla.org/nsIAppStartup#Attributes
+ */
+MozMillController.prototype.restartApplication = function (aNext, aFlags) {
+ var flags = Ci.nsIAppStartup.eAttemptQuit | Ci.nsIAppStartup.eRestart;
+
+ if (aFlags) {
+ flags |= aFlags;
+ }
+
+ broker.sendMessage('shutdown', {'user': false,
+ 'restart': true,
+ 'flags': flags,
+ 'next': aNext,
+ 'timeout': 0 });
+
+ // We have to ensure to stop the test from continuing until the application is
+ // shutting down. The only way to do that is by throwing an exception.
+ throw new errors.ApplicationQuitError();
+}
+
+/**
+ * Stop the application
+ *
+ * @param {boolean} [aResetProfile=false]
+ * Whether to reset the profile during restart
+ * @param {boolean} [aFlags=undefined]
+ * Additional flags how to handle the shutdown or restart. The attributes
+ * eRestarti386 and eRestartx86_64 have not been documented yet.
+ * @see https://developer.mozilla.org/nsIAppStartup#Attributes
+ */
+MozMillController.prototype.stopApplication = function (aResetProfile, aFlags) {
+ var flags = Ci.nsIAppStartup.eAttemptQuit;
+
+ if (aFlags) {
+ flags |= aFlags;
+ }
+
+ broker.sendMessage('shutdown', {'user': false,
+ 'restart': false,
+ 'flags': flags,
+ 'resetProfile': aResetProfile,
+ 'timeout': 0 });
+
+ // We have to ensure to stop the test from continuing until the application is
+ // shutting down. The only way to do that is by throwing an exception.
+ throw new errors.ApplicationQuitError();
+}
+
+//Browser navigation functions
+MozMillController.prototype.goBack = function () {
+ this.window.content.history.back();
+ broker.pass({'function':'Controller.goBack()'});
+
+ return true;
+}
+
+MozMillController.prototype.goForward = function () {
+ this.window.content.history.forward();
+ broker.pass({'function':'Controller.goForward()'});
+
+ return true;
+}
+
+MozMillController.prototype.refresh = function () {
+ this.window.content.location.reload(true);
+ broker.pass({'function':'Controller.refresh()'});
+
+ return true;
+}
+
+function logDeprecated(funcName, message) {
+ broker.log({'function': funcName + '() - DEPRECATED',
+ 'message': funcName + '() is deprecated. ' + message});
+}
+
+function logDeprecatedAssert(funcName) {
+ logDeprecated('controller.' + funcName,
+ '. Use the generic `assertion` module instead.');
+}
+
+MozMillController.prototype.assertText = function (el, text) {
+ logDeprecatedAssert("assertText");
+
+ var n = el.getNode();
+
+ if (n && n.innerHTML == text) {
+ broker.pass({'function': 'Controller.assertText()'});
+ } else {
+ throw new Error("could not validate element " + el.getInfo() +
+ " with text "+ text);
+ }
+
+ return true;
+};
+
+/**
+ * Assert that a specified node exists
+ */
+MozMillController.prototype.assertNode = function (el) {
+ logDeprecatedAssert("assertNode");
+
+ //this.window.focus();
+ var element = el.getNode();
+ if (!element) {
+ throw new Error("could not find element " + el.getInfo());
+ }
+
+ broker.pass({'function': 'Controller.assertNode()'});
+ return true;
+};
+
+/**
+ * Assert that a specified node doesn't exist
+ */
+MozMillController.prototype.assertNodeNotExist = function (el) {
+ logDeprecatedAssert("assertNodeNotExist");
+
+ try {
+ var element = el.getNode();
+ } catch (e) {
+ broker.pass({'function': 'Controller.assertNodeNotExist()'});
+ }
+
+ if (element) {
+ throw new Error("Unexpectedly found element " + el.getInfo());
+ } else {
+ broker.pass({'function':'Controller.assertNodeNotExist()'});
+ }
+
+ return true;
+};
+
+/**
+ * Assert that a form element contains the expected value
+ */
+MozMillController.prototype.assertValue = function (el, value) {
+ logDeprecatedAssert("assertValue");
+
+ var n = el.getNode();
+
+ if (n && n.value == value) {
+ broker.pass({'function': 'Controller.assertValue()'});
+ } else {
+ throw new Error("could not validate element " + el.getInfo() +
+ " with value " + value);
+ }
+
+ return false;
+};
+
+/**
+ * Check if the callback function evaluates to true
+ */
+MozMillController.prototype.assert = function (callback, message, thisObject) {
+ logDeprecatedAssert("assert");
+
+ utils.assert(callback, message, thisObject);
+ broker.pass({'function': ": controller.assert('" + callback + "')"});
+
+ return true;
+}
+
+/**
+ * Assert that a provided value is selected in a select element
+ */
+MozMillController.prototype.assertSelected = function (el, value) {
+ logDeprecatedAssert("assertSelected");
+
+ var n = el.getNode();
+ var validator = value;
+
+ if (n && n.options[n.selectedIndex].value == validator) {
+ broker.pass({'function':'Controller.assertSelected()'});
+ } else {
+ throw new Error("could not assert value for element " + el.getInfo() +
+ " with value " + value);
+ }
+
+ return true;
+};
+
+/**
+ * Assert that a provided checkbox is checked
+ */
+MozMillController.prototype.assertChecked = function (el) {
+ logDeprecatedAssert("assertChecked");
+
+ var element = el.getNode();
+
+ if (element && element.checked == true) {
+ broker.pass({'function':'Controller.assertChecked()'});
+ } else {
+ throw new Error("assert failed for checked element " + el.getInfo());
+ }
+
+ return true;
+};
+
+/**
+ * Assert that a provided checkbox is not checked
+ */
+MozMillController.prototype.assertNotChecked = function (el) {
+ logDeprecatedAssert("assertNotChecked");
+
+ var element = el.getNode();
+
+ if (!element) {
+ throw new Error("Could not find element" + el.getInfo());
+ }
+
+ if (!element.hasAttribute("checked") || element.checked != true) {
+ broker.pass({'function': 'Controller.assertNotChecked()'});
+ } else {
+ throw new Error("assert failed for not checked element " + el.getInfo());
+ }
+
+ return true;
+};
+
+/**
+ * Assert that an element's javascript property exists or has a particular value
+ *
+ * if val is undefined, will return true if the property exists.
+ * if val is specified, will return true if the property exists and has the correct value
+ */
+MozMillController.prototype.assertJSProperty = function (el, attrib, val) {
+ logDeprecatedAssert("assertJSProperty");
+
+ var element = el.getNode();
+
+ if (!element){
+ throw new Error("could not find element " + el.getInfo());
+ }
+
+ var value = element[attrib];
+ var res = (value !== undefined && (val === undefined ? true :
+ String(value) == String(val)));
+ if (res) {
+ broker.pass({'function':'Controller.assertJSProperty("' + el.getInfo() + '") : ' + val});
+ } else {
+ throw new Error("Controller.assertJSProperty(" + el.getInfo() + ") : " +
+ (val === undefined ? "property '" + attrib +
+ "' doesn't exist" : val + " == " + value));
+ }
+
+ return true;
+};
+
+/**
+ * Assert that an element's javascript property doesn't exist or doesn't have a particular value
+ *
+ * if val is undefined, will return true if the property doesn't exist.
+ * if val is specified, will return true if the property doesn't exist or doesn't have the specified value
+ */
+MozMillController.prototype.assertNotJSProperty = function (el, attrib, val) {
+ logDeprecatedAssert("assertNotJSProperty");
+
+ var element = el.getNode();
+
+ if (!element){
+ throw new Error("could not find element " + el.getInfo());
+ }
+
+ var value = element[attrib];
+ var res = (val === undefined ? value === undefined : String(value) != String(val));
+ if (res) {
+ broker.pass({'function':'Controller.assertNotProperty("' + el.getInfo() + '") : ' + val});
+ } else {
+ throw new Error("Controller.assertNotJSProperty(" + el.getInfo() + ") : " +
+ (val === undefined ? "property '" + attrib +
+ "' exists" : val + " != " + value));
+ }
+
+ return true;
+};
+
+/**
+ * Assert that an element's dom property exists or has a particular value
+ *
+ * if val is undefined, will return true if the property exists.
+ * if val is specified, will return true if the property exists and has the correct value
+ */
+MozMillController.prototype.assertDOMProperty = function (el, attrib, val) {
+ logDeprecatedAssert("assertDOMProperty");
+
+ var element = el.getNode();
+
+ if (!element){
+ throw new Error("could not find element " + el.getInfo());
+ }
+
+ var value, res = element.hasAttribute(attrib);
+ if (res && val !== undefined) {
+ value = element.getAttribute(attrib);
+ res = (String(value) == String(val));
+ }
+
+ if (res) {
+ broker.pass({'function':'Controller.assertDOMProperty("' + el.getInfo() + '") : ' + val});
+ } else {
+ throw new Error("Controller.assertDOMProperty(" + el.getInfo() + ") : " +
+ (val === undefined ? "property '" + attrib +
+ "' doesn't exist" : val + " == " + value));
+ }
+
+ return true;
+};
+
+/**
+ * Assert that an element's dom property doesn't exist or doesn't have a particular value
+ *
+ * if val is undefined, will return true if the property doesn't exist.
+ * if val is specified, will return true if the property doesn't exist or doesn't have the specified value
+ */
+MozMillController.prototype.assertNotDOMProperty = function (el, attrib, val) {
+ logDeprecatedAssert("assertNotDOMProperty");
+
+ var element = el.getNode();
+
+ if (!element) {
+ throw new Error("could not find element " + el.getInfo());
+ }
+
+ var value, res = element.hasAttribute(attrib);
+ if (res && val !== undefined) {
+ value = element.getAttribute(attrib);
+ res = (String(value) == String(val));
+ }
+
+ if (!res) {
+ broker.pass({'function':'Controller.assertNotDOMProperty("' + el.getInfo() + '") : ' + val});
+ } else {
+ throw new Error("Controller.assertNotDOMProperty(" + el.getInfo() + ") : " +
+ (val == undefined ? "property '" + attrib +
+ "' exists" : val + " == " + value));
+ }
+
+ return true;
+};
+
+/**
+ * Assert that a specified image has actually loaded. The Safari workaround results
+ * in additional requests for broken images (in Safari only) but works reliably
+ */
+MozMillController.prototype.assertImageLoaded = function (el) {
+ logDeprecatedAssert("assertImageLoaded");
+
+ var img = el.getNode();
+
+ if (!img || img.tagName != 'IMG') {
+ throw new Error('Controller.assertImageLoaded() failed.')
+ return false;
+ }
+
+ var comp = img.complete;
+ var ret = null; // Return value
+
+ // Workaround for Safari -- it only supports the
+ // complete attrib on script-created images
+ if (typeof comp == 'undefined') {
+ test = new Image();
+ // If the original image was successfully loaded,
+ // src for new one should be pulled from cache
+ test.src = img.src;
+ comp = test.complete;
+ }
+
+ // Check the complete attrib. Note the strict
+ // equality check -- we don't want undefined, null, etc.
+ // --------------------------
+ if (comp === false) {
+ // False -- Img failed to load in IE/Safari, or is
+ // still trying to load in FF
+ ret = false;
+ } else if (comp === true && img.naturalWidth == 0) {
+ // True, but image has no size -- image failed to
+ // load in FF
+ ret = false;
+ } else {
+ // Otherwise all we can do is assume everything's
+ // hunky-dory
+ ret = true;
+ }
+
+ if (ret) {
+ broker.pass({'function':'Controller.assertImageLoaded'});
+ } else {
+ throw new Error('Controller.assertImageLoaded() failed.')
+ }
+
+ return true;
+};
+
+/**
+ * Drag one element to the top x,y coords of another specified element
+ */
+MozMillController.prototype.mouseMove = function (doc, start, dest) {
+ // if one of these elements couldn't be looked up
+ if (typeof start != 'object'){
+ throw new Error("received bad coordinates");
+ }
+
+ if (typeof dest != 'object'){
+ throw new Error("received bad coordinates");
+ }
+
+ var triggerMouseEvent = function (element, clientX, clientY) {
+ clientX = clientX ? clientX: 0;
+ clientY = clientY ? clientY: 0;
+
+ // make the mouse understand where it is on the screen
+ var screenX = element.boxObject.screenX ? element.boxObject.screenX : 0;
+ var screenY = element.boxObject.screenY ? element.boxObject.screenY : 0;
+
+ var evt = element.ownerDocument.createEvent('MouseEvents');
+ if (evt.initMouseEvent) {
+ evt.initMouseEvent('mousemove', true, true, element.ownerDocument.defaultView,
+ 1, screenX, screenY, clientX, clientY);
+ } else {
+ evt.initEvent('mousemove', true, true);
+ }
+
+ element.dispatchEvent(evt);
+ };
+
+ // Do the initial move to the drag element position
+ triggerMouseEvent(doc.body, start[0], start[1]);
+ triggerMouseEvent(doc.body, dest[0], dest[1]);
+
+ broker.pass({'function':'Controller.mouseMove()'});
+ return true;
+}
+
+/**
+ * Drag an element to the specified offset on another element, firing mouse and
+ * drag events. Adapted from EventUtils.js synthesizeDrop()
+ *
+ * @deprecated Use the MozMillElement object
+ *
+ * @param {MozElement} aSrc
+ * Source element to be dragged
+ * @param {MozElement} aDest
+ * Destination element over which the drop occurs
+ * @param {Number} [aOffsetX=element.width/2]
+ * Relative x offset for dropping on the aDest element
+ * @param {Number} [aOffsetY=element.height/2]
+ * Relative y offset for dropping on the aDest element
+ * @param {DOMWindow} [aSourceWindow=this.element.ownerDocument.defaultView]
+ * Custom source Window to be used.
+ * @param {String} [aDropEffect="move"]
+ * Effect used for the drop event
+ * @param {Object[]} [aDragData]
+ * An array holding custom drag data to be used during the drag event
+ * Format: [{ type: "text/plain", "Text to drag"}, ...]
+ *
+ * @returns {String} the captured dropEffect
+ */
+MozMillController.prototype.dragToElement = function (aSrc, aDest, aOffsetX,
+ aOffsetY, aSourceWindow,
+ aDropEffect, aDragData) {
+ logDeprecated("controller.dragToElement", "Use the MozMillElement object.");
+ return aSrc.dragToElement(aDest, aOffsetX, aOffsetY, aSourceWindow, null,
+ aDropEffect, aDragData);
+};
+
+function Tabs(controller) {
+ this.controller = controller;
+}
+
+Tabs.prototype.getTab = function (index) {
+ return this.controller.browserObject.browsers[index].contentDocument;
+}
+
+Tabs.prototype.__defineGetter__("activeTab", function () {
+ return this.controller.browserObject.selectedBrowser.contentDocument;
+});
+
+Tabs.prototype.selectTab = function (index) {
+ // GO in to tab manager and grab the tab by index and call focus.
+}
+
+Tabs.prototype.findWindow = function (doc) {
+ for (var i = 0; i <= (this.controller.window.frames.length - 1); i++) {
+ if (this.controller.window.frames[i].document == doc) {
+ return this.controller.window.frames[i];
+ }
+ }
+
+ throw new Error("Cannot find window for document. Doc title == " + doc.title);
+}
+
+Tabs.prototype.getTabWindow = function (index) {
+ return this.findWindow(this.getTab(index));
+}
+
+Tabs.prototype.__defineGetter__("activeTabWindow", function () {
+ return this.findWindow(this.activeTab);
+});
+
+Tabs.prototype.__defineGetter__("length", function () {
+ return this.controller.browserObject.browsers.length;
+});
+
+Tabs.prototype.__defineGetter__("activeTabIndex", function () {
+ var browser = this.controller.browserObject;
+ return browser.tabContainer.selectedIndex;
+});
+
+Tabs.prototype.selectTabIndex = function (aIndex) {
+ var browser = this.controller.browserObject;
+ browser.selectTabAtIndex(aIndex);
+}
+
+function browserAdditions (controller) {
+ controller.tabs = new Tabs(controller);
+
+ controller.waitForPageLoad = function (aDocument, aTimeout, aInterval) {
+ var timeout = aTimeout || 30000;
+ var win = null;
+ var timed_out = false;
+
+ // If a user tries to do waitForPageLoad(2000), this will assign the
+ // interval the first arg which is most likely what they were expecting
+ if (typeof(aDocument) == "number"){
+ timeout = aDocument;
+ }
+
+ // If we have a real document use its default view
+ if (aDocument && (typeof(aDocument) === "object") &&
+ "defaultView" in aDocument)
+ win = aDocument.defaultView;
+
+ // If no document has been specified, fallback to the default view of the
+ // currently selected tab browser
+ win = win || this.browserObject.selectedBrowser.contentWindow;
+
+ // Wait until the content in the tab has been loaded
+ try {
+ this.waitFor(function () {
+ return windows.map.hasPageLoaded(utils.getWindowId(win));
+ }, "Timeout", timeout, aInterval);
+ }
+ catch (ex) {
+ if (!ex instanceof errors.TimeoutError) {
+ throw ex;
+ }
+ timed_out = true;
+ }
+ finally {
+ state = 'URI=' + win.document.location.href +
+ ', readyState=' + win.document.readyState;
+ message = "controller.waitForPageLoad(" + state + ")";
+
+ if (timed_out) {
+ throw new errors.AssertionError(message);
+ }
+
+ broker.pass({'function': message});
+ }
+ }
+}
+
+var controllerAdditions = {
+ 'navigator:browser' :browserAdditions
+};
+
+/**
+ * DEPRECATION WARNING
+ *
+ * The following methods have all been DEPRECATED as of Mozmill 2.0
+ */
+MozMillController.prototype.assertProperty = function (el, attrib, val) {
+ logDeprecatedAssert("assertProperty");
+
+ return this.assertJSProperty(el, attrib, val);
+};
+
+MozMillController.prototype.assertPropertyNotExist = function (el, attrib) {
+ logDeprecatedAssert("assertPropertyNotExist");
+ return this.assertNotJSProperty(el, attrib);
+};
+
+/**
+ * DEPRECATION WARNING
+ *
+ * The following methods have all been DEPRECATED as of Mozmill 2.0
+ * Use the MozMillElement object instead (https://developer.mozilla.org/en/Mozmill/Mozmill_Element_Object)
+ */
+MozMillController.prototype.select = function (aElement, index, option, value) {
+ logDeprecated("controller.select", "Use the MozMillElement object.");
+
+ return aElement.select(index, option, value);
+};
+
+MozMillController.prototype.keypress = function (aElement, aKey, aModifiers, aExpectedEvent) {
+ logDeprecated("controller.keypress", "Use the MozMillElement object.");
+
+ if (!aElement) {
+ aElement = new mozelement.MozMillElement("Elem", this.window);
+ }
+
+ return aElement.keypress(aKey, aModifiers, aExpectedEvent);
+}
+
+MozMillController.prototype.type = function (aElement, aText, aExpectedEvent) {
+ logDeprecated("controller.type", "Use the MozMillElement object.");
+
+ if (!aElement) {
+ aElement = new mozelement.MozMillElement("Elem", this.window);
+ }
+
+ var that = this;
+ var retval = true;
+ Array.forEach(aText, function (letter) {
+ if (!that.keypress(aElement, letter, {}, aExpectedEvent)) {
+ retval = false; }
+ });
+
+ return retval;
+}
+
+MozMillController.prototype.mouseEvent = function (aElement, aOffsetX, aOffsetY, aEvent, aExpectedEvent) {
+ logDeprecated("controller.mouseEvent", "Use the MozMillElement object.");
+
+ return aElement.mouseEvent(aOffsetX, aOffsetY, aEvent, aExpectedEvent);
+}
+
+MozMillController.prototype.click = function (aElement, left, top, expectedEvent) {
+ logDeprecated("controller.click", "Use the MozMillElement object.");
+
+ return aElement.click(left, top, expectedEvent);
+}
+
+MozMillController.prototype.doubleClick = function (aElement, left, top, expectedEvent) {
+ logDeprecated("controller.doubleClick", "Use the MozMillElement object.");
+
+ return aElement.doubleClick(left, top, expectedEvent);
+}
+
+MozMillController.prototype.mouseDown = function (aElement, button, left, top, expectedEvent) {
+ logDeprecated("controller.mouseDown", "Use the MozMillElement object.");
+
+ return aElement.mouseDown(button, left, top, expectedEvent);
+};
+
+MozMillController.prototype.mouseOut = function (aElement, button, left, top, expectedEvent) {
+ logDeprecated("controller.mouseOut", "Use the MozMillElement object.");
+
+ return aElement.mouseOut(button, left, top, expectedEvent);
+};
+
+MozMillController.prototype.mouseOver = function (aElement, button, left, top, expectedEvent) {
+ logDeprecated("controller.mouseOver", "Use the MozMillElement object.");
+
+ return aElement.mouseOver(button, left, top, expectedEvent);
+};
+
+MozMillController.prototype.mouseUp = function (aElement, button, left, top, expectedEvent) {
+ logDeprecated("controller.mouseUp", "Use the MozMillElement object.");
+
+ return aElement.mouseUp(button, left, top, expectedEvent);
+};
+
+MozMillController.prototype.middleClick = function (aElement, left, top, expectedEvent) {
+ logDeprecated("controller.middleClick", "Use the MozMillElement object.");
+
+ return aElement.middleClick(aElement, left, top, expectedEvent);
+}
+
+MozMillController.prototype.rightClick = function (aElement, left, top, expectedEvent) {
+ logDeprecated("controller.rightClick", "Use the MozMillElement object.");
+
+ return aElement.rightClick(left, top, expectedEvent);
+}
+
+MozMillController.prototype.check = function (aElement, state) {
+ logDeprecated("controller.check", "Use the MozMillElement object.");
+
+ return aElement.check(state);
+}
+
+MozMillController.prototype.radio = function (aElement) {
+ logDeprecated("controller.radio", "Use the MozMillElement object.");
+
+ return aElement.select();
+}
+
+MozMillController.prototype.waitThenClick = function (aElement, timeout, interval) {
+ logDeprecated("controller.waitThenClick", "Use the MozMillElement object.");
+
+ return aElement.waitThenClick(timeout, interval);
+}
+
+MozMillController.prototype.waitForElement = function (aElement, timeout, interval) {
+ logDeprecated("controller.waitForElement", "Use the MozMillElement object.");
+
+ return aElement.waitForElement(timeout, interval);
+}
+
+MozMillController.prototype.waitForElementNotPresent = function (aElement, timeout, interval) {
+ logDeprecated("controller.waitForElementNotPresent", "Use the MozMillElement object.");
+
+ return aElement.waitForElementNotPresent(timeout, interval);
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/driver/elementslib.js b/services/sync/tps/extensions/mozmill/resource/driver/elementslib.js
new file mode 100644
index 000000000..4bf35a384
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/driver/elementslib.js
@@ -0,0 +1,537 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ["ID", "Link", "XPath", "Selector", "Name", "Anon", "AnonXPath",
+ "Lookup", "_byID", "_byName", "_byAttrib", "_byAnonAttrib",
+ ];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/Services.jsm");
+
+var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils);
+var strings = {}; Cu.import('resource://mozmill/stdlib/strings.js', strings);
+var arrays = {}; Cu.import('resource://mozmill/stdlib/arrays.js', arrays);
+var json2 = {}; Cu.import('resource://mozmill/stdlib/json2.js', json2);
+var withs = {}; Cu.import('resource://mozmill/stdlib/withs.js', withs);
+var dom = {}; Cu.import('resource://mozmill/stdlib/dom.js', dom);
+var objects = {}; Cu.import('resource://mozmill/stdlib/objects.js', objects);
+
+var countQuotes = function (str) {
+ var count = 0;
+ var i = 0;
+
+ while (i < str.length) {
+ i = str.indexOf('"', i);
+ if (i != -1) {
+ count++;
+ i++;
+ } else {
+ break;
+ }
+ }
+
+ return count;
+};
+
+/**
+ * smartSplit()
+ *
+ * Takes a lookup string as input and returns
+ * a list of each node in the string
+ */
+var smartSplit = function (str) {
+ // Ensure we have an even number of quotes
+ if (countQuotes(str) % 2 != 0) {
+ throw new Error ("Invalid Lookup Expression");
+ }
+
+ /**
+ * This regex matches a single "node" in a lookup string.
+ * In otherwords, it matches the part between the two '/'s
+ *
+ * Regex Explanation:
+ * \/ - start matching at the first forward slash
+ * ([^\/"]*"[^"]*")* - match as many pairs of quotes as possible until we hit a slash (ignore slashes inside quotes)
+ * [^\/]* - match the remainder of text outside of last quote but before next slash
+ */
+ var re = /\/([^\/"]*"[^"]*")*[^\/]*/g
+ var ret = []
+ var match = re.exec(str);
+
+ while (match != null) {
+ ret.push(match[0].replace(/^\//, ""));
+ match = re.exec(str);
+ }
+
+ return ret;
+};
+
+/**
+ * defaultDocuments()
+ *
+ * Returns a list of default documents in which to search for elements
+ * if no document is provided
+ */
+function defaultDocuments() {
+ var win = Services.wm.getMostRecentWindow("navigator:browser");
+
+ return [
+ win.document,
+ utils.getBrowserObject(win).selectedBrowser.contentWindow.document
+ ];
+};
+
+/**
+ * nodeSearch()
+ *
+ * Takes an optional document, callback and locator string
+ * Returns a handle to the located element or null
+ */
+function nodeSearch(doc, func, string) {
+ if (doc != undefined) {
+ var documents = [doc];
+ } else {
+ var documents = defaultDocuments();
+ }
+
+ var e = null;
+ var element = null;
+
+ //inline function to recursively find the element in the DOM, cross frame.
+ var search = function (win, func, string) {
+ if (win == null) {
+ return;
+ }
+
+ //do the lookup in the current window
+ element = func.call(win, string);
+
+ if (!element || (element.length == 0)) {
+ var frames = win.frames;
+ for (var i = 0; i < frames.length; i++) {
+ search(frames[i], func, string);
+ }
+ } else {
+ e = element;
+ }
+ };
+
+ for (var i = 0; i < documents.length; ++i) {
+ var win = documents[i].defaultView;
+ search(win, func, string);
+ if (e) {
+ break;
+ }
+ }
+
+ return e;
+};
+
+/**
+ * Selector()
+ *
+ * Finds an element by selector string
+ */
+function Selector(_document, selector, index) {
+ if (selector == undefined) {
+ throw new Error('Selector constructor did not recieve enough arguments.');
+ }
+
+ this.selector = selector;
+
+ this.getNodeForDocument = function (s) {
+ return this.document.querySelectorAll(s);
+ };
+
+ var nodes = nodeSearch(_document, this.getNodeForDocument, this.selector);
+
+ return nodes ? nodes[index || 0] : null;
+};
+
+/**
+ * ID()
+ *
+ * Finds an element by ID
+ */
+function ID(_document, nodeID) {
+ if (nodeID == undefined) {
+ throw new Error('ID constructor did not recieve enough arguments.');
+ }
+
+ this.getNodeForDocument = function (nodeID) {
+ return this.document.getElementById(nodeID);
+ };
+
+ return nodeSearch(_document, this.getNodeForDocument, nodeID);
+};
+
+/**
+ * Link()
+ *
+ * Finds a link by innerHTML
+ */
+function Link(_document, linkName) {
+ if (linkName == undefined) {
+ throw new Error('Link constructor did not recieve enough arguments.');
+ }
+
+ this.getNodeForDocument = function (linkName) {
+ var getText = function (el) {
+ var text = "";
+
+ if (el.nodeType == 3) { //textNode
+ if (el.data != undefined) {
+ text = el.data;
+ } else {
+ text = el.innerHTML;
+ }
+
+ text = text.replace(/n|r|t/g, " ");
+ }
+ else if (el.nodeType == 1) { //elementNode
+ for (var i = 0; i < el.childNodes.length; i++) {
+ var child = el.childNodes.item(i);
+ text += getText(child);
+ }
+
+ if (el.tagName == "P" || el.tagName == "BR" ||
+ el.tagName == "HR" || el.tagName == "DIV") {
+ text += "\n";
+ }
+ }
+
+ return text;
+ };
+
+ //sometimes the windows won't have this function
+ try {
+ var links = this.document.getElementsByTagName('a');
+ } catch (e) {
+ // ADD LOG LINE mresults.write('Error: '+ e, 'lightred');
+ }
+
+ for (var i = 0; i < links.length; i++) {
+ var el = links[i];
+ //if (getText(el).indexOf(this.linkName) != -1) {
+ if (el.innerHTML.indexOf(linkName) != -1) {
+ return el;
+ }
+ }
+
+ return null;
+ };
+
+ return nodeSearch(_document, this.getNodeForDocument, linkName);
+};
+
+/**
+ * XPath()
+ *
+ * Finds an element by XPath
+ */
+function XPath(_document, expr) {
+ if (expr == undefined) {
+ throw new Error('XPath constructor did not recieve enough arguments.');
+ }
+
+ this.getNodeForDocument = function (s) {
+ var aNode = this.document;
+ var aExpr = s;
+ var xpe = null;
+
+ if (this.document.defaultView == null) {
+ xpe = new getMethodInWindows('XPathEvaluator')();
+ } else {
+ xpe = new this.document.defaultView.XPathEvaluator();
+ }
+
+ var nsResolver = xpe.createNSResolver(aNode.ownerDocument == null ? aNode.documentElement
+ : aNode.ownerDocument.documentElement);
+ var result = xpe.evaluate(aExpr, aNode, nsResolver, 0, null);
+ var found = [];
+ var res;
+
+ while (res = result.iterateNext()) {
+ found.push(res);
+ }
+
+ return found[0];
+ };
+
+ return nodeSearch(_document, this.getNodeForDocument, expr);
+};
+
+/**
+ * Name()
+ *
+ * Finds an element by Name
+ */
+function Name(_document, nName) {
+ if (nName == undefined) {
+ throw new Error('Name constructor did not recieve enough arguments.');
+ }
+
+ this.getNodeForDocument = function (s) {
+ try{
+ var els = this.document.getElementsByName(s);
+ if (els.length > 0) {
+ return els[0];
+ }
+ } catch (e) {
+ }
+
+ return null;
+ };
+
+ return nodeSearch(_document, this.getNodeForDocument, nName);
+};
+
+
+var _returnResult = function (results) {
+ if (results.length == 0) {
+ return null
+ }
+ else if (results.length == 1) {
+ return results[0];
+ } else {
+ return results;
+ }
+}
+
+var _forChildren = function (element, name, value) {
+ var results = [];
+ var nodes = Array.from(element.childNodes).filter(e => e);
+
+ for (var i in nodes) {
+ var n = nodes[i];
+ if (n[name] == value) {
+ results.push(n);
+ }
+ }
+
+ return results;
+}
+
+var _forAnonChildren = function (_document, element, name, value) {
+ var results = [];
+ var nodes = Array.from(_document.getAnoymousNodes(element)).filter(e => e);
+
+ for (var i in nodes ) {
+ var n = nodes[i];
+ if (n[name] == value) {
+ results.push(n);
+ }
+ }
+
+ return results;
+}
+
+var _byID = function (_document, parent, value) {
+ return _returnResult(_forChildren(parent, 'id', value));
+}
+
+var _byName = function (_document, parent, value) {
+ return _returnResult(_forChildren(parent, 'tagName', value));
+}
+
+var _byAttrib = function (parent, attributes) {
+ var results = [];
+ var nodes = parent.childNodes;
+
+ for (var i in nodes) {
+ var n = nodes[i];
+ requirementPass = 0;
+ requirementLength = 0;
+
+ for (var a in attributes) {
+ requirementLength++;
+ try {
+ if (n.getAttribute(a) == attributes[a]) {
+ requirementPass++;
+ }
+ } catch (e) {
+ // Workaround any bugs in custom attribute crap in XUL elements
+ }
+ }
+
+ if (requirementPass == requirementLength) {
+ results.push(n);
+ }
+ }
+
+ return _returnResult(results)
+}
+
+var _byAnonAttrib = function (_document, parent, attributes) {
+ var results = [];
+
+ if (objects.getLength(attributes) == 1) {
+ for (var i in attributes) {
+ var k = i;
+ var v = attributes[i];
+ }
+
+ var result = _document.getAnonymousElementByAttribute(parent, k, v);
+ if (result) {
+ return result;
+ }
+ }
+
+ var nodes = Array.from(_document.getAnonymousNodes(parent)).filter(n => n.getAttribute);
+
+ function resultsForNodes (nodes) {
+ for (var i in nodes) {
+ var n = nodes[i];
+ requirementPass = 0;
+ requirementLength = 0;
+
+ for (var a in attributes) {
+ requirementLength++;
+ if (n.getAttribute(a) == attributes[a]) {
+ requirementPass++;
+ }
+ }
+
+ if (requirementPass == requirementLength) {
+ results.push(n);
+ }
+ }
+ }
+
+ resultsForNodes(nodes);
+ if (results.length == 0) {
+ resultsForNodes(Array.from(parent.childNodes).filter(n => n != undefined && n.getAttribute));
+ }
+
+ return _returnResult(results)
+}
+
+var _byIndex = function (_document, parent, i) {
+ if (parent instanceof Array) {
+ return parent[i];
+ }
+
+ return parent.childNodes[i];
+}
+
+var _anonByName = function (_document, parent, value) {
+ return _returnResult(_forAnonChildren(_document, parent, 'tagName', value));
+}
+
+var _anonByAttrib = function (_document, parent, value) {
+ return _byAnonAttrib(_document, parent, value);
+}
+
+var _anonByIndex = function (_document, parent, i) {
+ return _document.getAnonymousNodes(parent)[i];
+}
+
+/**
+ * Lookup()
+ *
+ * Finds an element by Lookup expression
+ */
+function Lookup(_document, expression) {
+ if (expression == undefined) {
+ throw new Error('Lookup constructor did not recieve enough arguments.');
+ }
+
+ var expSplit = smartSplit(expression).filter(e => e != '');
+ expSplit.unshift(_document);
+
+ var nCases = {'id':_byID, 'name':_byName, 'attrib':_byAttrib, 'index':_byIndex};
+ var aCases = {'name':_anonByName, 'attrib':_anonByAttrib, 'index':_anonByIndex};
+
+ /**
+ * Reduces the lookup expression
+ * @param {Object} parentNode
+ * Parent node (previousValue of the formerly executed reduce callback)
+ * @param {String} exp
+ * Lookup expression for the parents child node
+ *
+ * @returns {Object} Node found by the given expression
+ */
+ var reduceLookup = function (parentNode, exp) {
+ // Abort in case the parent node was not found
+ if (!parentNode) {
+ return false;
+ }
+
+ // Handle case where only index is provided
+ var cases = nCases;
+
+ // Handle ending index before any of the expression gets mangled
+ if (withs.endsWith(exp, ']')) {
+ var expIndex = json2.JSON.parse(strings.vslice(exp, '[', ']'));
+ }
+
+ // Handle anon
+ if (withs.startsWith(exp, 'anon')) {
+ exp = strings.vslice(exp, '(', ')');
+ cases = aCases;
+ }
+
+ if (withs.startsWith(exp, '[')) {
+ try {
+ var obj = json2.JSON.parse(strings.vslice(exp, '[', ']'));
+ } catch (e) {
+ throw new SyntaxError(e + '. String to be parsed was || ' +
+ strings.vslice(exp, '[', ']') + ' ||');
+ }
+
+ var r = cases['index'](_document, parentNode, obj);
+ if (r == null) {
+ throw new SyntaxError('Expression "' + exp +
+ '" returned null. Anonymous == ' + (cases == aCases));
+ }
+
+ return r;
+ }
+
+ for (var c in cases) {
+ if (withs.startsWith(exp, c)) {
+ try {
+ var obj = json2.JSON.parse(strings.vslice(exp, '(', ')'))
+ } catch (e) {
+ throw new SyntaxError(e + '. String to be parsed was || ' +
+ strings.vslice(exp, '(', ')') + ' ||');
+ }
+ var result = cases[c](_document, parentNode, obj);
+ }
+ }
+
+ if (!result) {
+ if (withs.startsWith(exp, '{')) {
+ try {
+ var obj = json2.JSON.parse(exp);
+ } catch (e) {
+ throw new SyntaxError(e + '. String to be parsed was || ' + exp + ' ||');
+ }
+
+ if (cases == aCases) {
+ var result = _anonByAttrib(_document, parentNode, obj);
+ } else {
+ var result = _byAttrib(parentNode, obj);
+ }
+ }
+ }
+
+ // Final return
+ if (expIndex) {
+ // TODO: Check length and raise error
+ return result[expIndex];
+ } else {
+ // TODO: Check length and raise error
+ return result;
+ }
+
+ // Maybe we should cause an exception here
+ return false;
+ };
+
+ return expSplit.reduce(reduceLookup);
+};
diff --git a/services/sync/tps/extensions/mozmill/resource/driver/mozelement.js b/services/sync/tps/extensions/mozmill/resource/driver/mozelement.js
new file mode 100644
index 000000000..850c86523
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/driver/mozelement.js
@@ -0,0 +1,1163 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ["Elem", "Selector", "ID", "Link", "XPath", "Name", "Lookup",
+ "MozMillElement", "MozMillCheckBox", "MozMillRadio", "MozMillDropList",
+ "MozMillTextBox", "subclasses"
+ ];
+
+const NAMESPACE_XUL = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul";
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+var EventUtils = {}; Cu.import('resource://mozmill/stdlib/EventUtils.js', EventUtils);
+
+var assertions = {}; Cu.import('resource://mozmill/modules/assertions.js', assertions);
+var broker = {}; Cu.import('resource://mozmill/driver/msgbroker.js', broker);
+var elementslib = {}; Cu.import('resource://mozmill/driver/elementslib.js', elementslib);
+var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils);
+
+var assert = new assertions.Assert();
+
+// A list of all the subclasses available. Shared modules can push their own subclasses onto this list
+var subclasses = [MozMillCheckBox, MozMillRadio, MozMillDropList, MozMillTextBox];
+
+/**
+ * createInstance()
+ *
+ * Returns an new instance of a MozMillElement
+ * The type of the element is automatically determined
+ */
+function createInstance(locatorType, locator, elem, document) {
+ var args = { "document": document, "element": elem };
+
+ // If we already have an element lets determine the best MozMillElement type
+ if (elem) {
+ for (var i = 0; i < subclasses.length; ++i) {
+ if (subclasses[i].isType(elem)) {
+ return new subclasses[i](locatorType, locator, args);
+ }
+ }
+ }
+
+ // By default we create a base MozMillElement
+ if (MozMillElement.isType(elem)) {
+ return new MozMillElement(locatorType, locator, args);
+ }
+
+ throw new Error("Unsupported element type " + locatorType + ": " + locator);
+}
+
+var Elem = function (node) {
+ return createInstance("Elem", node, node);
+};
+
+var Selector = function (document, selector, index) {
+ return createInstance("Selector", selector, elementslib.Selector(document, selector, index), document);
+};
+
+var ID = function (document, nodeID) {
+ return createInstance("ID", nodeID, elementslib.ID(document, nodeID), document);
+};
+
+var Link = function (document, linkName) {
+ return createInstance("Link", linkName, elementslib.Link(document, linkName), document);
+};
+
+var XPath = function (document, expr) {
+ return createInstance("XPath", expr, elementslib.XPath(document, expr), document);
+};
+
+var Name = function (document, nName) {
+ return createInstance("Name", nName, elementslib.Name(document, nName), document);
+};
+
+var Lookup = function (document, expression) {
+ var elem = createInstance("Lookup", expression, elementslib.Lookup(document, expression), document);
+
+ // Bug 864268 - Expose the expression property to maintain backwards compatibility
+ elem.expression = elem._locator;
+
+ return elem;
+};
+
+/**
+ * MozMillElement
+ * The base class for all mozmill elements
+ */
+function MozMillElement(locatorType, locator, args) {
+ args = args || {};
+ this._locatorType = locatorType;
+ this._locator = locator;
+ this._element = args["element"];
+ this._owner = args["owner"];
+
+ this._document = this._element ? this._element.ownerDocument : args["document"];
+ this._defaultView = this._document ? this._document.defaultView : null;
+
+ // Used to maintain backwards compatibility with controller.js
+ this.isElement = true;
+}
+
+// Static method that returns true if node is of this element type
+MozMillElement.isType = function (node) {
+ return true;
+};
+
+// This getter is the magic behind lazy loading (note distinction between _element and element)
+MozMillElement.prototype.__defineGetter__("element", function () {
+ // If the document is invalid (e.g. reload of the page), invalidate the cached
+ // element and update the document cache
+ if (this._defaultView && this._defaultView.document !== this._document) {
+ this._document = this._defaultView.document;
+ this._element = undefined;
+ }
+
+ if (this._element == undefined) {
+ if (elementslib[this._locatorType]) {
+ this._element = elementslib[this._locatorType](this._document, this._locator);
+ } else if (this._locatorType == "Elem") {
+ this._element = this._locator;
+ } else {
+ throw new Error("Unknown locator type: " + this._locatorType);
+ }
+ }
+
+ return this._element;
+});
+
+/**
+ * Drag an element to the specified offset on another element, firing mouse and
+ * drag events. Adapted from EventUtils.js synthesizeDrop()
+ *
+ * By default it will drag the source element over the destination's element
+ * center with a "move" dropEffect.
+ *
+ * @param {MozElement} aElement
+ * Destination element over which the drop occurs
+ * @param {Number} [aOffsetX=aElement.width/2]
+ * Relative x offset for dropping on aElement
+ * @param {Number} [aOffsetY=aElement.height/2]
+ * Relative y offset for dropping on aElement
+ * @param {DOMWindow} [aSourceWindow=this.element.ownerDocument.defaultView]
+ * Custom source Window to be used.
+ * @param {DOMWindow} [aDestWindow=aElement.getNode().ownerDocument.defaultView]
+ * Custom destination Window to be used.
+ * @param {String} [aDropEffect="move"]
+ * Possible values: copy, move, link, none
+ * @param {Object[]} [aDragData]
+ * An array holding custom drag data to be used during the drag event
+ * Format: [{ type: "text/plain", "Text to drag"}, ...]
+ *
+ * @returns {String} the captured dropEffect
+ */
+MozMillElement.prototype.dragToElement = function(aElement, aOffsetX, aOffsetY,
+ aSourceWindow, aDestWindow,
+ aDropEffect, aDragData) {
+ if (!this.element) {
+ throw new Error("Could not find element " + this.getInfo());
+ }
+ if (!aElement) {
+ throw new Error("Missing destination element");
+ }
+
+ var srcNode = this.element;
+ var destNode = aElement.getNode();
+ var srcWindow = aSourceWindow ||
+ (srcNode.ownerDocument ? srcNode.ownerDocument.defaultView
+ : srcNode);
+ var destWindow = aDestWindow ||
+ (destNode.ownerDocument ? destNode.ownerDocument.defaultView
+ : destNode);
+
+ var srcRect = srcNode.getBoundingClientRect();
+ var srcCoords = {
+ x: srcRect.width / 2,
+ y: srcRect.height / 2
+ };
+ var destRect = destNode.getBoundingClientRect();
+ var destCoords = {
+ x: (!aOffsetX || isNaN(aOffsetX)) ? (destRect.width / 2) : aOffsetX,
+ y: (!aOffsetY || isNaN(aOffsetY)) ? (destRect.height / 2) : aOffsetY
+ };
+
+ var windowUtils = destWindow.QueryInterface(Ci.nsIInterfaceRequestor)
+ .getInterface(Ci.nsIDOMWindowUtils);
+ var ds = Cc["@mozilla.org/widget/dragservice;1"].getService(Ci.nsIDragService);
+
+ var dataTransfer;
+ var trapDrag = function (event) {
+ srcWindow.removeEventListener("dragstart", trapDrag, true);
+ dataTransfer = event.dataTransfer;
+
+ if (!aDragData) {
+ return;
+ }
+
+ for (var i = 0; i < aDragData.length; i++) {
+ var item = aDragData[i];
+ for (var j = 0; j < item.length; j++) {
+ dataTransfer.mozSetDataAt(item[j].type, item[j].data, i);
+ }
+ }
+
+ dataTransfer.dropEffect = aDropEffect || "move";
+ event.preventDefault();
+ event.stopPropagation();
+ }
+
+ ds.startDragSession();
+
+ try {
+ srcWindow.addEventListener("dragstart", trapDrag, true);
+ EventUtils.synthesizeMouse(srcNode, srcCoords.x, srcCoords.y,
+ { type: "mousedown" }, srcWindow);
+ EventUtils.synthesizeMouse(destNode, destCoords.x, destCoords.y,
+ { type: "mousemove" }, destWindow);
+
+ var event = destWindow.document.createEvent("DragEvent");
+ event.initDragEvent("dragenter", true, true, destWindow, 0, 0, 0, 0, 0,
+ false, false, false, false, 0, null, dataTransfer);
+ event.initDragEvent("dragover", true, true, destWindow, 0, 0, 0, 0, 0,
+ false, false, false, false, 0, null, dataTransfer);
+ event.initDragEvent("drop", true, true, destWindow, 0, 0, 0, 0, 0,
+ false, false, false, false, 0, null, dataTransfer);
+ windowUtils.dispatchDOMEventViaPresShell(destNode, event, true);
+
+ EventUtils.synthesizeMouse(destNode, destCoords.x, destCoords.y,
+ { type: "mouseup" }, destWindow);
+
+ return dataTransfer.dropEffect;
+ } finally {
+ ds.endDragSession(true);
+ }
+
+};
+
+// Returns the actual wrapped DOM node
+MozMillElement.prototype.getNode = function () {
+ return this.element;
+};
+
+MozMillElement.prototype.getInfo = function () {
+ return this._locatorType + ": " + this._locator;
+};
+
+/**
+ * Sometimes an element which once existed will no longer exist in the DOM
+ * This function re-searches for the element
+ */
+MozMillElement.prototype.exists = function () {
+ this._element = undefined;
+ if (this.element) {
+ return true;
+ }
+
+ return false;
+};
+
+/**
+ * Synthesize a keypress event on the given element
+ *
+ * @param {string} aKey
+ * Key to use for synthesizing the keypress event. It can be a simple
+ * character like "k" or a string like "VK_ESCAPE" for command keys
+ * @param {object} aModifiers
+ * Information about the modifier keys to send
+ * Elements: accelKey - Hold down the accelerator key (ctrl/meta)
+ * [optional - default: false]
+ * altKey - Hold down the alt key
+ * [optional - default: false]
+ * ctrlKey - Hold down the ctrl key
+ * [optional - default: false]
+ * metaKey - Hold down the meta key (command key on Mac)
+ * [optional - default: false]
+ * shiftKey - Hold down the shift key
+ * [optional - default: false]
+ * @param {object} aExpectedEvent
+ * Information about the expected event to occur
+ * Elements: target - Element which should receive the event
+ * [optional - default: current element]
+ * type - Type of the expected key event
+ */
+MozMillElement.prototype.keypress = function (aKey, aModifiers, aExpectedEvent) {
+ if (!this.element) {
+ throw new Error("Could not find element " + this.getInfo());
+ }
+
+ var win = this.element.ownerDocument ? this.element.ownerDocument.defaultView
+ : this.element;
+ this.element.focus();
+
+ if (aExpectedEvent) {
+ if (!aExpectedEvent.type) {
+ throw new Error(arguments.callee.name + ": Expected event type not specified");
+ }
+
+ var target = aExpectedEvent.target ? aExpectedEvent.target.getNode()
+ : this.element;
+ EventUtils.synthesizeKeyExpectEvent(aKey, aModifiers || {}, target, aExpectedEvent.type,
+ "MozMillElement.keypress()", win);
+ } else {
+ EventUtils.synthesizeKey(aKey, aModifiers || {}, win);
+ }
+
+ broker.pass({'function':'MozMillElement.keypress()'});
+
+ return true;
+};
+
+
+/**
+ * Synthesize a general mouse event on the given element
+ *
+ * @param {number} aOffsetX
+ * Relative x offset in the elements bounds to click on
+ * @param {number} aOffsetY
+ * Relative y offset in the elements bounds to click on
+ * @param {object} aEvent
+ * Information about the event to send
+ * Elements: accelKey - Hold down the accelerator key (ctrl/meta)
+ * [optional - default: false]
+ * altKey - Hold down the alt key
+ * [optional - default: false]
+ * button - Mouse button to use
+ * [optional - default: 0]
+ * clickCount - Number of counts to click
+ * [optional - default: 1]
+ * ctrlKey - Hold down the ctrl key
+ * [optional - default: false]
+ * metaKey - Hold down the meta key (command key on Mac)
+ * [optional - default: false]
+ * shiftKey - Hold down the shift key
+ * [optional - default: false]
+ * type - Type of the mouse event ('click', 'mousedown',
+ * 'mouseup', 'mouseover', 'mouseout')
+ * [optional - default: 'mousedown' + 'mouseup']
+ * @param {object} aExpectedEvent
+ * Information about the expected event to occur
+ * Elements: target - Element which should receive the event
+ * [optional - default: current element]
+ * type - Type of the expected mouse event
+ */
+MozMillElement.prototype.mouseEvent = function (aOffsetX, aOffsetY, aEvent, aExpectedEvent) {
+ if (!this.element) {
+ throw new Error(arguments.callee.name + ": could not find element " + this.getInfo());
+ }
+
+ if ("document" in this.element) {
+ throw new Error("A window cannot be a target for mouse events.");
+ }
+
+ var rect = this.element.getBoundingClientRect();
+
+ if (!aOffsetX || isNaN(aOffsetX)) {
+ aOffsetX = rect.width / 2;
+ }
+
+ if (!aOffsetY || isNaN(aOffsetY)) {
+ aOffsetY = rect.height / 2;
+ }
+
+ // Scroll element into view otherwise the click will fail
+ if ("scrollIntoView" in this.element)
+ this.element.scrollIntoView();
+
+ if (aExpectedEvent) {
+ // The expected event type has to be set
+ if (!aExpectedEvent.type) {
+ throw new Error(arguments.callee.name + ": Expected event type not specified");
+ }
+
+ // If no target has been specified use the specified element
+ var target = aExpectedEvent.target ? aExpectedEvent.target.getNode()
+ : this.element;
+ if (!target) {
+ throw new Error(arguments.callee.name + ": could not find element " +
+ aExpectedEvent.target.getInfo());
+ }
+
+ EventUtils.synthesizeMouseExpectEvent(this.element, aOffsetX, aOffsetY, aEvent,
+ target, aExpectedEvent.type,
+ "MozMillElement.mouseEvent()",
+ this.element.ownerDocument.defaultView);
+ } else {
+ EventUtils.synthesizeMouse(this.element, aOffsetX, aOffsetY, aEvent,
+ this.element.ownerDocument.defaultView);
+ }
+
+ // Bug 555347
+ // We don't know why this sleep is necessary but more investigation is needed
+ // before it can be removed
+ utils.sleep(0);
+
+ return true;
+};
+
+/**
+ * Synthesize a mouse click event on the given element
+ */
+MozMillElement.prototype.click = function (aOffsetX, aOffsetY, aExpectedEvent) {
+ // Handle menu items differently
+ if (this.element && this.element.tagName == "menuitem") {
+ this.element.click();
+ } else {
+ this.mouseEvent(aOffsetX, aOffsetY, {}, aExpectedEvent);
+ }
+
+ broker.pass({'function':'MozMillElement.click()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a double click on the given element
+ */
+MozMillElement.prototype.doubleClick = function (aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {clickCount: 2}, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.doubleClick()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a mouse down event on the given element
+ */
+MozMillElement.prototype.mouseDown = function (aButton, aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {button: aButton, type: "mousedown"}, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.mouseDown()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a mouse out event on the given element
+ */
+MozMillElement.prototype.mouseOut = function (aButton, aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {button: aButton, type: "mouseout"}, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.mouseOut()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a mouse over event on the given element
+ */
+MozMillElement.prototype.mouseOver = function (aButton, aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {button: aButton, type: "mouseover"}, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.mouseOver()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a mouse up event on the given element
+ */
+MozMillElement.prototype.mouseUp = function (aButton, aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {button: aButton, type: "mouseup"}, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.mouseUp()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a mouse middle click event on the given element
+ */
+MozMillElement.prototype.middleClick = function (aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {button: 1}, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.middleClick()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a mouse right click event on the given element
+ */
+MozMillElement.prototype.rightClick = function (aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {type : "contextmenu", button: 2 }, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.rightClick()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a general touch event on the given element
+ *
+ * @param {Number} [aOffsetX=aElement.width / 2]
+ * Relative x offset in the elements bounds to click on
+ * @param {Number} [aOffsetY=aElement.height / 2]
+ * Relative y offset in the elements bounds to click on
+ * @param {Object} [aEvent]
+ * Information about the event to send
+ * @param {Boolean} [aEvent.altKey=false]
+ * A Boolean value indicating whether or not the alt key was down when
+ * the touch event was fired
+ * @param {Number} [aEvent.angle=0]
+ * The angle (in degrees) that the ellipse described by rx and
+ * ry must be rotated, clockwise, to most accurately cover the area
+ * of contact between the user and the surface.
+ * @param {Touch[]} [aEvent.changedTouches]
+ * A TouchList of all the Touch objects representing individual points of
+ * contact whose states changed between the previous touch event and
+ * this one
+ * @param {Boolean} [aEvent.ctrlKey]
+ * A Boolean value indicating whether or not the control key was down
+ * when the touch event was fired
+ * @param {Number} [aEvent.force=1]
+ * The amount of pressure being applied to the surface by the user, as a
+ * float between 0.0 (no pressure) and 1.0 (maximum pressure)
+ * @param {Number} [aEvent.id=0]
+ * A unique identifier for this Touch object. A given touch (say, by a
+ * finger) will have the same identifier for the duration of its movement
+ * around the surface. This lets you ensure that you're tracking the same
+ * touch all the time
+ * @param {Boolean} [aEvent.metaKey]
+ * A Boolean value indicating whether or not the meta key was down when
+ * the touch event was fired.
+ * @param {Number} [aEvent.rx=1]
+ * The X radius of the ellipse that most closely circumscribes the area
+ * of contact with the screen.
+ * @param {Number} [aEvent.ry=1]
+ * The Y radius of the ellipse that most closely circumscribes the area
+ * of contact with the screen.
+ * @param {Boolean} [aEvent.shiftKey]
+ * A Boolean value indicating whether or not the shift key was down when
+ * the touch event was fired
+ * @param {Touch[]} [aEvent.targetTouches]
+ * A TouchList of all the Touch objects that are both currently in
+ * contact with the touch surface and were also started on the same
+ * element that is the target of the event
+ * @param {Touch[]} [aEvent.touches]
+ * A TouchList of all the Touch objects representing all current points
+ * of contact with the surface, regardless of target or changed status
+ * @param {Number} [aEvent.type=*|touchstart|touchend|touchmove|touchenter|touchleave|touchcancel]
+ * The type of touch event that occurred
+ * @param {Element} [aEvent.target]
+ * The target of the touches associated with this event. This target
+ * corresponds to the target of all the touches in the targetTouches
+ * attribute, but note that other touches in this event may have a
+ * different target. To be careful, you should use the target associated
+ * with individual touches
+ */
+MozMillElement.prototype.touchEvent = function (aOffsetX, aOffsetY, aEvent) {
+ if (!this.element) {
+ throw new Error(arguments.callee.name + ": could not find element " + this.getInfo());
+ }
+
+ if ("document" in this.element) {
+ throw new Error("A window cannot be a target for touch events.");
+ }
+
+ var rect = this.element.getBoundingClientRect();
+
+ if (!aOffsetX || isNaN(aOffsetX)) {
+ aOffsetX = rect.width / 2;
+ }
+
+ if (!aOffsetY || isNaN(aOffsetY)) {
+ aOffsetY = rect.height / 2;
+ }
+
+ // Scroll element into view otherwise the click will fail
+ if ("scrollIntoView" in this.element) {
+ this.element.scrollIntoView();
+ }
+
+ EventUtils.synthesizeTouch(this.element, aOffsetX, aOffsetY, aEvent,
+ this.element.ownerDocument.defaultView);
+
+ return true;
+};
+
+/**
+ * Synthesize a touch tap event on the given element
+ *
+ * @param {Number} [aOffsetX=aElement.width / 2]
+ * Left offset in px where the event is triggered
+ * @param {Number} [aOffsetY=aElement.height / 2]
+ * Top offset in px where the event is triggered
+ * @param {Object} [aExpectedEvent]
+ * Information about the expected event to occur
+ * @param {MozMillElement} [aExpectedEvent.target=this.element]
+ * Element which should receive the event
+ * @param {MozMillElement} [aExpectedEvent.type]
+ * Type of the expected mouse event
+ */
+MozMillElement.prototype.tap = function (aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {
+ clickCount: 1,
+ inputSource: Ci.nsIDOMMouseEvent.MOZ_SOURCE_TOUCH
+ }, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.tap()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a double tap on the given element
+ *
+ * @param {Number} [aOffsetX=aElement.width / 2]
+ * Left offset in px where the event is triggered
+ * @param {Number} [aOffsetY=aElement.height / 2]
+ * Top offset in px where the event is triggered
+ * @param {Object} [aExpectedEvent]
+ * Information about the expected event to occur
+ * @param {MozMillElement} [aExpectedEvent.target=this.element]
+ * Element which should receive the event
+ * @param {MozMillElement} [aExpectedEvent.type]
+ * Type of the expected mouse event
+ */
+MozMillElement.prototype.doubleTap = function (aOffsetX, aOffsetY, aExpectedEvent) {
+ this.mouseEvent(aOffsetX, aOffsetY, {
+ clickCount: 2,
+ inputSource: Ci.nsIDOMMouseEvent.MOZ_SOURCE_TOUCH
+ }, aExpectedEvent);
+
+ broker.pass({'function':'MozMillElement.doubleTap()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a long press
+ *
+ * @param {Number} aOffsetX
+ * Left offset in px where the event is triggered
+ * @param {Number} aOffsetY
+ * Top offset in px where the event is triggered
+ * @param {Number} [aTime=1000]
+ * Duration of the "press" event in ms
+ */
+MozMillElement.prototype.longPress = function (aOffsetX, aOffsetY, aTime) {
+ var time = aTime || 1000;
+
+ this.touchStart(aOffsetX, aOffsetY);
+ utils.sleep(time);
+ this.touchEnd(aOffsetX, aOffsetY);
+
+ broker.pass({'function':'MozMillElement.longPress()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a touch & drag event on the given element
+ *
+ * @param {Number} aOffsetX1
+ * Left offset of the start position
+ * @param {Number} aOffsetY1
+ * Top offset of the start position
+ * @param {Number} aOffsetX2
+ * Left offset of the end position
+ * @param {Number} aOffsetY2
+ * Top offset of the end position
+ */
+MozMillElement.prototype.touchDrag = function (aOffsetX1, aOffsetY1, aOffsetX2, aOffsetY2) {
+ this.touchStart(aOffsetX1, aOffsetY1);
+ this.touchMove(aOffsetX2, aOffsetY2);
+ this.touchEnd(aOffsetX2, aOffsetY2);
+
+ broker.pass({'function':'MozMillElement.move()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a press / touchstart event on the given element
+ *
+ * @param {Number} aOffsetX
+ * Left offset where the event is triggered
+ * @param {Number} aOffsetY
+ * Top offset where the event is triggered
+ */
+MozMillElement.prototype.touchStart = function (aOffsetX, aOffsetY) {
+ this.touchEvent(aOffsetX, aOffsetY, { type: "touchstart" });
+
+ broker.pass({'function':'MozMillElement.touchStart()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a release / touchend event on the given element
+ *
+ * @param {Number} aOffsetX
+ * Left offset where the event is triggered
+ * @param {Number} aOffsetY
+ * Top offset where the event is triggered
+ */
+MozMillElement.prototype.touchEnd = function (aOffsetX, aOffsetY) {
+ this.touchEvent(aOffsetX, aOffsetY, { type: "touchend" });
+
+ broker.pass({'function':'MozMillElement.touchEnd()'});
+
+ return true;
+};
+
+/**
+ * Synthesize a touchMove event on the given element
+ *
+ * @param {Number} aOffsetX
+ * Left offset where the event is triggered
+ * @param {Number} aOffsetY
+ * Top offset where the event is triggered
+ */
+MozMillElement.prototype.touchMove = function (aOffsetX, aOffsetY) {
+ this.touchEvent(aOffsetX, aOffsetY, { type: "touchmove" });
+
+ broker.pass({'function':'MozMillElement.touchMove()'});
+
+ return true;
+};
+
+MozMillElement.prototype.waitForElement = function (timeout, interval) {
+ var elem = this;
+
+ assert.waitFor(function () {
+ return elem.exists();
+ }, "Element.waitForElement(): Element '" + this.getInfo() +
+ "' has been found", timeout, interval);
+
+ broker.pass({'function':'MozMillElement.waitForElement()'});
+};
+
+MozMillElement.prototype.waitForElementNotPresent = function (timeout, interval) {
+ var elem = this;
+
+ assert.waitFor(function () {
+ return !elem.exists();
+ }, "Element.waitForElementNotPresent(): Element '" + this.getInfo() +
+ "' has not been found", timeout, interval);
+
+ broker.pass({'function':'MozMillElement.waitForElementNotPresent()'});
+};
+
+MozMillElement.prototype.waitThenClick = function (timeout, interval,
+ aOffsetX, aOffsetY, aExpectedEvent) {
+ this.waitForElement(timeout, interval);
+ this.click(aOffsetX, aOffsetY, aExpectedEvent);
+};
+
+/**
+ * Waits for the element to be available in the DOM, then trigger a tap event
+ *
+ * @param {Number} [aTimeout=5000]
+ * Time to wait for the element to be available
+ * @param {Number} [aInterval=100]
+ * Interval to check for availability
+ * @param {Number} [aOffsetX=aElement.width / 2]
+ * Left offset where the event is triggered
+ * @param {Number} [aOffsetY=aElement.height / 2]
+ * Top offset where the event is triggered
+ * @param {Object} [aExpectedEvent]
+ * Information about the expected event to occur
+ * @param {MozMillElement} [aExpectedEvent.target=this.element]
+ * Element which should receive the event
+ * @param {MozMillElement} [aExpectedEvent.type]
+ * Type of the expected mouse event
+ */
+MozMillElement.prototype.waitThenTap = function (aTimeout, aInterval,
+ aOffsetX, aOffsetY, aExpectedEvent) {
+ this.waitForElement(aTimeout, aInterval);
+ this.tap(aOffsetX, aOffsetY, aExpectedEvent);
+};
+
+// Dispatches an HTMLEvent
+MozMillElement.prototype.dispatchEvent = function (eventType, canBubble, modifiers) {
+ canBubble = canBubble || true;
+ modifiers = modifiers || { };
+
+ let document = 'ownerDocument' in this.element ? this.element.ownerDocument
+ : this.element.document;
+
+ let evt = document.createEvent('HTMLEvents');
+ evt.shiftKey = modifiers["shift"];
+ evt.metaKey = modifiers["meta"];
+ evt.altKey = modifiers["alt"];
+ evt.ctrlKey = modifiers["ctrl"];
+ evt.initEvent(eventType, canBubble, true);
+
+ this.element.dispatchEvent(evt);
+};
+
+
+/**
+ * MozMillCheckBox, which inherits from MozMillElement
+ */
+function MozMillCheckBox(locatorType, locator, args) {
+ MozMillElement.call(this, locatorType, locator, args);
+}
+
+
+MozMillCheckBox.prototype = Object.create(MozMillElement.prototype, {
+ check : {
+ /**
+ * Enable/Disable a checkbox depending on the target state
+ *
+ * @param {boolean} state State to set
+ * @return {boolean} Success state
+ */
+ value : function MMCB_check(state) {
+ var result = false;
+
+ if (!this.element) {
+ throw new Error("could not find element " + this.getInfo());
+ }
+
+ // If we have a XUL element, unwrap its XPCNativeWrapper
+ if (this.element.namespaceURI == NAMESPACE_XUL) {
+ this.element = utils.unwrapNode(this.element);
+ }
+
+ state = (typeof(state) == "boolean") ? state : false;
+ if (state != this.element.checked) {
+ this.click();
+ var element = this.element;
+
+ assert.waitFor(function () {
+ return element.checked == state;
+ }, "CheckBox.check(): Checkbox " + this.getInfo() + " could not be checked/unchecked", 500);
+
+ result = true;
+ }
+
+ broker.pass({'function':'MozMillCheckBox.check(' + this.getInfo() +
+ ', state: ' + state + ')'});
+
+ return result;
+ }
+ }
+});
+
+
+/**
+ * Returns true if node is of type MozMillCheckBox
+ *
+ * @static
+ * @param {DOMNode} node Node to check for its type
+ * @return {boolean} True if node is of type checkbox
+ */
+MozMillCheckBox.isType = function MMCB_isType(node) {
+ return ((node.localName.toLowerCase() == "input" && node.getAttribute("type") == "checkbox") ||
+ (node.localName.toLowerCase() == 'toolbarbutton' && node.getAttribute('type') == 'checkbox') ||
+ (node.localName.toLowerCase() == 'checkbox'));
+};
+
+
+/**
+ * MozMillRadio, which inherits from MozMillElement
+ */
+function MozMillRadio(locatorType, locator, args) {
+ MozMillElement.call(this, locatorType, locator, args);
+}
+
+
+MozMillRadio.prototype = Object.create(MozMillElement.prototype, {
+ select : {
+ /**
+ * Select the given radio button
+ *
+ * @param {number} [index=0]
+ * Specifies which radio button in the group to select (only
+ * applicable to radiogroup elements)
+ * @return {boolean} Success state
+ */
+ value : function MMR_select(index) {
+ if (!this.element) {
+ throw new Error("could not find element " + this.getInfo());
+ }
+
+ if (this.element.localName.toLowerCase() == "radiogroup") {
+ var element = this.element.getElementsByTagName("radio")[index || 0];
+ new MozMillRadio("Elem", element).click();
+ } else {
+ var element = this.element;
+ this.click();
+ }
+
+ assert.waitFor(function () {
+ // If we have a XUL element, unwrap its XPCNativeWrapper
+ if (element.namespaceURI == NAMESPACE_XUL) {
+ element = utils.unwrapNode(element);
+ return element.selected == true;
+ }
+
+ return element.checked == true;
+ }, "Radio.select(): Radio button " + this.getInfo() + " has been selected", 500);
+
+ broker.pass({'function':'MozMillRadio.select(' + this.getInfo() + ')'});
+
+ return true;
+ }
+ }
+});
+
+
+/**
+ * Returns true if node is of type MozMillRadio
+ *
+ * @static
+ * @param {DOMNode} node Node to check for its type
+ * @return {boolean} True if node is of type radio
+ */
+MozMillRadio.isType = function MMR_isType(node) {
+ return ((node.localName.toLowerCase() == 'input' && node.getAttribute('type') == 'radio') ||
+ (node.localName.toLowerCase() == 'toolbarbutton' && node.getAttribute('type') == 'radio') ||
+ (node.localName.toLowerCase() == 'radio') ||
+ (node.localName.toLowerCase() == 'radiogroup'));
+};
+
+
+/**
+ * MozMillDropList, which inherits from MozMillElement
+ */
+function MozMillDropList(locatorType, locator, args) {
+ MozMillElement.call(this, locatorType, locator, args);
+}
+
+
+MozMillDropList.prototype = Object.create(MozMillElement.prototype, {
+ select : {
+ /**
+ * Select the specified option and trigger the relevant events of the element
+ * @return {boolean}
+ */
+ value : function MMDL_select(index, option, value) {
+ if (!this.element){
+ throw new Error("Could not find element " + this.getInfo());
+ }
+
+ //if we have a select drop down
+ if (this.element.localName.toLowerCase() == "select"){
+ var item = null;
+
+ // The selected item should be set via its index
+ if (index != undefined) {
+ // Resetting a menulist has to be handled separately
+ if (index == -1) {
+ this.dispatchEvent('focus', false);
+ this.element.selectedIndex = index;
+ this.dispatchEvent('change', true);
+
+ broker.pass({'function':'MozMillDropList.select()'});
+
+ return true;
+ } else {
+ item = this.element.options.item(index);
+ }
+ } else {
+ for (var i = 0; i < this.element.options.length; i++) {
+ var entry = this.element.options.item(i);
+ if (option != undefined && entry.innerHTML == option ||
+ value != undefined && entry.value == value) {
+ item = entry;
+ break;
+ }
+ }
+ }
+
+ // Click the item
+ try {
+ // EventUtils.synthesizeMouse doesn't work.
+ this.dispatchEvent('focus', false);
+ item.selected = true;
+ this.dispatchEvent('change', true);
+
+ var self = this;
+ var selected = index || option || value;
+ assert.waitFor(function () {
+ switch (selected) {
+ case index:
+ return selected === self.element.selectedIndex;
+ break;
+ case option:
+ return selected === item.label;
+ break;
+ case value:
+ return selected === item.value;
+ break;
+ }
+ }, "DropList.select(): The correct item has been selected");
+
+ broker.pass({'function':'MozMillDropList.select()'});
+
+ return true;
+ } catch (e) {
+ throw new Error("No item selected for element " + this.getInfo());
+ }
+ }
+ //if we have a xul menupopup select accordingly
+ else if (this.element.namespaceURI.toLowerCase() == NAMESPACE_XUL) {
+ var ownerDoc = this.element.ownerDocument;
+ // Unwrap the XUL element's XPCNativeWrapper
+ this.element = utils.unwrapNode(this.element);
+ // Get the list of menuitems
+ var menuitems = this.element.
+ getElementsByTagNameNS(NAMESPACE_XUL, "menupopup")[0].
+ getElementsByTagNameNS(NAMESPACE_XUL, "menuitem");
+
+ var item = null;
+
+ if (index != undefined) {
+ if (index == -1) {
+ this.dispatchEvent('focus', false);
+ this.element.boxObject.activeChild = null;
+ this.dispatchEvent('change', true);
+
+ broker.pass({'function':'MozMillDropList.select()'});
+
+ return true;
+ } else {
+ item = menuitems[index];
+ }
+ } else {
+ for (var i = 0; i < menuitems.length; i++) {
+ var entry = menuitems[i];
+ if (option != undefined && entry.label == option ||
+ value != undefined && entry.value == value) {
+ item = entry;
+ break;
+ }
+ }
+ }
+
+ // Click the item
+ try {
+ item.click();
+
+ var self = this;
+ var selected = index || option || value;
+ assert.waitFor(function () {
+ switch (selected) {
+ case index:
+ return selected === self.element.selectedIndex;
+ break;
+ case option:
+ return selected === self.element.label;
+ break;
+ case value:
+ return selected === self.element.value;
+ break;
+ }
+ }, "DropList.select(): The correct item has been selected");
+
+ broker.pass({'function':'MozMillDropList.select()'});
+
+ return true;
+ } catch (e) {
+ throw new Error('No item selected for element ' + this.getInfo());
+ }
+ }
+ }
+ }
+});
+
+
+/**
+ * Returns true if node is of type MozMillDropList
+ *
+ * @static
+ * @param {DOMNode} node Node to check for its type
+ * @return {boolean} True if node is of type dropdown list
+ */
+MozMillDropList.isType = function MMR_isType(node) {
+ return ((node.localName.toLowerCase() == 'toolbarbutton' &&
+ (node.getAttribute('type') == 'menu' || node.getAttribute('type') == 'menu-button')) ||
+ (node.localName.toLowerCase() == 'menu') ||
+ (node.localName.toLowerCase() == 'menulist') ||
+ (node.localName.toLowerCase() == 'select' ));
+};
+
+
+/**
+ * MozMillTextBox, which inherits from MozMillElement
+ */
+function MozMillTextBox(locatorType, locator, args) {
+ MozMillElement.call(this, locatorType, locator, args);
+}
+
+
+MozMillTextBox.prototype = Object.create(MozMillElement.prototype, {
+ sendKeys : {
+ /**
+ * Synthesize keypress events for each character on the given element
+ *
+ * @param {string} aText
+ * The text to send as single keypress events
+ * @param {object} aModifiers
+ * Information about the modifier keys to send
+ * Elements: accelKey - Hold down the accelerator key (ctrl/meta)
+ * [optional - default: false]
+ * altKey - Hold down the alt key
+ * [optional - default: false]
+ * ctrlKey - Hold down the ctrl key
+ * [optional - default: false]
+ * metaKey - Hold down the meta key (command key on Mac)
+ * [optional - default: false]
+ * shiftKey - Hold down the shift key
+ * [optional - default: false]
+ * @param {object} aExpectedEvent
+ * Information about the expected event to occur
+ * Elements: target - Element which should receive the event
+ * [optional - default: current element]
+ * type - Type of the expected key event
+ * @return {boolean} Success state
+ */
+ value : function MMTB_sendKeys(aText, aModifiers, aExpectedEvent) {
+ if (!this.element) {
+ throw new Error("could not find element " + this.getInfo());
+ }
+
+ var element = this.element;
+ Array.forEach(aText, function (letter) {
+ var win = element.ownerDocument ? element.ownerDocument.defaultView
+ : element;
+ element.focus();
+
+ if (aExpectedEvent) {
+ if (!aExpectedEvent.type) {
+ throw new Error(arguments.callee.name + ": Expected event type not specified");
+ }
+
+ var target = aExpectedEvent.target ? aExpectedEvent.target.getNode()
+ : element;
+ EventUtils.synthesizeKeyExpectEvent(letter, aModifiers || {}, target,
+ aExpectedEvent.type,
+ "MozMillTextBox.sendKeys()", win);
+ } else {
+ EventUtils.synthesizeKey(letter, aModifiers || {}, win);
+ }
+ });
+
+ broker.pass({'function':'MozMillTextBox.type()'});
+
+ return true;
+ }
+ }
+});
+
+
+/**
+ * Returns true if node is of type MozMillTextBox
+ *
+ * @static
+ * @param {DOMNode} node Node to check for its type
+ * @return {boolean} True if node is of type textbox
+ */
+MozMillTextBox.isType = function MMR_isType(node) {
+ return ((node.localName.toLowerCase() == 'input' &&
+ (node.getAttribute('type') == 'text' || node.getAttribute('type') == 'search')) ||
+ (node.localName.toLowerCase() == 'textarea') ||
+ (node.localName.toLowerCase() == 'textbox'));
+};
diff --git a/services/sync/tps/extensions/mozmill/resource/driver/mozmill.js b/services/sync/tps/extensions/mozmill/resource/driver/mozmill.js
new file mode 100644
index 000000000..1e422591f
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/driver/mozmill.js
@@ -0,0 +1,285 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ["controller", "utils", "elementslib", "os",
+ "getBrowserController", "newBrowserController",
+ "getAddonsController", "getPreferencesController",
+ "newMail3PaneController", "getMail3PaneController",
+ "wm", "platform", "getAddrbkController",
+ "getMsgComposeController", "getDownloadsController",
+ "Application", "findElement",
+ "getPlacesController", 'isMac', 'isLinux', 'isWindows',
+ "firePythonCallback", "getAddons"
+ ];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+
+Cu.import("resource://gre/modules/AddonManager.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+
+// imports
+var assertions = {}; Cu.import('resource://mozmill/modules/assertions.js', assertions);
+var broker = {}; Cu.import('resource://mozmill/driver/msgbroker.js', broker);
+var controller = {}; Cu.import('resource://mozmill/driver/controller.js', controller);
+var elementslib = {}; Cu.import('resource://mozmill/driver/elementslib.js', elementslib);
+var findElement = {}; Cu.import('resource://mozmill/driver/mozelement.js', findElement);
+var os = {}; Cu.import('resource://mozmill/stdlib/os.js', os);
+var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils);
+var windows = {}; Cu.import('resource://mozmill/modules/windows.js', windows);
+
+
+const DEBUG = false;
+
+// This is a useful "check" timer. See utils.js, good for debugging
+if (DEBUG) {
+ utils.startTimer();
+}
+
+var assert = new assertions.Assert();
+
+// platform information
+var platform = os.getPlatform();
+var isMac = false;
+var isWindows = false;
+var isLinux = false;
+
+if (platform == "darwin"){
+ isMac = true;
+}
+
+if (platform == "winnt"){
+ isWindows = true;
+}
+
+if (platform == "linux"){
+ isLinux = true;
+}
+
+var wm = Services.wm;
+
+var appInfo = Services.appinfo;
+var Application = utils.applicationName;
+
+
+/**
+ * Retrieves the list with information about installed add-ons.
+ *
+ * @returns {String} JSON data of installed add-ons
+ */
+function getAddons() {
+ var addons = null;
+
+ AddonManager.getAllAddons(function (addonList) {
+ var tmp_list = [ ];
+
+ addonList.forEach(function (addon) {
+ var tmp = { };
+
+ // We have to filter out properties of type 'function' of the addon
+ // object, which will break JSON.stringify() and result in incomplete
+ // addon information.
+ for (var key in addon) {
+ if (typeof(addon[key]) !== "function") {
+ tmp[key] = addon[key];
+ }
+ }
+
+ tmp_list.push(tmp);
+ });
+
+ addons = tmp_list;
+ });
+
+ try {
+ // Sychronize with getAllAddons so we do not return too early
+ assert.waitFor(function () {
+ return !!addons;
+ })
+
+ return addons;
+ } catch (e) {
+ return null;
+ }
+}
+
+/**
+ * Retrieves application details for the Mozmill report
+ *
+ * @return {String} JSON data of application details
+ */
+function getApplicationDetails() {
+ var locale = Cc["@mozilla.org/chrome/chrome-registry;1"]
+ .getService(Ci.nsIXULChromeRegistry)
+ .getSelectedLocale("global");
+
+ // Put all our necessary information into JSON and return it:
+ // appinfo, startupinfo, and addons
+ var details = {
+ application_id: appInfo.ID,
+ application_name: Application,
+ application_version: appInfo.version,
+ application_locale: locale,
+ platform_buildid: appInfo.platformBuildID,
+ platform_version: appInfo.platformVersion,
+ addons: getAddons(),
+ startupinfo: getStartupInfo(),
+ paths: {
+ appdata: Services.dirsvc.get('UAppData', Ci.nsIFile).path,
+ profile: Services.dirsvc.get('ProfD', Ci.nsIFile).path
+ }
+ };
+
+ return JSON.stringify(details);
+}
+
+// get startup time if available
+// see http://blog.mozilla.com/tglek/2011/04/26/measuring-startup-speed-correctly/
+function getStartupInfo() {
+ var startupInfo = {};
+
+ try {
+ var _startupInfo = Services.startup.getStartupInfo();
+ for (var time in _startupInfo) {
+ // convert from Date object to ms since epoch
+ startupInfo[time] = _startupInfo[time].getTime();
+ }
+ } catch (e) {
+ startupInfo = null;
+ }
+
+ return startupInfo;
+}
+
+
+
+function newBrowserController () {
+ return new controller.MozMillController(utils.getMethodInWindows('OpenBrowserWindow')());
+}
+
+function getBrowserController () {
+ var browserWindow = wm.getMostRecentWindow("navigator:browser");
+
+ if (browserWindow == null) {
+ return newBrowserController();
+ } else {
+ return new controller.MozMillController(browserWindow);
+ }
+}
+
+function getPlacesController () {
+ utils.getMethodInWindows('PlacesCommandHook').showPlacesOrganizer('AllBookmarks');
+
+ return new controller.MozMillController(wm.getMostRecentWindow(''));
+}
+
+function getAddonsController () {
+ if (Application == 'SeaMonkey') {
+ utils.getMethodInWindows('toEM')();
+ }
+ else if (Application == 'Thunderbird') {
+ utils.getMethodInWindows('openAddonsMgr')();
+ }
+ else if (Application == 'Sunbird') {
+ utils.getMethodInWindows('goOpenAddons')();
+ } else {
+ utils.getMethodInWindows('BrowserOpenAddonsMgr')();
+ }
+
+ return new controller.MozMillController(wm.getMostRecentWindow(''));
+}
+
+function getDownloadsController() {
+ utils.getMethodInWindows('BrowserDownloadsUI')();
+
+ return new controller.MozMillController(wm.getMostRecentWindow(''));
+}
+
+function getPreferencesController() {
+ if (Application == 'Thunderbird') {
+ utils.getMethodInWindows('openOptionsDialog')();
+ } else {
+ utils.getMethodInWindows('openPreferences')();
+ }
+
+ return new controller.MozMillController(wm.getMostRecentWindow(''));
+}
+
+// Thunderbird functions
+function newMail3PaneController () {
+ return new controller.MozMillController(utils.getMethodInWindows('toMessengerWindow')());
+}
+
+function getMail3PaneController () {
+ var mail3PaneWindow = wm.getMostRecentWindow("mail:3pane");
+
+ if (mail3PaneWindow == null) {
+ return newMail3PaneController();
+ } else {
+ return new controller.MozMillController(mail3PaneWindow);
+ }
+}
+
+// Thunderbird - Address book window
+function newAddrbkController () {
+ utils.getMethodInWindows("toAddressBook")();
+ utils.sleep(2000);
+ var addyWin = wm.getMostRecentWindow("mail:addressbook");
+
+ return new controller.MozMillController(addyWin);
+}
+
+function getAddrbkController () {
+ var addrbkWindow = wm.getMostRecentWindow("mail:addressbook");
+ if (addrbkWindow == null) {
+ return newAddrbkController();
+ } else {
+ return new controller.MozMillController(addrbkWindow);
+ }
+}
+
+function firePythonCallback (filename, method, args, kwargs) {
+ obj = {'filename': filename, 'method': method};
+ obj['args'] = args || [];
+ obj['kwargs'] = kwargs || {};
+
+ broker.sendMessage("firePythonCallback", obj);
+}
+
+function timer (name) {
+ this.name = name;
+ this.timers = {};
+ this.actions = [];
+
+ frame.timers.push(this);
+}
+
+timer.prototype.start = function (name) {
+ this.timers[name].startTime = (new Date).getTime();
+}
+
+timer.prototype.stop = function (name) {
+ var t = this.timers[name];
+
+ t.endTime = (new Date).getTime();
+ t.totalTime = (t.endTime - t.startTime);
+}
+
+timer.prototype.end = function () {
+ frame.events.fireEvent("timer", this);
+ frame.timers.remove(this);
+}
+
+// Initialization
+
+/**
+ * Initialize Mozmill
+ */
+function initialize() {
+ windows.init();
+}
+
+initialize();
diff --git a/services/sync/tps/extensions/mozmill/resource/driver/msgbroker.js b/services/sync/tps/extensions/mozmill/resource/driver/msgbroker.js
new file mode 100644
index 000000000..95e431f08
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/driver/msgbroker.js
@@ -0,0 +1,58 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['addListener', 'addObject',
+ 'removeListener',
+ 'sendMessage', 'log', 'pass', 'fail'];
+
+var listeners = {};
+
+// add a listener for a specific message type
+function addListener(msgType, listener) {
+ if (listeners[msgType] === undefined) {
+ listeners[msgType] = [];
+ }
+
+ listeners[msgType].push(listener);
+}
+
+// add each method in an object as a message listener
+function addObject(object) {
+ for (var msgType in object) {
+ addListener(msgType, object[msgType]);
+ }
+}
+
+// remove a listener for all message types
+function removeListener(listener) {
+ for (var msgType in listeners) {
+ for (let i = 0; i < listeners.length; ++i) {
+ if (listeners[msgType][i] == listener) {
+ listeners[msgType].splice(i, 1); // remove listener from array
+ }
+ }
+ }
+}
+
+function sendMessage(msgType, obj) {
+ if (listeners[msgType] === undefined) {
+ return;
+ }
+
+ for (let i = 0; i < listeners[msgType].length; ++i) {
+ listeners[msgType][i](obj);
+ }
+}
+
+function log(obj) {
+ sendMessage('log', obj);
+}
+
+function pass(obj) {
+ sendMessage('pass', obj);
+}
+
+function fail(obj) {
+ sendMessage('fail', obj);
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/modules/assertions.js b/services/sync/tps/extensions/mozmill/resource/modules/assertions.js
new file mode 100644
index 000000000..c9991acf0
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/modules/assertions.js
@@ -0,0 +1,670 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['Assert', 'Expect'];
+
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/Services.jsm");
+
+var broker = {}; Cu.import('resource://mozmill/driver/msgbroker.js', broker);
+var errors = {}; Cu.import('resource://mozmill/modules/errors.js', errors);
+var stack = {}; Cu.import('resource://mozmill/modules/stack.js', stack);
+
+/**
+ * @name assertions
+ * @namespace Defines expect and assert methods to be used for assertions.
+ */
+
+/**
+ * The Assert class implements fatal assertions, and can be used in cases
+ * when a failing test has to directly abort the current test function. All
+ * remaining tasks will not be performed.
+ *
+ */
+var Assert = function () {}
+
+Assert.prototype = {
+
+ // The following deepEquals implementation is from Narwhal under this license:
+
+ // http://wiki.commonjs.org/wiki/Unit_Testing/1.0
+ //
+ // THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8!
+ //
+ // Originally from narwhal.js (http://narwhaljs.org)
+ // Copyright (c) 2009 Thomas Robinson <280north.com>
+ //
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
+ // of this software and associated documentation files (the 'Software'), to
+ // deal in the Software without restriction, including without limitation the
+ // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ // sell copies of the Software, and to permit persons to whom the Software is
+ // furnished to do so, subject to the following conditions:
+ //
+ // The above copyright notice and this permission notice shall be included in
+ // all copies or substantial portions of the Software.
+ //
+ // THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ // AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ // ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ _deepEqual: function (actual, expected) {
+ // 7.1. All identical values are equivalent, as determined by ===.
+ if (actual === expected) {
+ return true;
+
+ // 7.2. If the expected value is a Date object, the actual value is
+ // equivalent if it is also a Date object that refers to the same time.
+ } else if (actual instanceof Date && expected instanceof Date) {
+ return actual.getTime() === expected.getTime();
+
+ // 7.3. Other pairs that do not both pass typeof value == 'object',
+ // equivalence is determined by ==.
+ } else if (typeof actual != 'object' && typeof expected != 'object') {
+ return actual == expected;
+
+ // 7.4. For all other Object pairs, including Array objects, equivalence is
+ // determined by having the same number of owned properties (as verified
+ // with Object.prototype.hasOwnProperty.call), the same set of keys
+ // (although not necessarily the same order), equivalent values for every
+ // corresponding key, and an identical 'prototype' property. Note: this
+ // accounts for both named and indexed properties on Arrays.
+ } else {
+ return this._objEquiv(actual, expected);
+ }
+ },
+
+ _objEquiv: function (a, b) {
+ if (a == null || a == undefined || b == null || b == undefined)
+ return false;
+ // an identical 'prototype' property.
+ if (a.prototype !== b.prototype) return false;
+
+ function isArguments(object) {
+ return Object.prototype.toString.call(object) == '[object Arguments]';
+ }
+
+ //~~~I've managed to break Object.keys through screwy arguments passing.
+ // Converting to array solves the problem.
+ if (isArguments(a)) {
+ if (!isArguments(b)) {
+ return false;
+ }
+ a = pSlice.call(a);
+ b = pSlice.call(b);
+ return _deepEqual(a, b);
+ }
+ try {
+ var ka = Object.keys(a),
+ kb = Object.keys(b),
+ key, i;
+ } catch (e) {//happens when one is a string literal and the other isn't
+ return false;
+ }
+ // having the same number of owned properties (keys incorporates
+ // hasOwnProperty)
+ if (ka.length != kb.length)
+ return false;
+ //the same set of keys (although not necessarily the same order),
+ ka.sort();
+ kb.sort();
+ //~~~cheap key test
+ for (i = ka.length - 1; i >= 0; i--) {
+ if (ka[i] != kb[i])
+ return false;
+ }
+ //equivalent values for every corresponding key, and
+ //~~~possibly expensive deep test
+ for (i = ka.length - 1; i >= 0; i--) {
+ key = ka[i];
+ if (!this._deepEqual(a[key], b[key])) return false;
+ }
+ return true;
+ },
+
+ _expectedException : function Assert__expectedException(actual, expected) {
+ if (!actual || !expected) {
+ return false;
+ }
+
+ if (expected instanceof RegExp) {
+ return expected.test(actual);
+ } else if (actual instanceof expected) {
+ return true;
+ } else if (expected.call({}, actual) === true) {
+ return true;
+ } else if (actual.name === expected.name) {
+ return true;
+ }
+
+ return false;
+ },
+
+ /**
+ * Log a test as failing by throwing an AssertionException.
+ *
+ * @param {object} aResult
+ * Test result details used for reporting.
+ * <dl>
+ * <dd>fileName</dd>
+ * <dt>Name of the file in which the assertion failed.</dt>
+ * <dd>functionName</dd>
+ * <dt>Function in which the assertion failed.</dt>
+ * <dd>lineNumber</dd>
+ * <dt>Line number of the file in which the assertion failed.</dt>
+ * <dd>message</dd>
+ * <dt>Message why the assertion failed.</dt>
+ * </dl>
+ * @throws {errors.AssertionError}
+ *
+ */
+ _logFail: function Assert__logFail(aResult) {
+ throw new errors.AssertionError(aResult.message,
+ aResult.fileName,
+ aResult.lineNumber,
+ aResult.functionName,
+ aResult.name);
+ },
+
+ /**
+ * Log a test as passing by adding a pass frame.
+ *
+ * @param {object} aResult
+ * Test result details used for reporting.
+ * <dl>
+ * <dd>fileName</dd>
+ * <dt>Name of the file in which the assertion failed.</dt>
+ * <dd>functionName</dd>
+ * <dt>Function in which the assertion failed.</dt>
+ * <dd>lineNumber</dd>
+ * <dt>Line number of the file in which the assertion failed.</dt>
+ * <dd>message</dd>
+ * <dt>Message why the assertion failed.</dt>
+ * </dl>
+ */
+ _logPass: function Assert__logPass(aResult) {
+ broker.pass({pass: aResult});
+ },
+
+ /**
+ * Test the condition and mark test as passed or failed
+ *
+ * @param {boolean} aCondition
+ * Condition to test.
+ * @param {string} aMessage
+ * Message to show for the test result
+ * @param {string} aDiagnosis
+ * Diagnose message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ _test: function Assert__test(aCondition, aMessage, aDiagnosis) {
+ let diagnosis = aDiagnosis || "";
+ let message = aMessage || "";
+
+ if (diagnosis)
+ message = aMessage ? message + " - " + diagnosis : diagnosis;
+
+ // Build result data
+ let frame = stack.findCallerFrame(Components.stack);
+
+ let result = {
+ 'fileName' : frame.filename.replace(/(.*)-> /, ""),
+ 'functionName' : frame.name,
+ 'lineNumber' : frame.lineNumber,
+ 'message' : message
+ };
+
+ // Log test result
+ if (aCondition) {
+ this._logPass(result);
+ }
+ else {
+ result.stack = Components.stack;
+ this._logFail(result);
+ }
+
+ return aCondition;
+ },
+
+ /**
+ * Perform an always passing test
+ *
+ * @param {string} aMessage
+ * Message to show for the test result.
+ * @returns {boolean} Always returns true.
+ */
+ pass: function Assert_pass(aMessage) {
+ return this._test(true, aMessage, undefined);
+ },
+
+ /**
+ * Perform an always failing test
+ *
+ * @param {string} aMessage
+ * Message to show for the test result.
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Always returns false.
+ */
+ fail: function Assert_fail(aMessage) {
+ return this._test(false, aMessage, undefined);
+ },
+
+ /**
+ * Test if the value pass
+ *
+ * @param {boolean|string|number|object} aValue
+ * Value to test.
+ * @param {string} aMessage
+ * Message to show for the test result.
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ ok: function Assert_ok(aValue, aMessage) {
+ let condition = !!aValue;
+ let diagnosis = "got '" + aValue + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Test if both specified values are identical.
+ *
+ * @param {boolean|string|number|object} aValue
+ * Value to test.
+ * @param {boolean|string|number|object} aExpected
+ * Value to strictly compare with.
+ * @param {string} aMessage
+ * Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ equal: function Assert_equal(aValue, aExpected, aMessage) {
+ let condition = (aValue === aExpected);
+ let diagnosis = "'" + aValue + "' should equal '" + aExpected + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Test if both specified values are not identical.
+ *
+ * @param {boolean|string|number|object} aValue
+ * Value to test.
+ * @param {boolean|string|number|object} aExpected
+ * Value to strictly compare with.
+ * @param {string} aMessage
+ * Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ notEqual: function Assert_notEqual(aValue, aExpected, aMessage) {
+ let condition = (aValue !== aExpected);
+ let diagnosis = "'" + aValue + "' should not equal '" + aExpected + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Test if an object equals another object
+ *
+ * @param {object} aValue
+ * The object to test.
+ * @param {object} aExpected
+ * The object to strictly compare with.
+ * @param {string} aMessage
+ * Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ deepEqual: function equal(aValue, aExpected, aMessage) {
+ let condition = this._deepEqual(aValue, aExpected);
+ try {
+ var aValueString = JSON.stringify(aValue);
+ } catch (e) {
+ var aValueString = String(aValue);
+ }
+ try {
+ var aExpectedString = JSON.stringify(aExpected);
+ } catch (e) {
+ var aExpectedString = String(aExpected);
+ }
+
+ let diagnosis = "'" + aValueString + "' should equal '" +
+ aExpectedString + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Test if an object does not equal another object
+ *
+ * @param {object} aValue
+ * The object to test.
+ * @param {object} aExpected
+ * The object to strictly compare with.
+ * @param {string} aMessage
+ * Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ notDeepEqual: function notEqual(aValue, aExpected, aMessage) {
+ let condition = !this._deepEqual(aValue, aExpected);
+ try {
+ var aValueString = JSON.stringify(aValue);
+ } catch (e) {
+ var aValueString = String(aValue);
+ }
+ try {
+ var aExpectedString = JSON.stringify(aExpected);
+ } catch (e) {
+ var aExpectedString = String(aExpected);
+ }
+
+ let diagnosis = "'" + aValueString + "' should not equal '" +
+ aExpectedString + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Test if the regular expression matches the string.
+ *
+ * @param {string} aString
+ * String to test.
+ * @param {RegEx} aRegex
+ * Regular expression to use for testing that a match exists.
+ * @param {string} aMessage
+ * Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ match: function Assert_match(aString, aRegex, aMessage) {
+ // XXX Bug 634948
+ // Regex objects are transformed to strings when evaluated in a sandbox
+ // For now lets re-create the regex from its string representation
+ let pattern = flags = "";
+ try {
+ let matches = aRegex.toString().match(/\/(.*)\/(.*)/);
+
+ pattern = matches[1];
+ flags = matches[2];
+ } catch (e) {
+ }
+
+ let regex = new RegExp(pattern, flags);
+ let condition = (aString.match(regex) !== null);
+ let diagnosis = "'" + regex + "' matches for '" + aString + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Test if the regular expression does not match the string.
+ *
+ * @param {string} aString
+ * String to test.
+ * @param {RegEx} aRegex
+ * Regular expression to use for testing that a match does not exist.
+ * @param {string} aMessage
+ * Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ notMatch: function Assert_notMatch(aString, aRegex, aMessage) {
+ // XXX Bug 634948
+ // Regex objects are transformed to strings when evaluated in a sandbox
+ // For now lets re-create the regex from its string representation
+ let pattern = flags = "";
+ try {
+ let matches = aRegex.toString().match(/\/(.*)\/(.*)/);
+
+ pattern = matches[1];
+ flags = matches[2];
+ } catch (e) {
+ }
+
+ let regex = new RegExp(pattern, flags);
+ let condition = (aString.match(regex) === null);
+ let diagnosis = "'" + regex + "' doesn't match for '" + aString + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+
+ /**
+ * Test if a code block throws an exception.
+ *
+ * @param {string} block
+ * function to call to test for exception
+ * @param {RegEx} error
+ * the expected error class
+ * @param {string} message
+ * message to present if assertion fails
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ throws : function Assert_throws(block, /*optional*/error, /*optional*/message) {
+ return this._throws.apply(this, [true].concat(Array.prototype.slice.call(arguments)));
+ },
+
+ /**
+ * Test if a code block doesn't throw an exception.
+ *
+ * @param {string} block
+ * function to call to test for exception
+ * @param {RegEx} error
+ * the expected error class
+ * @param {string} message
+ * message to present if assertion fails
+ * @throws {errors.AssertionError}
+ *
+ * @returns {boolean} Result of the test.
+ */
+ doesNotThrow : function Assert_doesNotThrow(block, /*optional*/error, /*optional*/message) {
+ return this._throws.apply(this, [false].concat(Array.prototype.slice.call(arguments)));
+ },
+
+ /* Tests whether a code block throws the expected exception
+ class. helper for throws() and doesNotThrow()
+
+ adapted from node.js's assert._throws()
+ https://github.com/joyent/node/blob/master/lib/assert.js
+ */
+ _throws : function Assert__throws(shouldThrow, block, expected, message) {
+ var actual;
+
+ if (typeof expected === 'string') {
+ message = expected;
+ expected = null;
+ }
+
+ try {
+ block();
+ } catch (e) {
+ actual = e;
+ }
+
+ message = (expected && expected.name ? ' (' + expected.name + ').' : '.') +
+ (message ? ' ' + message : '.');
+
+ if (shouldThrow && !actual) {
+ return this._test(false, message, 'Missing expected exception');
+ }
+
+ if (!shouldThrow && this._expectedException(actual, expected)) {
+ return this._test(false, message, 'Got unwanted exception');
+ }
+
+ if ((shouldThrow && actual && expected &&
+ !this._expectedException(actual, expected)) || (!shouldThrow && actual)) {
+ throw actual;
+ }
+
+ return this._test(true, message);
+ },
+
+ /**
+ * Test if the string contains the pattern.
+ *
+ * @param {String} aString String to test.
+ * @param {String} aPattern Pattern to look for in the string
+ * @param {String} aMessage Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {Boolean} Result of the test.
+ */
+ contain: function Assert_contain(aString, aPattern, aMessage) {
+ let condition = (aString.indexOf(aPattern) !== -1);
+ let diagnosis = "'" + aString + "' should contain '" + aPattern + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Test if the string does not contain the pattern.
+ *
+ * @param {String} aString String to test.
+ * @param {String} aPattern Pattern to look for in the string
+ * @param {String} aMessage Message to show for the test result
+ * @throws {errors.AssertionError}
+ *
+ * @returns {Boolean} Result of the test.
+ */
+ notContain: function Assert_notContain(aString, aPattern, aMessage) {
+ let condition = (aString.indexOf(aPattern) === -1);
+ let diagnosis = "'" + aString + "' should not contain '" + aPattern + "'";
+
+ return this._test(condition, aMessage, diagnosis);
+ },
+
+ /**
+ * Waits for the callback evaluates to true
+ *
+ * @param {Function} aCallback
+ * Callback for evaluation
+ * @param {String} aMessage
+ * Message to show for result
+ * @param {Number} aTimeout
+ * Timeout in waiting for evaluation
+ * @param {Number} aInterval
+ * Interval between evaluation attempts
+ * @param {Object} aThisObject
+ * this object
+ * @throws {errors.AssertionError}
+ *
+ * @returns {Boolean} Result of the test.
+ */
+ waitFor: function Assert_waitFor(aCallback, aMessage, aTimeout, aInterval, aThisObject) {
+ var timeout = aTimeout || 5000;
+ var interval = aInterval || 100;
+
+ var self = {
+ timeIsUp: false,
+ result: aCallback.call(aThisObject)
+ };
+ var deadline = Date.now() + timeout;
+
+ function wait() {
+ if (self.result !== true) {
+ self.result = aCallback.call(aThisObject);
+ self.timeIsUp = Date.now() > deadline;
+ }
+ }
+
+ var hwindow = Services.appShell.hiddenDOMWindow;
+ var timeoutInterval = hwindow.setInterval(wait, interval);
+ var thread = Services.tm.currentThread;
+
+ while (self.result !== true && !self.timeIsUp) {
+ thread.processNextEvent(true);
+
+ let type = typeof(self.result);
+ if (type !== 'boolean')
+ throw TypeError("waitFor() callback has to return a boolean" +
+ " instead of '" + type + "'");
+ }
+
+ hwindow.clearInterval(timeoutInterval);
+
+ if (self.result !== true && self.timeIsUp) {
+ aMessage = aMessage || arguments.callee.name + ": Timeout exceeded for '" + aCallback + "'";
+ throw new errors.TimeoutError(aMessage);
+ }
+
+ broker.pass({'function':'assert.waitFor()'});
+ return true;
+ }
+}
+
+/* non-fatal assertions */
+var Expect = function () {}
+
+Expect.prototype = new Assert();
+
+/**
+ * Log a test as failing by adding a fail frame.
+ *
+ * @param {object} aResult
+ * Test result details used for reporting.
+ * <dl>
+ * <dd>fileName</dd>
+ * <dt>Name of the file in which the assertion failed.</dt>
+ * <dd>functionName</dd>
+ * <dt>Function in which the assertion failed.</dt>
+ * <dd>lineNumber</dd>
+ * <dt>Line number of the file in which the assertion failed.</dt>
+ * <dd>message</dd>
+ * <dt>Message why the assertion failed.</dt>
+ * </dl>
+ */
+Expect.prototype._logFail = function Expect__logFail(aResult) {
+ broker.fail({fail: aResult});
+}
+
+/**
+ * Waits for the callback evaluates to true
+ *
+ * @param {Function} aCallback
+ * Callback for evaluation
+ * @param {String} aMessage
+ * Message to show for result
+ * @param {Number} aTimeout
+ * Timeout in waiting for evaluation
+ * @param {Number} aInterval
+ * Interval between evaluation attempts
+ * @param {Object} aThisObject
+ * this object
+ */
+Expect.prototype.waitFor = function Expect_waitFor(aCallback, aMessage, aTimeout, aInterval, aThisObject) {
+ let condition = true;
+ let message = aMessage;
+
+ try {
+ Assert.prototype.waitFor.apply(this, arguments);
+ }
+ catch (ex) {
+ if (!ex instanceof errors.AssertionError) {
+ throw ex;
+ }
+ message = ex.message;
+ condition = false;
+ }
+
+ return this._test(condition, message);
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/modules/driver.js b/services/sync/tps/extensions/mozmill/resource/modules/driver.js
new file mode 100644
index 000000000..17fcfbde6
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/modules/driver.js
@@ -0,0 +1,290 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * @namespace Defines the Mozmill driver for global actions
+ */
+var driver = exports;
+
+Cu.import("resource://gre/modules/Services.jsm");
+
+// Temporarily include utils module to re-use sleep
+var assertions = {}; Cu.import('resource://mozmill/modules/assertions.js', assertions);
+var mozmill = {}; Cu.import("resource://mozmill/driver/mozmill.js", mozmill);
+var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils);
+
+/**
+ * Gets the topmost browser window. If there are none at that time, optionally
+ * opens one. Otherwise will raise an exception if none are found.
+ *
+ * @memberOf driver
+ * @param {Boolean] [aOpenIfNone=true] Open a new browser window if none are found.
+ * @returns {DOMWindow}
+ */
+function getBrowserWindow(aOpenIfNone) {
+ // Set default
+ if (typeof aOpenIfNone === 'undefined') {
+ aOpenIfNone = true;
+ }
+
+ // If implicit open is off, turn on strict checking, and vice versa.
+ let win = getTopmostWindowByType("navigator:browser", !aOpenIfNone);
+
+ // Can just assume automatic open here. If we didn't want it and nothing found,
+ // we already raised above when getTopmostWindow was called.
+ if (!win)
+ win = openBrowserWindow();
+
+ return win;
+}
+
+
+/**
+ * Retrieves the hidden window on OS X
+ *
+ * @memberOf driver
+ * @returns {DOMWindow} The hidden window
+ */
+function getHiddenWindow() {
+ return Services.appShell.hiddenDOMWindow;
+}
+
+
+/**
+ * Opens a new browser window
+ *
+ * @memberOf driver
+ * @returns {DOMWindow}
+ */
+function openBrowserWindow() {
+ // On OS X we have to be able to create a new browser window even with no other
+ // window open. Therefore we have to use the hidden window. On other platforms
+ // at least one remaining browser window has to exist.
+ var win = mozmill.isMac ? getHiddenWindow() :
+ getTopmostWindowByType("navigator:browser", true);
+ return win.OpenBrowserWindow();
+}
+
+
+/**
+ * Pause the test execution for the given amount of time
+ *
+ * @type utils.sleep
+ * @memberOf driver
+ */
+var sleep = utils.sleep;
+
+/**
+ * Wait until the given condition via the callback returns true.
+ *
+ * @type utils.waitFor
+ * @memberOf driver
+ */
+var waitFor = assertions.Assert.waitFor;
+
+//
+// INTERNAL WINDOW ENUMERATIONS
+//
+
+/**
+ * Internal function to build a list of DOM windows using a given enumerator
+ * and filter.
+ *
+ * @private
+ * @memberOf driver
+ * @param {nsISimpleEnumerator} aEnumerator Window enumerator to use.
+ * @param {Function} [aFilterCallback] Function which is used to filter windows.
+ * @param {Boolean} [aStrict=true] Throw an error if no windows found
+ *
+ * @returns {DOMWindow[]} The windows found, in the same order as the enumerator.
+ */
+function _getWindows(aEnumerator, aFilterCallback, aStrict) {
+ // Set default
+ if (typeof aStrict === 'undefined')
+ aStrict = true;
+
+ let windows = [];
+
+ while (aEnumerator.hasMoreElements()) {
+ let window = aEnumerator.getNext();
+
+ if (!aFilterCallback || aFilterCallback(window)) {
+ windows.push(window);
+ }
+ }
+
+ // If this list is empty and we're strict, throw an error
+ if (windows.length === 0 && aStrict) {
+ var message = 'No windows were found';
+
+ // We'll throw a more detailed error if a filter was used.
+ if (aFilterCallback && aFilterCallback.name)
+ message += ' using filter "' + aFilterCallback.name + '"';
+
+ throw new Error(message);
+ }
+
+ return windows;
+}
+
+//
+// FILTER CALLBACKS
+//
+
+/**
+ * Generator of a closure to filter a window based by a method
+ *
+ * @memberOf driver
+ * @param {String} aName Name of the method in the window object.
+ * @returns {Boolean} True if the condition is met.
+ */
+function windowFilterByMethod(aName) {
+ return function byMethod(aWindow) { return (aName in aWindow); }
+}
+
+
+/**
+ * Generator of a closure to filter a window based by the its title
+ *
+ * @param {String} aTitle Title of the window.
+ * @returns {Boolean} True if the condition is met.
+ */
+function windowFilterByTitle(aTitle) {
+ return function byTitle(aWindow) { return (aWindow.document.title === aTitle); }
+}
+
+
+/**
+ * Generator of a closure to filter a window based by the its type
+ *
+ * @memberOf driver
+ * @param {String} aType Type of the window.
+ * @returns {Boolean} True if the condition is met.
+ */
+function windowFilterByType(aType) {
+ return function byType(aWindow) {
+ var type = aWindow.document.documentElement.getAttribute("windowtype");
+ return (type === aType);
+ }
+}
+
+//
+// WINDOW LIST RETRIEVAL FUNCTIONS
+//
+
+/**
+ * Retrieves a sorted list of open windows based on their age (newest to oldest),
+ * optionally matching filter criteria.
+ *
+ * @memberOf driver
+ * @param {Function} [aFilterCallback] Function which is used to filter windows.
+ * @param {Boolean} [aStrict=true] Throw an error if no windows found
+ *
+ * @returns {DOMWindow[]} List of windows.
+ */
+function getWindowsByAge(aFilterCallback, aStrict) {
+ var windows = _getWindows(Services.wm.getEnumerator(""),
+ aFilterCallback, aStrict);
+
+ // Reverse the list, since naturally comes back old->new
+ return windows.reverse();
+}
+
+
+/**
+ * Retrieves a sorted list of open windows based on their z order (topmost first),
+ * optionally matching filter criteria.
+ *
+ * @memberOf driver
+ * @param {Function} [aFilterCallback] Function which is used to filter windows.
+ * @param {Boolean} [aStrict=true] Throw an error if no windows found
+ *
+ * @returns {DOMWindow[]} List of windows.
+ */
+function getWindowsByZOrder(aFilterCallback, aStrict) {
+ return _getWindows(Services.wm.getZOrderDOMWindowEnumerator("", true),
+ aFilterCallback, aStrict);
+}
+
+//
+// SINGLE WINDOW RETRIEVAL FUNCTIONS
+//
+
+/**
+ * Retrieves the last opened window, optionally matching filter criteria.
+ *
+ * @memberOf driver
+ * @param {Function} [aFilterCallback] Function which is used to filter windows.
+ * @param {Boolean} [aStrict=true] If true, throws error if no window found.
+ *
+ * @returns {DOMWindow} The window, or null if none found and aStrict == false
+ */
+function getNewestWindow(aFilterCallback, aStrict) {
+ var windows = getWindowsByAge(aFilterCallback, aStrict);
+ return windows.length ? windows[0] : null;
+}
+
+/**
+ * Retrieves the topmost window, optionally matching filter criteria.
+ *
+ * @memberOf driver
+ * @param {Function} [aFilterCallback] Function which is used to filter windows.
+ * @param {Boolean} [aStrict=true] If true, throws error if no window found.
+ *
+ * @returns {DOMWindow} The window, or null if none found and aStrict == false
+ */
+function getTopmostWindow(aFilterCallback, aStrict) {
+ var windows = getWindowsByZOrder(aFilterCallback, aStrict);
+ return windows.length ? windows[0] : null;
+}
+
+
+/**
+ * Retrieves the topmost window given by the window type
+ *
+ * XXX: Bug 462222
+ * This function has to be used instead of getTopmostWindow until the
+ * underlying platform bug has been fixed.
+ *
+ * @memberOf driver
+ * @param {String} [aWindowType=null] Window type to query for
+ * @param {Boolean} [aStrict=true] Throw an error if no windows found
+ *
+ * @returns {DOMWindow} The window, or null if none found and aStrict == false
+ */
+function getTopmostWindowByType(aWindowType, aStrict) {
+ if (typeof aStrict === 'undefined')
+ aStrict = true;
+
+ var win = Services.wm.getMostRecentWindow(aWindowType);
+
+ if (win === null && aStrict) {
+ var message = 'No windows of type "' + aWindowType + '" were found';
+ throw new errors.UnexpectedError(message);
+ }
+
+ return win;
+}
+
+
+// Export of functions
+driver.getBrowserWindow = getBrowserWindow;
+driver.getHiddenWindow = getHiddenWindow;
+driver.openBrowserWindow = openBrowserWindow;
+driver.sleep = sleep;
+driver.waitFor = waitFor;
+
+driver.windowFilterByMethod = windowFilterByMethod;
+driver.windowFilterByTitle = windowFilterByTitle;
+driver.windowFilterByType = windowFilterByType;
+
+driver.getWindowsByAge = getWindowsByAge;
+driver.getNewestWindow = getNewestWindow;
+driver.getTopmostWindowByType = getTopmostWindowByType;
+
+
+// XXX Bug: 462222
+// Currently those functions cannot be used. So they shouldn't be exported.
+//driver.getWindowsByZOrder = getWindowsByZOrder;
+//driver.getTopmostWindow = getTopmostWindow;
diff --git a/services/sync/tps/extensions/mozmill/resource/modules/errors.js b/services/sync/tps/extensions/mozmill/resource/modules/errors.js
new file mode 100644
index 000000000..58d1a918a
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/modules/errors.js
@@ -0,0 +1,102 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['BaseError',
+ 'ApplicationQuitError',
+ 'AssertionError',
+ 'TimeoutError'];
+
+
+/**
+ * Creates a new instance of a base error
+ *
+ * @class Represents the base for custom errors
+ * @param {string} [aMessage=Error().message]
+ * The error message to show
+ * @param {string} [aFileName=Error().fileName]
+ * The file name where the error has been raised
+ * @param {string} [aLineNumber=Error().lineNumber]
+ * The line number of the file where the error has been raised
+ * @param {string} [aFunctionName=undefined]
+ * The function name in which the error has been raised
+ */
+function BaseError(aMessage, aFileName, aLineNumber, aFunctionName) {
+ this.name = this.constructor.name;
+
+ var err = new Error();
+ if (err.stack) {
+ this.stack = err.stack;
+ }
+
+ this.message = aMessage || err.message;
+ this.fileName = aFileName || err.fileName;
+ this.lineNumber = aLineNumber || err.lineNumber;
+ this.functionName = aFunctionName;
+}
+
+
+/**
+ * Creates a new instance of an application quit error used by Mozmill to
+ * indicate that the application is going to shutdown
+ *
+ * @class Represents an error object thrown when the application is going to shutdown
+ * @param {string} [aMessage=Error().message]
+ * The error message to show
+ * @param {string} [aFileName=Error().fileName]
+ * The file name where the error has been raised
+ * @param {string} [aLineNumber=Error().lineNumber]
+ * The line number of the file where the error has been raised
+ * @param {string} [aFunctionName=undefined]
+ * The function name in which the error has been raised
+ */
+function ApplicationQuitError(aMessage, aFileName, aLineNumber, aFunctionName) {
+ BaseError.apply(this, arguments);
+}
+
+ApplicationQuitError.prototype = Object.create(BaseError.prototype, {
+ constructor : { value : ApplicationQuitError }
+});
+
+
+/**
+ * Creates a new instance of an assertion error
+ *
+ * @class Represents an error object thrown by failing assertions
+ * @param {string} [aMessage=Error().message]
+ * The error message to show
+ * @param {string} [aFileName=Error().fileName]
+ * The file name where the error has been raised
+ * @param {string} [aLineNumber=Error().lineNumber]
+ * The line number of the file where the error has been raised
+ * @param {string} [aFunctionName=undefined]
+ * The function name in which the error has been raised
+ */
+function AssertionError(aMessage, aFileName, aLineNumber, aFunctionName) {
+ BaseError.apply(this, arguments);
+}
+
+AssertionError.prototype = Object.create(BaseError.prototype, {
+ constructor : { value : AssertionError }
+});
+
+/**
+ * Creates a new instance of a timeout error
+ *
+ * @class Represents an error object thrown by failing assertions
+ * @param {string} [aMessage=Error().message]
+ * The error message to show
+ * @param {string} [aFileName=Error().fileName]
+ * The file name where the error has been raised
+ * @param {string} [aLineNumber=Error().lineNumber]
+ * The line number of the file where the error has been raised
+ * @param {string} [aFunctionName=undefined]
+ * The function name in which the error has been raised
+ */
+function TimeoutError(aMessage, aFileName, aLineNumber, aFunctionName) {
+ AssertionError.apply(this, arguments);
+}
+
+TimeoutError.prototype = Object.create(AssertionError.prototype, {
+ constructor : { value : TimeoutError }
+});
diff --git a/services/sync/tps/extensions/mozmill/resource/modules/frame.js b/services/sync/tps/extensions/mozmill/resource/modules/frame.js
new file mode 100644
index 000000000..dae8276b6
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/modules/frame.js
@@ -0,0 +1,788 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['Collector','Runner','events', 'runTestFile', 'log',
+ 'timers', 'persisted', 'shutdownApplication'];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+const TIMEOUT_SHUTDOWN_HTTPD = 15000;
+
+Cu.import("resource://gre/modules/Services.jsm");
+
+Cu.import('resource://mozmill/stdlib/httpd.js');
+
+var broker = {}; Cu.import('resource://mozmill/driver/msgbroker.js', broker);
+var assertions = {}; Cu.import('resource://mozmill/modules/assertions.js', assertions);
+var errors = {}; Cu.import('resource://mozmill/modules/errors.js', errors);
+var os = {}; Cu.import('resource://mozmill/stdlib/os.js', os);
+var strings = {}; Cu.import('resource://mozmill/stdlib/strings.js', strings);
+var arrays = {}; Cu.import('resource://mozmill/stdlib/arrays.js', arrays);
+var withs = {}; Cu.import('resource://mozmill/stdlib/withs.js', withs);
+var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils);
+
+var securableModule = {};
+Cu.import('resource://mozmill/stdlib/securable-module.js', securableModule);
+
+var uuidgen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
+
+var httpd = null;
+var persisted = {};
+
+var assert = new assertions.Assert();
+var expect = new assertions.Expect();
+
+var mozmill = undefined;
+var mozelement = undefined;
+var modules = undefined;
+
+var timers = [];
+
+
+/**
+ * Shutdown or restart the application
+ *
+ * @param {boolean} [aFlags=undefined]
+ * Additional flags how to handle the shutdown or restart. The attributes
+ * eRestarti386 and eRestartx86_64 have not been documented yet.
+ * @see https://developer.mozilla.org/nsIAppStartup#Attributes
+ */
+function shutdownApplication(aFlags) {
+ var flags = Ci.nsIAppStartup.eForceQuit;
+
+ if (aFlags) {
+ flags |= aFlags;
+ }
+
+ // Send a request to shutdown the application. That will allow us and other
+ // components to finish up with any shutdown code. Please note that we don't
+ // care if other components or add-ons want to prevent this via cancelQuit,
+ // we really force the shutdown.
+ let cancelQuit = Components.classes["@mozilla.org/supports-PRBool;1"].
+ createInstance(Components.interfaces.nsISupportsPRBool);
+ Services.obs.notifyObservers(cancelQuit, "quit-application-requested", null);
+
+ // Use a timer to trigger the application restart, which will allow us to
+ // send an ACK packet via jsbridge if the method has been called via Python.
+ var event = {
+ notify: function(timer) {
+ Services.startup.quit(flags);
+ }
+ }
+
+ var timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ timer.initWithCallback(event, 100, Ci.nsITimer.TYPE_ONE_SHOT);
+}
+
+function stateChangeBase(possibilties, restrictions, target, cmeta, v) {
+ if (possibilties) {
+ if (!arrays.inArray(possibilties, v)) {
+ // TODO Error value not in this.poss
+ return;
+ }
+ }
+
+ if (restrictions) {
+ for (var i in restrictions) {
+ var r = restrictions[i];
+ if (!r(v)) {
+ // TODO error value did not pass restriction
+ return;
+ }
+ }
+ }
+
+ // Fire jsbridge notification, logging notification, listener notifications
+ events[target] = v;
+ events.fireEvent(cmeta, target);
+}
+
+
+var events = {
+ appQuit : false,
+ currentModule : null,
+ currentState : null,
+ currentTest : null,
+ shutdownRequested : false,
+ userShutdown : null,
+ userShutdownTimer : null,
+
+ listeners : {},
+ globalListeners : []
+}
+
+events.setState = function (v) {
+ return stateChangeBase(['dependencies', 'setupModule', 'teardownModule',
+ 'test', 'setupTest', 'teardownTest', 'collection'],
+ null, 'currentState', 'setState', v);
+}
+
+events.toggleUserShutdown = function (obj){
+ if (!this.userShutdown) {
+ this.userShutdown = obj;
+
+ var event = {
+ notify: function(timer) {
+ events.toggleUserShutdown(obj);
+ }
+ }
+
+ this.userShutdownTimer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ this.userShutdownTimer.initWithCallback(event, obj.timeout, Ci.nsITimer.TYPE_ONE_SHOT);
+
+ } else {
+ this.userShutdownTimer.cancel();
+
+ // If the application is not going to shutdown, the user shutdown failed and
+ // we have to force a shutdown.
+ if (!events.appQuit) {
+ this.fail({'function':'events.toggleUserShutdown',
+ 'message':'Shutdown expected but none detected before timeout',
+ 'userShutdown': obj});
+
+ var flags = Ci.nsIAppStartup.eAttemptQuit;
+ if (events.isRestartShutdown()) {
+ flags |= Ci.nsIAppStartup.eRestart;
+ }
+
+ shutdownApplication(flags);
+ }
+ }
+}
+
+events.isUserShutdown = function () {
+ return this.userShutdown ? this.userShutdown["user"] : false;
+}
+
+events.isRestartShutdown = function () {
+ return this.userShutdown.restart;
+}
+
+events.startShutdown = function (obj) {
+ events.fireEvent('shutdown', obj);
+
+ if (obj["user"]) {
+ events.toggleUserShutdown(obj);
+ } else {
+ shutdownApplication(obj.flags);
+ }
+}
+
+events.setTest = function (test) {
+ test.__start__ = Date.now();
+ test.__passes__ = [];
+ test.__fails__ = [];
+
+ events.currentTest = test;
+
+ var obj = {'filename': events.currentModule.__file__,
+ 'name': test.__name__}
+ events.fireEvent('setTest', obj);
+}
+
+events.endTest = function (test) {
+ // use the current test unless specified
+ if (test === undefined) {
+ test = events.currentTest;
+ }
+
+ // If no test is set it has already been reported. Beside that we don't want
+ // to report it a second time.
+ if (!test || test.status === 'done')
+ return;
+
+ // report the end of a test
+ test.__end__ = Date.now();
+ test.status = 'done';
+
+ var obj = {'filename': events.currentModule.__file__,
+ 'passed': test.__passes__.length,
+ 'failed': test.__fails__.length,
+ 'passes': test.__passes__,
+ 'fails' : test.__fails__,
+ 'name' : test.__name__,
+ 'time_start': test.__start__,
+ 'time_end': test.__end__}
+
+ if (test.skipped) {
+ obj['skipped'] = true;
+ obj.skipped_reason = test.skipped_reason;
+ }
+
+ if (test.meta) {
+ obj.meta = test.meta;
+ }
+
+ // Report the test result only if the test is a true test or if it is failing
+ if (withs.startsWith(test.__name__, "test") || test.__fails__.length > 0) {
+ events.fireEvent('endTest', obj);
+ }
+}
+
+events.setModule = function (aModule) {
+ aModule.__start__ = Date.now();
+ aModule.__status__ = 'running';
+
+ var result = stateChangeBase(null,
+ [function (aModule) {return (aModule.__file__ != undefined)}],
+ 'currentModule', 'setModule', aModule);
+
+ return result;
+}
+
+events.endModule = function (aModule) {
+ // It should only reported once, so check if it already has been done
+ if (aModule.__status__ === 'done')
+ return;
+
+ aModule.__end__ = Date.now();
+ aModule.__status__ = 'done';
+
+ var obj = {
+ 'filename': aModule.__file__,
+ 'time_start': aModule.__start__,
+ 'time_end': aModule.__end__
+ }
+
+ events.fireEvent('endModule', obj);
+}
+
+events.pass = function (obj) {
+ // a low level event, such as a keystroke, succeeds
+ if (events.currentTest) {
+ events.currentTest.__passes__.push(obj);
+ }
+
+ for (var timer of timers) {
+ timer.actions.push(
+ {"currentTest": events.currentModule.__file__ + "::" + events.currentTest.__name__,
+ "obj": obj,
+ "result": "pass"}
+ );
+ }
+
+ events.fireEvent('pass', obj);
+}
+
+events.fail = function (obj) {
+ var error = obj.exception;
+
+ if (error) {
+ // Error objects aren't enumerable https://bugzilla.mozilla.org/show_bug.cgi?id=637207
+ obj.exception = {
+ name: error.name,
+ message: error.message,
+ lineNumber: error.lineNumber,
+ fileName: error.fileName,
+ stack: error.stack
+ };
+ }
+
+ // a low level event, such as a keystroke, fails
+ if (events.currentTest) {
+ events.currentTest.__fails__.push(obj);
+ }
+
+ for (var time of timers) {
+ timer.actions.push(
+ {"currentTest": events.currentModule.__file__ + "::" + events.currentTest.__name__,
+ "obj": obj,
+ "result": "fail"}
+ );
+ }
+
+ events.fireEvent('fail', obj);
+}
+
+events.skip = function (reason) {
+ // this is used to report skips associated with setupModule and nothing else
+ events.currentTest.skipped = true;
+ events.currentTest.skipped_reason = reason;
+
+ for (var timer of timers) {
+ timer.actions.push(
+ {"currentTest": events.currentModule.__file__ + "::" + events.currentTest.__name__,
+ "obj": reason,
+ "result": "skip"}
+ );
+ }
+
+ events.fireEvent('skip', reason);
+}
+
+events.fireEvent = function (name, obj) {
+ if (events.appQuit) {
+ // dump('* Event discarded: ' + name + ' ' + JSON.stringify(obj) + '\n');
+ return;
+ }
+
+ if (this.listeners[name]) {
+ for (var i in this.listeners[name]) {
+ this.listeners[name][i](obj);
+ }
+ }
+
+ for (var listener of this.globalListeners) {
+ listener(name, obj);
+ }
+}
+
+events.addListener = function (name, listener) {
+ if (this.listeners[name]) {
+ this.listeners[name].push(listener);
+ } else if (name == '') {
+ this.globalListeners.push(listener)
+ } else {
+ this.listeners[name] = [listener];
+ }
+}
+
+events.removeListener = function (listener) {
+ for (var listenerIndex in this.listeners) {
+ var e = this.listeners[listenerIndex];
+
+ for (var i in e){
+ if (e[i] == listener) {
+ this.listeners[listenerIndex] = arrays.remove(e, i);
+ }
+ }
+ }
+
+ for (var i in this.globalListeners) {
+ if (this.globalListeners[i] == listener) {
+ this.globalListeners = arrays.remove(this.globalListeners, i);
+ }
+ }
+}
+
+events.persist = function () {
+ try {
+ events.fireEvent('persist', persisted);
+ } catch (e) {
+ events.fireEvent('error', "persist serialization failed.")
+ }
+}
+
+events.firePythonCallback = function (obj) {
+ obj['test'] = events.currentModule.__file__;
+ events.fireEvent('firePythonCallback', obj);
+}
+
+events.screenshot = function (obj) {
+ // Find the name of the test function
+ for (var attr in events.currentModule) {
+ if (events.currentModule[attr] == events.currentTest) {
+ var testName = attr;
+ break;
+ }
+ }
+
+ obj['test_file'] = events.currentModule.__file__;
+ obj['test_name'] = testName;
+ events.fireEvent('screenshot', obj);
+}
+
+var log = function (obj) {
+ events.fireEvent('log', obj);
+}
+
+// Register the listeners
+broker.addObject({'endTest': events.endTest,
+ 'fail': events.fail,
+ 'firePythonCallback': events.firePythonCallback,
+ 'log': log,
+ 'pass': events.pass,
+ 'persist': events.persist,
+ 'screenshot': events.screenshot,
+ 'shutdown': events.startShutdown,
+ });
+
+try {
+ Cu.import('resource://jsbridge/modules/Events.jsm');
+
+ events.addListener('', function (name, obj) {
+ Events.fireEvent('mozmill.' + name, obj);
+ });
+} catch (e) {
+ Services.console.logStringMessage("Event module of JSBridge not available.");
+}
+
+
+/**
+ * Observer for notifications when the application is going to shutdown
+ */
+function AppQuitObserver() {
+ this.runner = null;
+
+ Services.obs.addObserver(this, "quit-application-requested", false);
+}
+
+AppQuitObserver.prototype = {
+ observe: function (aSubject, aTopic, aData) {
+ switch (aTopic) {
+ case "quit-application-requested":
+ Services.obs.removeObserver(this, "quit-application-requested");
+
+ // If we observe a quit notification make sure to send the
+ // results of the current test. In those cases we don't reach
+ // the equivalent code in runTestModule()
+ events.pass({'message': 'AppQuitObserver: ' + JSON.stringify(aData),
+ 'userShutdown': events.userShutdown});
+
+ if (this.runner) {
+ this.runner.end();
+ }
+
+ if (httpd) {
+ httpd.stop();
+ }
+
+ events.appQuit = true;
+
+ break;
+ }
+ }
+}
+
+var appQuitObserver = new AppQuitObserver();
+
+/**
+ * The collector handles HTTPd.js and initilizing the module
+ */
+function Collector() {
+ this.test_modules_by_filename = {};
+ this.testing = [];
+}
+
+Collector.prototype.addHttpResource = function (aDirectory, aPath) {
+ var fp = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsILocalFile);
+ fp.initWithPath(os.abspath(aDirectory, this.current_file));
+
+ return httpd.addHttpResource(fp, aPath);
+}
+
+Collector.prototype.initTestModule = function (filename, testname) {
+ var test_module = this.loadFile(filename, this);
+ var has_restarted = !(testname == null);
+ test_module.__tests__ = [];
+
+ for (var i in test_module) {
+ if (typeof(test_module[i]) == "function") {
+ test_module[i].__name__ = i;
+
+ // Only run setupModule if we are a single test OR if we are the first
+ // test of a restart chain (don't run it prior to members in a restart
+ // chain)
+ if (i == "setupModule" && !has_restarted) {
+ test_module.__setupModule__ = test_module[i];
+ } else if (i == "setupTest") {
+ test_module.__setupTest__ = test_module[i];
+ } else if (i == "teardownTest") {
+ test_module.__teardownTest__ = test_module[i];
+ } else if (i == "teardownModule") {
+ test_module.__teardownModule__ = test_module[i];
+ } else if (withs.startsWith(i, "test")) {
+ if (testname && (i != testname)) {
+ continue;
+ }
+
+ testname = null;
+ test_module.__tests__.push(test_module[i]);
+ }
+ }
+ }
+
+ test_module.collector = this;
+ test_module.status = 'loaded';
+
+ this.test_modules_by_filename[filename] = test_module;
+
+ return test_module;
+}
+
+Collector.prototype.loadFile = function (path, collector) {
+ var moduleLoader = new securableModule.Loader({
+ rootPaths: ["resource://mozmill/modules/"],
+ defaultPrincipal: "system",
+ globals : { Cc: Cc,
+ Ci: Ci,
+ Cu: Cu,
+ Cr: Components.results}
+ });
+
+ // load a test module from a file and add some candy
+ var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsILocalFile);
+ file.initWithPath(path);
+ var uri = Services.io.newFileURI(file).spec;
+
+ this.loadTestResources();
+
+ var systemPrincipal = Services.scriptSecurityManager.getSystemPrincipal();
+ var module = new Components.utils.Sandbox(systemPrincipal);
+ module.assert = assert;
+ module.Cc = Cc;
+ module.Ci = Ci;
+ module.Cr = Components.results;
+ module.Cu = Cu;
+ module.collector = collector;
+ module.driver = moduleLoader.require("driver");
+ module.elementslib = mozelement;
+ module.errors = errors;
+ module.expect = expect;
+ module.findElement = mozelement;
+ module.log = log;
+ module.mozmill = mozmill;
+ module.persisted = persisted;
+
+ module.require = function (mod) {
+ var loader = new securableModule.Loader({
+ rootPaths: [Services.io.newFileURI(file.parent).spec,
+ "resource://mozmill/modules/"],
+ defaultPrincipal: "system",
+ globals : { assert: assert,
+ expect: expect,
+ mozmill: mozmill,
+ elementslib: mozelement, // This a quick hack to maintain backwards compatibility with 1.5.x
+ findElement: mozelement,
+ persisted: persisted,
+ Cc: Cc,
+ Ci: Ci,
+ Cu: Cu,
+ log: log }
+ });
+
+ if (modules != undefined) {
+ loader.modules = modules;
+ }
+
+ var retval = loader.require(mod);
+ modules = loader.modules;
+
+ return retval;
+ }
+
+ if (collector != undefined) {
+ collector.current_file = file;
+ collector.current_path = path;
+ }
+
+ try {
+ Services.scriptloader.loadSubScript(uri, module, "UTF-8");
+ } catch (e) {
+ var obj = {
+ 'filename': path,
+ 'passed': 0,
+ 'failed': 1,
+ 'passes': [],
+ 'fails' : [{'exception' : {
+ message: e.message,
+ filename: e.filename,
+ lineNumber: e.lineNumber}}],
+ 'name' :'<TOP_LEVEL>'
+ };
+
+ events.fail({'exception': e});
+ events.fireEvent('endTest', obj);
+ }
+
+ module.__file__ = path;
+ module.__uri__ = uri;
+
+ return module;
+}
+
+Collector.prototype.loadTestResources = function () {
+ // load resources we want in our tests
+ if (mozmill === undefined) {
+ mozmill = {};
+ Cu.import("resource://mozmill/driver/mozmill.js", mozmill);
+ }
+ if (mozelement === undefined) {
+ mozelement = {};
+ Cu.import("resource://mozmill/driver/mozelement.js", mozelement);
+ }
+}
+
+
+/**
+ *
+ */
+function Httpd(aPort) {
+ this.http_port = aPort;
+
+ while (true) {
+ try {
+ var srv = new HttpServer();
+ srv.registerContentType("sjs", "sjs");
+ srv.identity.setPrimary("http", "localhost", this.http_port);
+ srv.start(this.http_port);
+
+ this._httpd = srv;
+ break;
+ }
+ catch (e) {
+ // Failure most likely due to port conflict
+ this.http_port++;
+ }
+ }
+}
+
+Httpd.prototype.addHttpResource = function (aDir, aPath) {
+ var path = aPath ? ("/" + aPath + "/") : "/";
+
+ try {
+ this._httpd.registerDirectory(path, aDir);
+ return 'http://localhost:' + this.http_port + path;
+ }
+ catch (e) {
+ throw Error("Failure to register directory: " + aDir.path);
+ }
+};
+
+Httpd.prototype.stop = function () {
+ if (!this._httpd) {
+ return;
+ }
+
+ var shutdown = false;
+ this._httpd.stop(function () { shutdown = true; });
+
+ assert.waitFor(function () {
+ return shutdown;
+ }, "Local HTTP server has been stopped", TIMEOUT_SHUTDOWN_HTTPD);
+
+ this._httpd = null;
+};
+
+function startHTTPd() {
+ if (!httpd) {
+ // Ensure that we start the HTTP server only once during a session
+ httpd = new Httpd(43336);
+ }
+}
+
+
+function Runner() {
+ this.collector = new Collector();
+ this.ended = false;
+
+ var m = {}; Cu.import('resource://mozmill/driver/mozmill.js', m);
+ this.platform = m.platform;
+
+ events.fireEvent('startRunner', true);
+}
+
+Runner.prototype.end = function () {
+ if (!this.ended) {
+ this.ended = true;
+
+ appQuitObserver.runner = null;
+
+ events.endTest();
+ events.endModule(events.currentModule);
+ events.fireEvent('endRunner', true);
+ events.persist();
+ }
+};
+
+Runner.prototype.runTestFile = function (filename, name) {
+ var module = this.collector.initTestModule(filename, name);
+ this.runTestModule(module);
+};
+
+Runner.prototype.runTestModule = function (module) {
+ appQuitObserver.runner = this;
+ events.setModule(module);
+
+ // If setupModule passes, run all the tests. Otherwise mark them as skipped.
+ if (this.execFunction(module.__setupModule__, module)) {
+ for (var test of module.__tests__) {
+ if (events.shutdownRequested) {
+ break;
+ }
+
+ // If setupTest passes, run the test. Otherwise mark it as skipped.
+ if (this.execFunction(module.__setupTest__, module)) {
+ this.execFunction(test);
+ } else {
+ this.skipFunction(test, module.__setupTest__.__name__ + " failed");
+ }
+
+ this.execFunction(module.__teardownTest__, module);
+ }
+
+ } else {
+ for (var test of module.__tests__) {
+ this.skipFunction(test, module.__setupModule__.__name__ + " failed");
+ }
+ }
+
+ this.execFunction(module.__teardownModule__, module);
+ events.endModule(module);
+};
+
+Runner.prototype.execFunction = function (func, arg) {
+ if (typeof func !== "function" || events.shutdownRequested) {
+ return true;
+ }
+
+ var isTest = withs.startsWith(func.__name__, "test");
+
+ events.setState(isTest ? "test" : func.__name);
+ events.setTest(func);
+
+ // skip excluded platforms
+ if (func.EXCLUDED_PLATFORMS != undefined) {
+ if (arrays.inArray(func.EXCLUDED_PLATFORMS, this.platform)) {
+ events.skip("Platform exclusion");
+ events.endTest(func);
+ return false;
+ }
+ }
+
+ // skip function if requested
+ if (func.__force_skip__ != undefined) {
+ events.skip(func.__force_skip__);
+ events.endTest(func);
+ return false;
+ }
+
+ // execute the test function
+ try {
+ func(arg);
+ } catch (e) {
+ if (e instanceof errors.ApplicationQuitError) {
+ events.shutdownRequested = true;
+ } else {
+ events.fail({'exception': e, 'test': func})
+ }
+ }
+
+ // If a user shutdown has been requested and the function already returned,
+ // we can assume that a shutdown will not happen anymore. We should force a
+ // shutdown then, to prevent the next test from being executed.
+ if (events.isUserShutdown()) {
+ events.shutdownRequested = true;
+ events.toggleUserShutdown(events.userShutdown);
+ }
+
+ events.endTest(func);
+ return events.currentTest.__fails__.length == 0;
+};
+
+function runTestFile(filename, name) {
+ var runner = new Runner();
+ runner.runTestFile(filename, name);
+ runner.end();
+
+ return true;
+}
+
+Runner.prototype.skipFunction = function (func, message) {
+ events.setTest(func);
+ events.skip(message);
+ events.endTest(func);
+};
diff --git a/services/sync/tps/extensions/mozmill/resource/modules/l10n.js b/services/sync/tps/extensions/mozmill/resource/modules/l10n.js
new file mode 100644
index 000000000..63a355421
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/modules/l10n.js
@@ -0,0 +1,71 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * @namespace Defines useful methods to work with localized content
+ */
+var l10n = exports;
+
+Cu.import("resource://gre/modules/Services.jsm");
+
+/**
+ * Retrieve the localized content for a given DTD entity
+ *
+ * @memberOf l10n
+ * @param {String[]} aDTDs Array of URLs for DTD files.
+ * @param {String} aEntityId ID of the entity to get the localized content of.
+ *
+ * @returns {String} Localized content
+ */
+function getEntity(aDTDs, aEntityId) {
+ // Add xhtml11.dtd to prevent missing entity errors with XHTML files
+ aDTDs.push("resource:///res/dtd/xhtml11.dtd");
+
+ // Build a string of external entities
+ var references = "";
+ for (i = 0; i < aDTDs.length; i++) {
+ var id = 'dtd' + i;
+ references += '<!ENTITY % ' + id + ' SYSTEM "' + aDTDs[i] + '">%' + id + ';';
+ }
+
+ var header = '<?xml version="1.0"?><!DOCTYPE elem [' + references + ']>';
+ var element = '<elem id="entity">&' + aEntityId + ';</elem>';
+ var content = header + element;
+
+ var parser = Cc["@mozilla.org/xmlextras/domparser;1"].
+ createInstance(Ci.nsIDOMParser);
+ var doc = parser.parseFromString(content, 'text/xml');
+ var node = doc.querySelector('elem[id="entity"]');
+
+ if (!node) {
+ throw new Error("Unkown entity '" + aEntityId + "'");
+ }
+
+ return node.textContent;
+}
+
+
+/**
+ * Retrieve the localized content for a given property
+ *
+ * @memberOf l10n
+ * @param {String} aURL URL of the .properties file.
+ * @param {String} aProperty The property to get the value of.
+ *
+ * @returns {String} Value of the requested property
+ */
+function getProperty(aURL, aProperty) {
+ var bundle = Services.strings.createBundle(aURL);
+
+ try {
+ return bundle.GetStringFromName(aProperty);
+ } catch (ex) {
+ throw new Error("Unkown property '" + aProperty + "'");
+ }
+}
+
+
+// Export of functions
+l10n.getEntity = getEntity;
+l10n.getProperty = getProperty;
diff --git a/services/sync/tps/extensions/mozmill/resource/modules/stack.js b/services/sync/tps/extensions/mozmill/resource/modules/stack.js
new file mode 100644
index 000000000..889316bf1
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/modules/stack.js
@@ -0,0 +1,43 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['findCallerFrame'];
+
+
+/**
+ * @namespace Defines utility methods for handling stack frames
+ */
+
+/**
+ * Find the frame to use for logging the test result. If a start frame has
+ * been specified, we walk down the stack until a frame with the same filename
+ * as the start frame has been found. The next file in the stack will be the
+ * frame to use for logging the result.
+ *
+ * @memberOf stack
+ * @param {Object} [aStartFrame=Components.stack] Frame to start from walking up the stack.
+ * @returns {Object} Frame of the stack to use for logging the result.
+ */
+function findCallerFrame(aStartFrame) {
+ let frame = Components.stack;
+ let filename = frame.filename.replace(/(.*)-> /, "");
+
+ // If a start frame has been specified, walk up the stack until we have
+ // found the corresponding file
+ if (aStartFrame) {
+ filename = aStartFrame.filename.replace(/(.*)-> /, "");
+
+ while (frame.caller &&
+ frame.filename && (frame.filename.indexOf(filename) == -1)) {
+ frame = frame.caller;
+ }
+ }
+
+ // Walk even up more until the next file has been found
+ while (frame.caller &&
+ (!frame.filename || (frame.filename.indexOf(filename) != -1)))
+ frame = frame.caller;
+
+ return frame;
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/modules/windows.js b/services/sync/tps/extensions/mozmill/resource/modules/windows.js
new file mode 100644
index 000000000..1c75a2d3d
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/modules/windows.js
@@ -0,0 +1,292 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ["init", "map"];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+// imports
+var utils = {}; Cu.import('resource://mozmill/stdlib/utils.js', utils);
+
+var uuidgen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
+
+/**
+ * The window map is used to store information about the current state of
+ * open windows, e.g. loaded state
+ */
+var map = {
+ _windows : { },
+
+ /**
+ * Check if a given window id is contained in the map of windows
+ *
+ * @param {Number} aWindowId
+ * Outer ID of the window to check.
+ * @returns {Boolean} True if the window is part of the map, otherwise false.
+ */
+ contains : function (aWindowId) {
+ return (aWindowId in this._windows);
+ },
+
+ /**
+ * Retrieve the value of the specified window's property.
+ *
+ * @param {Number} aWindowId
+ * Outer ID of the window to check.
+ * @param {String} aProperty
+ * Property to retrieve the value from
+ * @return {Object} Value of the window's property
+ */
+ getValue : function (aWindowId, aProperty) {
+ if (!this.contains(aWindowId)) {
+ return undefined;
+ } else {
+ var win = this._windows[aWindowId];
+
+ return (aProperty in win) ? win[aProperty]
+ : undefined;
+ }
+ },
+
+ /**
+ * Remove the entry for a given window
+ *
+ * @param {Number} aWindowId
+ * Outer ID of the window to check.
+ */
+ remove : function (aWindowId) {
+ if (this.contains(aWindowId)) {
+ delete this._windows[aWindowId];
+ }
+
+ // dump("* current map: " + JSON.stringify(this._windows) + "\n");
+ },
+
+ /**
+ * Update the property value of a given window
+ *
+ * @param {Number} aWindowId
+ * Outer ID of the window to check.
+ * @param {String} aProperty
+ * Property to update the value for
+ * @param {Object}
+ * Value to set
+ */
+ update : function (aWindowId, aProperty, aValue) {
+ if (!this.contains(aWindowId)) {
+ this._windows[aWindowId] = { };
+ }
+
+ this._windows[aWindowId][aProperty] = aValue;
+ // dump("* current map: " + JSON.stringify(this._windows) + "\n");
+ },
+
+ /**
+ * Update the internal loaded state of the given content window. To identify
+ * an active (re)load action we make use of an uuid.
+ *
+ * @param {Window} aId - The outer id of the window to update
+ * @param {Boolean} aIsLoaded - Has the window been loaded
+ */
+ updatePageLoadStatus : function (aId, aIsLoaded) {
+ this.update(aId, "loaded", aIsLoaded);
+
+ var uuid = this.getValue(aId, "id_load_in_transition");
+
+ // If no uuid has been set yet or when the page gets unloaded create a new id
+ if (!uuid || !aIsLoaded) {
+ uuid = uuidgen.generateUUID();
+ this.update(aId, "id_load_in_transition", uuid);
+ }
+
+ // dump("*** Page status updated: id=" + aId + ", loaded=" + aIsLoaded + ", uuid=" + uuid + "\n");
+ },
+
+ /**
+ * This method only applies to content windows, where we have to check if it has
+ * been successfully loaded or reloaded. An uuid allows us to wait for the next
+ * load action triggered by e.g. controller.open().
+ *
+ * @param {Window} aId - The outer id of the content window to check
+ *
+ * @returns {Boolean} True if the content window has been loaded
+ */
+ hasPageLoaded : function (aId) {
+ var load_current = this.getValue(aId, "id_load_in_transition");
+ var load_handled = this.getValue(aId, "id_load_handled");
+
+ var isLoaded = this.contains(aId) && this.getValue(aId, "loaded") &&
+ (load_current !== load_handled);
+
+ if (isLoaded) {
+ // Backup the current uuid so we can check later if another page load happened.
+ this.update(aId, "id_load_handled", load_current);
+ }
+
+ // dump("** Page has been finished loading: id=" + aId + ", status=" + isLoaded + ", uuid=" + load_current + "\n");
+
+ return isLoaded;
+ }
+};
+
+
+// Observer when a new top-level window is ready
+var windowReadyObserver = {
+ observe: function (aSubject, aTopic, aData) {
+ // Not in all cases we get a ChromeWindow. So ensure we really operate
+ // on such an instance. Otherwise load events will not be handled.
+ var win = utils.getChromeWindow(aSubject);
+
+ // var id = utils.getWindowId(win);
+ // dump("*** 'toplevel-window-ready' observer notification: id=" + id + "\n");
+ attachEventListeners(win);
+ }
+};
+
+
+// Observer when a top-level window is closed
+var windowCloseObserver = {
+ observe: function (aSubject, aTopic, aData) {
+ var id = utils.getWindowId(aSubject);
+ // dump("*** 'outer-window-destroyed' observer notification: id=" + id + "\n");
+
+ map.remove(id);
+ }
+};
+
+// Bug 915554
+// Support for the old Private Browsing Mode (eg. ESR17)
+// TODO: remove once ESR17 is no longer supported
+var enterLeavePrivateBrowsingObserver = {
+ observe: function (aSubject, aTopic, aData) {
+ handleAttachEventListeners();
+ }
+};
+
+/**
+ * Attach event listeners
+ *
+ * @param {ChromeWindow} aWindow
+ * Window to attach listeners on.
+ */
+function attachEventListeners(aWindow) {
+ // These are the event handlers
+ var pageShowHandler = function (aEvent) {
+ var doc = aEvent.originalTarget;
+
+ // Only update the flag if we have a document as target
+ // see https://bugzilla.mozilla.org/show_bug.cgi?id=690829
+ if ("defaultView" in doc) {
+ var id = utils.getWindowId(doc.defaultView);
+ // dump("*** 'pageshow' event: id=" + id + ", baseURI=" + doc.baseURI + "\n");
+ map.updatePageLoadStatus(id, true);
+ }
+
+ // We need to add/remove the unload/pagehide event listeners to preserve caching.
+ aWindow.addEventListener("beforeunload", beforeUnloadHandler, true);
+ aWindow.addEventListener("pagehide", pageHideHandler, true);
+ };
+
+ var DOMContentLoadedHandler = function (aEvent) {
+ var doc = aEvent.originalTarget;
+
+ // Only update the flag if we have a document as target
+ if ("defaultView" in doc) {
+ var id = utils.getWindowId(doc.defaultView);
+ // dump("*** 'DOMContentLoaded' event: id=" + id + ", baseURI=" + doc.baseURI + "\n");
+
+ // We only care about error pages for DOMContentLoaded
+ var errorRegex = /about:.+(error)|(blocked)\?/;
+ if (errorRegex.exec(doc.baseURI)) {
+ // Wait about 1s to be sure the DOM is ready
+ utils.sleep(1000);
+
+ map.updatePageLoadStatus(id, true);
+ }
+
+ // We need to add/remove the unload event listener to preserve caching.
+ aWindow.addEventListener("beforeunload", beforeUnloadHandler, true);
+ }
+ };
+
+ // beforeunload is still needed because pagehide doesn't fire before the page is unloaded.
+ // still use pagehide for cases when beforeunload doesn't get fired
+ var beforeUnloadHandler = function (aEvent) {
+ var doc = aEvent.originalTarget;
+
+ // Only update the flag if we have a document as target
+ if ("defaultView" in doc) {
+ var id = utils.getWindowId(doc.defaultView);
+ // dump("*** 'beforeunload' event: id=" + id + ", baseURI=" + doc.baseURI + "\n");
+ map.updatePageLoadStatus(id, false);
+ }
+
+ aWindow.removeEventListener("beforeunload", beforeUnloadHandler, true);
+ };
+
+ var pageHideHandler = function (aEvent) {
+ var doc = aEvent.originalTarget;
+
+ // Only update the flag if we have a document as target
+ if ("defaultView" in doc) {
+ var id = utils.getWindowId(doc.defaultView);
+ // dump("*** 'pagehide' event: id=" + id + ", baseURI=" + doc.baseURI + "\n");
+ map.updatePageLoadStatus(id, false);
+ }
+ // If event.persisted is true the beforeUnloadHandler would never fire
+ // and we have to remove the event handler here to avoid memory leaks.
+ if (aEvent.persisted)
+ aWindow.removeEventListener("beforeunload", beforeUnloadHandler, true);
+ };
+
+ var onWindowLoaded = function (aEvent) {
+ var id = utils.getWindowId(aWindow);
+ // dump("*** 'load' event: id=" + id + ", baseURI=" + aWindow.document.baseURI + "\n");
+
+ map.update(id, "loaded", true);
+
+ // Note: Error pages will never fire a "pageshow" event. For those we
+ // have to wait for the "DOMContentLoaded" event. That's the final state.
+ // Error pages will always have a baseURI starting with
+ // "about:" followed by "error" or "blocked".
+ aWindow.addEventListener("DOMContentLoaded", DOMContentLoadedHandler, true);
+
+ // Page is ready
+ aWindow.addEventListener("pageshow", pageShowHandler, true);
+
+ // Leave page (use caching)
+ aWindow.addEventListener("pagehide", pageHideHandler, true);
+ };
+
+ // If the window has already been finished loading, call the load handler
+ // directly. Otherwise attach it to the current window.
+ if (aWindow.document.readyState === 'complete') {
+ onWindowLoaded();
+ } else {
+ aWindow.addEventListener("load", onWindowLoaded, false);
+ }
+}
+
+// Attach event listeners to all already open top-level windows
+function handleAttachEventListeners() {
+ var enumerator = Cc["@mozilla.org/appshell/window-mediator;1"].
+ getService(Ci.nsIWindowMediator).getEnumerator("");
+ while (enumerator.hasMoreElements()) {
+ var win = enumerator.getNext();
+ attachEventListeners(win);
+ }
+}
+
+function init() {
+ // Activate observer for new top level windows
+ var observerService = Cc["@mozilla.org/observer-service;1"].
+ getService(Ci.nsIObserverService);
+ observerService.addObserver(windowReadyObserver, "toplevel-window-ready", false);
+ observerService.addObserver(windowCloseObserver, "outer-window-destroyed", false);
+ observerService.addObserver(enterLeavePrivateBrowsingObserver, "private-browsing", false);
+
+ handleAttachEventListeners();
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js b/services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js
new file mode 100644
index 000000000..7f08469f0
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/EventUtils.js
@@ -0,0 +1,823 @@
+// Export all available functions for Mozmill
+var EXPORTED_SYMBOLS = ["disableNonTestMouseEvents","sendMouseEvent", "sendChar",
+ "sendString", "sendKey", "synthesizeMouse", "synthesizeTouch",
+ "synthesizeMouseAtPoint", "synthesizeTouchAtPoint",
+ "synthesizeMouseAtCenter", "synthesizeTouchAtCenter",
+ "synthesizeWheel", "synthesizeKey",
+ "synthesizeMouseExpectEvent", "synthesizeKeyExpectEvent",
+ "synthesizeText",
+ "synthesizeComposition", "synthesizeQuerySelectedText"];
+
+var Ci = Components.interfaces;
+var Cc = Components.classes;
+
+var window = Cc["@mozilla.org/appshell/appShellService;1"]
+ .getService(Ci.nsIAppShellService).hiddenDOMWindow;
+
+var _EU_Ci = Ci;
+var navigator = window.navigator;
+var KeyEvent = window.KeyEvent;
+var parent = window.parent;
+
+function is(aExpression1, aExpression2, aMessage) {
+ if (aExpression1 !== aExpression2) {
+ throw new Error(aMessage);
+ }
+}
+
+/**
+ * EventUtils provides some utility methods for creating and sending DOM events.
+ * Current methods:
+ * sendMouseEvent
+ * sendChar
+ * sendString
+ * sendKey
+ * synthesizeMouse
+ * synthesizeMouseAtCenter
+ * synthesizeWheel
+ * synthesizeKey
+ * synthesizeMouseExpectEvent
+ * synthesizeKeyExpectEvent
+ *
+ * When adding methods to this file, please add a performance test for it.
+ */
+
+/**
+ * Send a mouse event to the node aTarget (aTarget can be an id, or an
+ * actual node) . The "event" passed in to aEvent is just a JavaScript
+ * object with the properties set that the real mouse event object should
+ * have. This includes the type of the mouse event.
+ * E.g. to send an click event to the node with id 'node' you might do this:
+ *
+ * sendMouseEvent({type:'click'}, 'node');
+ */
+function getElement(id) {
+ return ((typeof(id) == "string") ?
+ document.getElementById(id) : id);
+};
+
+this.$ = this.getElement;
+
+function sendMouseEvent(aEvent, aTarget, aWindow) {
+ if (['click', 'dblclick', 'mousedown', 'mouseup', 'mouseover', 'mouseout'].indexOf(aEvent.type) == -1) {
+ throw new Error("sendMouseEvent doesn't know about event type '" + aEvent.type + "'");
+ }
+
+ if (!aWindow) {
+ aWindow = window;
+ }
+
+ if (!(aTarget instanceof aWindow.Element)) {
+ aTarget = aWindow.document.getElementById(aTarget);
+ }
+
+ var event = aWindow.document.createEvent('MouseEvent');
+
+ var typeArg = aEvent.type;
+ var canBubbleArg = true;
+ var cancelableArg = true;
+ var viewArg = aWindow;
+ var detailArg = aEvent.detail || (aEvent.type == 'click' ||
+ aEvent.type == 'mousedown' ||
+ aEvent.type == 'mouseup' ? 1 :
+ aEvent.type == 'dblclick'? 2 : 0);
+ var screenXArg = aEvent.screenX || 0;
+ var screenYArg = aEvent.screenY || 0;
+ var clientXArg = aEvent.clientX || 0;
+ var clientYArg = aEvent.clientY || 0;
+ var ctrlKeyArg = aEvent.ctrlKey || false;
+ var altKeyArg = aEvent.altKey || false;
+ var shiftKeyArg = aEvent.shiftKey || false;
+ var metaKeyArg = aEvent.metaKey || false;
+ var buttonArg = aEvent.button || 0;
+ var relatedTargetArg = aEvent.relatedTarget || null;
+
+ event.initMouseEvent(typeArg, canBubbleArg, cancelableArg, viewArg, detailArg,
+ screenXArg, screenYArg, clientXArg, clientYArg,
+ ctrlKeyArg, altKeyArg, shiftKeyArg, metaKeyArg,
+ buttonArg, relatedTargetArg);
+
+ SpecialPowers.dispatchEvent(aWindow, aTarget, event);
+}
+
+/**
+ * Send the char aChar to the focused element. This method handles casing of
+ * chars (sends the right charcode, and sends a shift key for uppercase chars).
+ * No other modifiers are handled at this point.
+ *
+ * For now this method only works for ASCII characters and emulates the shift
+ * key state on US keyboard layout.
+ */
+function sendChar(aChar, aWindow) {
+ var hasShift;
+ // Emulate US keyboard layout for the shiftKey state.
+ switch (aChar) {
+ case "!":
+ case "@":
+ case "#":
+ case "$":
+ case "%":
+ case "^":
+ case "&":
+ case "*":
+ case "(":
+ case ")":
+ case "_":
+ case "+":
+ case "{":
+ case "}":
+ case ":":
+ case "\"":
+ case "|":
+ case "<":
+ case ">":
+ case "?":
+ hasShift = true;
+ break;
+ default:
+ hasShift = (aChar == aChar.toUpperCase());
+ break;
+ }
+ synthesizeKey(aChar, { shiftKey: hasShift }, aWindow);
+}
+
+/**
+ * Send the string aStr to the focused element.
+ *
+ * For now this method only works for ASCII characters and emulates the shift
+ * key state on US keyboard layout.
+ */
+function sendString(aStr, aWindow) {
+ for (var i = 0; i < aStr.length; ++i) {
+ sendChar(aStr.charAt(i), aWindow);
+ }
+}
+
+/**
+ * Send the non-character key aKey to the focused node.
+ * The name of the key should be the part that comes after "DOM_VK_" in the
+ * KeyEvent constant name for this key.
+ * No modifiers are handled at this point.
+ */
+function sendKey(aKey, aWindow) {
+ var keyName = "VK_" + aKey.toUpperCase();
+ synthesizeKey(keyName, { shiftKey: false }, aWindow);
+}
+
+/**
+ * Parse the key modifier flags from aEvent. Used to share code between
+ * synthesizeMouse and synthesizeKey.
+ */
+function _parseModifiers(aEvent)
+{
+ const nsIDOMWindowUtils = _EU_Ci.nsIDOMWindowUtils;
+ var mval = 0;
+ if (aEvent.shiftKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_SHIFT;
+ }
+ if (aEvent.ctrlKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_CONTROL;
+ }
+ if (aEvent.altKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_ALT;
+ }
+ if (aEvent.metaKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_META;
+ }
+ if (aEvent.accelKey) {
+ mval |= (navigator.platform.indexOf("Mac") >= 0) ?
+ nsIDOMWindowUtils.MODIFIER_META : nsIDOMWindowUtils.MODIFIER_CONTROL;
+ }
+ if (aEvent.altGrKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_ALTGRAPH;
+ }
+ if (aEvent.capsLockKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_CAPSLOCK;
+ }
+ if (aEvent.fnKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_FN;
+ }
+ if (aEvent.numLockKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_NUMLOCK;
+ }
+ if (aEvent.scrollLockKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_SCROLLLOCK;
+ }
+ if (aEvent.symbolLockKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_SYMBOLLOCK;
+ }
+ if (aEvent.osKey) {
+ mval |= nsIDOMWindowUtils.MODIFIER_OS;
+ }
+
+ return mval;
+}
+
+/**
+ * Synthesize a mouse event on a target. The actual client point is determined
+ * by taking the aTarget's client box and offseting it by aOffsetX and
+ * aOffsetY. This allows mouse clicks to be simulated by calling this method.
+ *
+ * aEvent is an object which may contain the properties:
+ * shiftKey, ctrlKey, altKey, metaKey, accessKey, clickCount, button, type
+ *
+ * If the type is specified, an mouse event of that type is fired. Otherwise,
+ * a mousedown followed by a mouse up is performed.
+ *
+ * aWindow is optional, and defaults to the current window object.
+ *
+ * Returns whether the event had preventDefault() called on it.
+ */
+function synthesizeMouse(aTarget, aOffsetX, aOffsetY, aEvent, aWindow)
+{
+ var rect = aTarget.getBoundingClientRect();
+ return synthesizeMouseAtPoint(rect.left + aOffsetX, rect.top + aOffsetY,
+ aEvent, aWindow);
+}
+function synthesizeTouch(aTarget, aOffsetX, aOffsetY, aEvent, aWindow)
+{
+ var rect = aTarget.getBoundingClientRect();
+ synthesizeTouchAtPoint(rect.left + aOffsetX, rect.top + aOffsetY,
+ aEvent, aWindow);
+}
+
+/*
+ * Synthesize a mouse event at a particular point in aWindow.
+ *
+ * aEvent is an object which may contain the properties:
+ * shiftKey, ctrlKey, altKey, metaKey, accessKey, clickCount, button, type
+ *
+ * If the type is specified, an mouse event of that type is fired. Otherwise,
+ * a mousedown followed by a mouse up is performed.
+ *
+ * aWindow is optional, and defaults to the current window object.
+ */
+function synthesizeMouseAtPoint(left, top, aEvent, aWindow)
+{
+ var utils = _getDOMWindowUtils(aWindow);
+ var defaultPrevented = false;
+
+ if (utils) {
+ var button = aEvent.button || 0;
+ var clickCount = aEvent.clickCount || 1;
+ var modifiers = _parseModifiers(aEvent);
+ var pressure = ("pressure" in aEvent) ? aEvent.pressure : 0;
+ var inputSource = ("inputSource" in aEvent) ? aEvent.inputSource : 0;
+
+ if (("type" in aEvent) && aEvent.type) {
+ defaultPrevented = utils.sendMouseEvent(aEvent.type, left, top, button, clickCount, modifiers, false, pressure, inputSource);
+ }
+ else {
+ utils.sendMouseEvent("mousedown", left, top, button, clickCount, modifiers, false, pressure, inputSource);
+ utils.sendMouseEvent("mouseup", left, top, button, clickCount, modifiers, false, pressure, inputSource);
+ }
+ }
+
+ return defaultPrevented;
+}
+function synthesizeTouchAtPoint(left, top, aEvent, aWindow)
+{
+ var utils = _getDOMWindowUtils(aWindow);
+
+ if (utils) {
+ var id = aEvent.id || 0;
+ var rx = aEvent.rx || 1;
+ var ry = aEvent.rx || 1;
+ var angle = aEvent.angle || 0;
+ var force = aEvent.force || 1;
+ var modifiers = _parseModifiers(aEvent);
+
+ if (("type" in aEvent) && aEvent.type) {
+ utils.sendTouchEvent(aEvent.type, [id], [left], [top], [rx], [ry], [angle], [force], 1, modifiers);
+ }
+ else {
+ utils.sendTouchEvent("touchstart", [id], [left], [top], [rx], [ry], [angle], [force], 1, modifiers);
+ utils.sendTouchEvent("touchend", [id], [left], [top], [rx], [ry], [angle], [force], 1, modifiers);
+ }
+ }
+}
+// Call synthesizeMouse with coordinates at the center of aTarget.
+function synthesizeMouseAtCenter(aTarget, aEvent, aWindow)
+{
+ var rect = aTarget.getBoundingClientRect();
+ synthesizeMouse(aTarget, rect.width / 2, rect.height / 2, aEvent,
+ aWindow);
+}
+function synthesizeTouchAtCenter(aTarget, aEvent, aWindow)
+{
+ var rect = aTarget.getBoundingClientRect();
+ synthesizeTouch(aTarget, rect.width / 2, rect.height / 2, aEvent,
+ aWindow);
+}
+
+/**
+ * Synthesize a wheel event on a target. The actual client point is determined
+ * by taking the aTarget's client box and offseting it by aOffsetX and
+ * aOffsetY.
+ *
+ * aEvent is an object which may contain the properties:
+ * shiftKey, ctrlKey, altKey, metaKey, accessKey, deltaX, deltaY, deltaZ,
+ * deltaMode, lineOrPageDeltaX, lineOrPageDeltaY, isMomentum, isPixelOnlyDevice,
+ * isCustomizedByPrefs, expectedOverflowDeltaX, expectedOverflowDeltaY
+ *
+ * deltaMode must be defined, others are ok even if undefined.
+ *
+ * expectedOverflowDeltaX and expectedOverflowDeltaY take integer value. The
+ * value is just checked as 0 or positive or negative.
+ *
+ * aWindow is optional, and defaults to the current window object.
+ */
+function synthesizeWheel(aTarget, aOffsetX, aOffsetY, aEvent, aWindow)
+{
+ var utils = _getDOMWindowUtils(aWindow);
+ if (!utils) {
+ return;
+ }
+
+ var modifiers = _parseModifiers(aEvent);
+ var options = 0;
+ if (aEvent.isPixelOnlyDevice &&
+ (aEvent.deltaMode == WheelEvent.DOM_DELTA_PIXEL)) {
+ options |= utils.WHEEL_EVENT_CAUSED_BY_PIXEL_ONLY_DEVICE;
+ }
+ if (aEvent.isMomentum) {
+ options |= utils.WHEEL_EVENT_CAUSED_BY_MOMENTUM;
+ }
+ if (aEvent.isCustomizedByPrefs) {
+ options |= utils.WHEEL_EVENT_CUSTOMIZED_BY_USER_PREFS;
+ }
+ if (typeof aEvent.expectedOverflowDeltaX !== "undefined") {
+ if (aEvent.expectedOverflowDeltaX === 0) {
+ options |= utils.WHEEL_EVENT_EXPECTED_OVERFLOW_DELTA_X_ZERO;
+ } else if (aEvent.expectedOverflowDeltaX > 0) {
+ options |= utils.WHEEL_EVENT_EXPECTED_OVERFLOW_DELTA_X_POSITIVE;
+ } else {
+ options |= utils.WHEEL_EVENT_EXPECTED_OVERFLOW_DELTA_X_NEGATIVE;
+ }
+ }
+ if (typeof aEvent.expectedOverflowDeltaY !== "undefined") {
+ if (aEvent.expectedOverflowDeltaY === 0) {
+ options |= utils.WHEEL_EVENT_EXPECTED_OVERFLOW_DELTA_Y_ZERO;
+ } else if (aEvent.expectedOverflowDeltaY > 0) {
+ options |= utils.WHEEL_EVENT_EXPECTED_OVERFLOW_DELTA_Y_POSITIVE;
+ } else {
+ options |= utils.WHEEL_EVENT_EXPECTED_OVERFLOW_DELTA_Y_NEGATIVE;
+ }
+ }
+ var isPixelOnlyDevice =
+ aEvent.isPixelOnlyDevice && aEvent.deltaMode == WheelEvent.DOM_DELTA_PIXEL;
+
+ // Avoid the JS warnings "reference to undefined property"
+ if (!aEvent.deltaX) {
+ aEvent.deltaX = 0;
+ }
+ if (!aEvent.deltaY) {
+ aEvent.deltaY = 0;
+ }
+ if (!aEvent.deltaZ) {
+ aEvent.deltaZ = 0;
+ }
+
+ var lineOrPageDeltaX =
+ aEvent.lineOrPageDeltaX != null ? aEvent.lineOrPageDeltaX :
+ aEvent.deltaX > 0 ? Math.floor(aEvent.deltaX) :
+ Math.ceil(aEvent.deltaX);
+ var lineOrPageDeltaY =
+ aEvent.lineOrPageDeltaY != null ? aEvent.lineOrPageDeltaY :
+ aEvent.deltaY > 0 ? Math.floor(aEvent.deltaY) :
+ Math.ceil(aEvent.deltaY);
+
+ var rect = aTarget.getBoundingClientRect();
+ utils.sendWheelEvent(rect.left + aOffsetX, rect.top + aOffsetY,
+ aEvent.deltaX, aEvent.deltaY, aEvent.deltaZ,
+ aEvent.deltaMode, modifiers,
+ lineOrPageDeltaX, lineOrPageDeltaY, options);
+}
+
+function _computeKeyCodeFromChar(aChar)
+{
+ if (aChar.length != 1) {
+ return 0;
+ }
+ const nsIDOMKeyEvent = _EU_Ci.nsIDOMKeyEvent;
+ if (aChar >= 'a' && aChar <= 'z') {
+ return nsIDOMKeyEvent.DOM_VK_A + aChar.charCodeAt(0) - 'a'.charCodeAt(0);
+ }
+ if (aChar >= 'A' && aChar <= 'Z') {
+ return nsIDOMKeyEvent.DOM_VK_A + aChar.charCodeAt(0) - 'A'.charCodeAt(0);
+ }
+ if (aChar >= '0' && aChar <= '9') {
+ return nsIDOMKeyEvent.DOM_VK_0 + aChar.charCodeAt(0) - '0'.charCodeAt(0);
+ }
+ // returns US keyboard layout's keycode
+ switch (aChar) {
+ case '~':
+ case '`':
+ return nsIDOMKeyEvent.DOM_VK_BACK_QUOTE;
+ case '!':
+ return nsIDOMKeyEvent.DOM_VK_1;
+ case '@':
+ return nsIDOMKeyEvent.DOM_VK_2;
+ case '#':
+ return nsIDOMKeyEvent.DOM_VK_3;
+ case '$':
+ return nsIDOMKeyEvent.DOM_VK_4;
+ case '%':
+ return nsIDOMKeyEvent.DOM_VK_5;
+ case '^':
+ return nsIDOMKeyEvent.DOM_VK_6;
+ case '&':
+ return nsIDOMKeyEvent.DOM_VK_7;
+ case '*':
+ return nsIDOMKeyEvent.DOM_VK_8;
+ case '(':
+ return nsIDOMKeyEvent.DOM_VK_9;
+ case ')':
+ return nsIDOMKeyEvent.DOM_VK_0;
+ case '-':
+ case '_':
+ return nsIDOMKeyEvent.DOM_VK_SUBTRACT;
+ case '+':
+ case '=':
+ return nsIDOMKeyEvent.DOM_VK_EQUALS;
+ case '{':
+ case '[':
+ return nsIDOMKeyEvent.DOM_VK_OPEN_BRACKET;
+ case '}':
+ case ']':
+ return nsIDOMKeyEvent.DOM_VK_CLOSE_BRACKET;
+ case '|':
+ case '\\':
+ return nsIDOMKeyEvent.DOM_VK_BACK_SLASH;
+ case ':':
+ case ';':
+ return nsIDOMKeyEvent.DOM_VK_SEMICOLON;
+ case '\'':
+ case '"':
+ return nsIDOMKeyEvent.DOM_VK_QUOTE;
+ case '<':
+ case ',':
+ return nsIDOMKeyEvent.DOM_VK_COMMA;
+ case '>':
+ case '.':
+ return nsIDOMKeyEvent.DOM_VK_PERIOD;
+ case '?':
+ case '/':
+ return nsIDOMKeyEvent.DOM_VK_SLASH;
+ default:
+ return 0;
+ }
+}
+
+/**
+ * isKeypressFiredKey() returns TRUE if the given key should cause keypress
+ * event when widget handles the native key event. Otherwise, FALSE.
+ *
+ * aDOMKeyCode should be one of consts of nsIDOMKeyEvent::DOM_VK_*, or a key
+ * name begins with "VK_", or a character.
+ */
+function isKeypressFiredKey(aDOMKeyCode)
+{
+ if (typeof(aDOMKeyCode) == "string") {
+ if (aDOMKeyCode.indexOf("VK_") == 0) {
+ aDOMKeyCode = KeyEvent["DOM_" + aDOMKeyCode];
+ if (!aDOMKeyCode) {
+ throw "Unknown key: " + aDOMKeyCode;
+ }
+ } else {
+ // If the key generates a character, it must cause a keypress event.
+ return true;
+ }
+ }
+ switch (aDOMKeyCode) {
+ case KeyEvent.DOM_VK_SHIFT:
+ case KeyEvent.DOM_VK_CONTROL:
+ case KeyEvent.DOM_VK_ALT:
+ case KeyEvent.DOM_VK_CAPS_LOCK:
+ case KeyEvent.DOM_VK_NUM_LOCK:
+ case KeyEvent.DOM_VK_SCROLL_LOCK:
+ case KeyEvent.DOM_VK_META:
+ return false;
+ default:
+ return true;
+ }
+}
+
+/**
+ * Synthesize a key event. It is targeted at whatever would be targeted by an
+ * actual keypress by the user, typically the focused element.
+ *
+ * aKey should be either a character or a keycode starting with VK_ such as
+ * VK_ENTER.
+ *
+ * aEvent is an object which may contain the properties:
+ * shiftKey, ctrlKey, altKey, metaKey, accessKey, type, location
+ *
+ * Sets one of KeyboardEvent.DOM_KEY_LOCATION_* to location. Otherwise,
+ * DOMWindowUtils will choose good location from the keycode.
+ *
+ * If the type is specified, a key event of that type is fired. Otherwise,
+ * a keydown, a keypress and then a keyup event are fired in sequence.
+ *
+ * aWindow is optional, and defaults to the current window object.
+ */
+function synthesizeKey(aKey, aEvent, aWindow)
+{
+ var utils = _getDOMWindowUtils(aWindow);
+ if (utils) {
+ var keyCode = 0, charCode = 0;
+ if (aKey.indexOf("VK_") == 0) {
+ keyCode = KeyEvent["DOM_" + aKey];
+ if (!keyCode) {
+ throw "Unknown key: " + aKey;
+ }
+ } else {
+ charCode = aKey.charCodeAt(0);
+ keyCode = _computeKeyCodeFromChar(aKey.charAt(0));
+ }
+
+ var modifiers = _parseModifiers(aEvent);
+ var flags = 0;
+ if (aEvent.location != undefined) {
+ switch (aEvent.location) {
+ case KeyboardEvent.DOM_KEY_LOCATION_STANDARD:
+ flags |= utils.KEY_FLAG_LOCATION_STANDARD;
+ break;
+ case KeyboardEvent.DOM_KEY_LOCATION_LEFT:
+ flags |= utils.KEY_FLAG_LOCATION_LEFT;
+ break;
+ case KeyboardEvent.DOM_KEY_LOCATION_RIGHT:
+ flags |= utils.KEY_FLAG_LOCATION_RIGHT;
+ break;
+ case KeyboardEvent.DOM_KEY_LOCATION_NUMPAD:
+ flags |= utils.KEY_FLAG_LOCATION_NUMPAD;
+ break;
+ }
+ }
+
+ if (!("type" in aEvent) || !aEvent.type) {
+ // Send keydown + (optional) keypress + keyup events.
+ var keyDownDefaultHappened =
+ utils.sendKeyEvent("keydown", keyCode, 0, modifiers, flags);
+ if (isKeypressFiredKey(keyCode)) {
+ if (!keyDownDefaultHappened) {
+ flags |= utils.KEY_FLAG_PREVENT_DEFAULT;
+ }
+ utils.sendKeyEvent("keypress", keyCode, charCode, modifiers, flags);
+ }
+ utils.sendKeyEvent("keyup", keyCode, 0, modifiers, flags);
+ } else if (aEvent.type == "keypress") {
+ // Send standalone keypress event.
+ utils.sendKeyEvent(aEvent.type, keyCode, charCode, modifiers, flags);
+ } else {
+ // Send other standalone event than keypress.
+ utils.sendKeyEvent(aEvent.type, keyCode, 0, modifiers, flags);
+ }
+ }
+}
+
+var _gSeenEvent = false;
+
+/**
+ * Indicate that an event with an original target of aExpectedTarget and
+ * a type of aExpectedEvent is expected to be fired, or not expected to
+ * be fired.
+ */
+function _expectEvent(aExpectedTarget, aExpectedEvent, aTestName)
+{
+ if (!aExpectedTarget || !aExpectedEvent)
+ return null;
+
+ _gSeenEvent = false;
+
+ var type = (aExpectedEvent.charAt(0) == "!") ?
+ aExpectedEvent.substring(1) : aExpectedEvent;
+ var eventHandler = function(event) {
+ var epassed = (!_gSeenEvent && event.originalTarget == aExpectedTarget &&
+ event.type == type);
+ is(epassed, true, aTestName + " " + type + " event target " + (_gSeenEvent ? "twice" : ""));
+ _gSeenEvent = true;
+ };
+
+ aExpectedTarget.addEventListener(type, eventHandler, false);
+ return eventHandler;
+}
+
+/**
+ * Check if the event was fired or not. The event handler aEventHandler
+ * will be removed.
+ */
+function _checkExpectedEvent(aExpectedTarget, aExpectedEvent, aEventHandler, aTestName)
+{
+ if (aEventHandler) {
+ var expectEvent = (aExpectedEvent.charAt(0) != "!");
+ var type = expectEvent ? aExpectedEvent : aExpectedEvent.substring(1);
+ aExpectedTarget.removeEventListener(type, aEventHandler, false);
+ var desc = type + " event";
+ if (!expectEvent)
+ desc += " not";
+ is(_gSeenEvent, expectEvent, aTestName + " " + desc + " fired");
+ }
+
+ _gSeenEvent = false;
+}
+
+/**
+ * Similar to synthesizeMouse except that a test is performed to see if an
+ * event is fired at the right target as a result.
+ *
+ * aExpectedTarget - the expected originalTarget of the event.
+ * aExpectedEvent - the expected type of the event, such as 'select'.
+ * aTestName - the test name when outputing results
+ *
+ * To test that an event is not fired, use an expected type preceded by an
+ * exclamation mark, such as '!select'. This might be used to test that a
+ * click on a disabled element doesn't fire certain events for instance.
+ *
+ * aWindow is optional, and defaults to the current window object.
+ */
+function synthesizeMouseExpectEvent(aTarget, aOffsetX, aOffsetY, aEvent,
+ aExpectedTarget, aExpectedEvent, aTestName,
+ aWindow)
+{
+ var eventHandler = _expectEvent(aExpectedTarget, aExpectedEvent, aTestName);
+ synthesizeMouse(aTarget, aOffsetX, aOffsetY, aEvent, aWindow);
+ _checkExpectedEvent(aExpectedTarget, aExpectedEvent, eventHandler, aTestName);
+}
+
+/**
+ * Similar to synthesizeKey except that a test is performed to see if an
+ * event is fired at the right target as a result.
+ *
+ * aExpectedTarget - the expected originalTarget of the event.
+ * aExpectedEvent - the expected type of the event, such as 'select'.
+ * aTestName - the test name when outputing results
+ *
+ * To test that an event is not fired, use an expected type preceded by an
+ * exclamation mark, such as '!select'.
+ *
+ * aWindow is optional, and defaults to the current window object.
+ */
+function synthesizeKeyExpectEvent(key, aEvent, aExpectedTarget, aExpectedEvent,
+ aTestName, aWindow)
+{
+ var eventHandler = _expectEvent(aExpectedTarget, aExpectedEvent, aTestName);
+ synthesizeKey(key, aEvent, aWindow);
+ _checkExpectedEvent(aExpectedTarget, aExpectedEvent, eventHandler, aTestName);
+}
+
+function disableNonTestMouseEvents(aDisable)
+{
+ var domutils = _getDOMWindowUtils();
+ domutils.disableNonTestMouseEvents(aDisable);
+}
+
+function _getDOMWindowUtils(aWindow)
+{
+ if (!aWindow) {
+ aWindow = window;
+ }
+
+ // we need parent.SpecialPowers for:
+ // layout/base/tests/test_reftests_with_caret.html
+ // chrome: toolkit/content/tests/chrome/test_findbar.xul
+ // chrome: toolkit/content/tests/chrome/test_popup_anchor.xul
+ if ("SpecialPowers" in window && window.SpecialPowers != undefined) {
+ return SpecialPowers.getDOMWindowUtils(aWindow);
+ }
+ if ("SpecialPowers" in parent && parent.SpecialPowers != undefined) {
+ return parent.SpecialPowers.getDOMWindowUtils(aWindow);
+ }
+
+ //TODO: this is assuming we are in chrome space
+ return aWindow.QueryInterface(_EU_Ci.nsIInterfaceRequestor).
+ getInterface(_EU_Ci.nsIDOMWindowUtils);
+}
+
+// Must be synchronized with nsIDOMWindowUtils.
+const COMPOSITION_ATTR_RAWINPUT = 0x02;
+const COMPOSITION_ATTR_SELECTEDRAWTEXT = 0x03;
+const COMPOSITION_ATTR_CONVERTEDTEXT = 0x04;
+const COMPOSITION_ATTR_SELECTEDCONVERTEDTEXT = 0x05;
+
+/**
+ * Synthesize a composition event.
+ *
+ * @param aEvent The composition event information. This must
+ * have |type| member. The value must be
+ * "compositionstart", "compositionend" or
+ * "compositionupdate".
+ * And also this may have |data| and |locale| which
+ * would be used for the value of each property of
+ * the composition event. Note that the data would
+ * be ignored if the event type were
+ * "compositionstart".
+ * @param aWindow Optional (If null, current |window| will be used)
+ */
+function synthesizeComposition(aEvent, aWindow)
+{
+ var utils = _getDOMWindowUtils(aWindow);
+ if (!utils) {
+ return;
+ }
+
+ utils.sendCompositionEvent(aEvent.type, aEvent.data ? aEvent.data : "",
+ aEvent.locale ? aEvent.locale : "");
+}
+/**
+ * Synthesize a text event.
+ *
+ * @param aEvent The text event's information, this has |composition|
+ * and |caret| members. |composition| has |string| and
+ * |clauses| members. |clauses| must be array object. Each
+ * object has |length| and |attr|. And |caret| has |start| and
+ * |length|. See the following tree image.
+ *
+ * aEvent
+ * +-- composition
+ * | +-- string
+ * | +-- clauses[]
+ * | +-- length
+ * | +-- attr
+ * +-- caret
+ * +-- start
+ * +-- length
+ *
+ * Set the composition string to |composition.string|. Set its
+ * clauses information to the |clauses| array.
+ *
+ * When it's composing, set the each clauses' length to the
+ * |composition.clauses[n].length|. The sum of the all length
+ * values must be same as the length of |composition.string|.
+ * Set nsIDOMWindowUtils.COMPOSITION_ATTR_* to the
+ * |composition.clauses[n].attr|.
+ *
+ * When it's not composing, set 0 to the
+ * |composition.clauses[0].length| and
+ * |composition.clauses[0].attr|.
+ *
+ * Set caret position to the |caret.start|. It's offset from
+ * the start of the composition string. Set caret length to
+ * |caret.length|. If it's larger than 0, it should be wide
+ * caret. However, current nsEditor doesn't support wide
+ * caret, therefore, you should always set 0 now.
+ *
+ * @param aWindow Optional (If null, current |window| will be used)
+ */
+function synthesizeText(aEvent, aWindow)
+{
+ var utils = _getDOMWindowUtils(aWindow);
+ if (!utils) {
+ return;
+ }
+
+ if (!aEvent.composition || !aEvent.composition.clauses ||
+ !aEvent.composition.clauses[0]) {
+ return;
+ }
+
+ var firstClauseLength = aEvent.composition.clauses[0].length;
+ var firstClauseAttr = aEvent.composition.clauses[0].attr;
+ var secondClauseLength = 0;
+ var secondClauseAttr = 0;
+ var thirdClauseLength = 0;
+ var thirdClauseAttr = 0;
+ if (aEvent.composition.clauses[1]) {
+ secondClauseLength = aEvent.composition.clauses[1].length;
+ secondClauseAttr = aEvent.composition.clauses[1].attr;
+ if (aEvent.composition.clauses[2]) {
+ thirdClauseLength = aEvent.composition.clauses[2].length;
+ thirdClauseAttr = aEvent.composition.clauses[2].attr;
+ }
+ }
+
+ var caretStart = -1;
+ var caretLength = 0;
+ if (aEvent.caret) {
+ caretStart = aEvent.caret.start;
+ caretLength = aEvent.caret.length;
+ }
+
+ utils.sendTextEvent(aEvent.composition.string,
+ firstClauseLength, firstClauseAttr,
+ secondClauseLength, secondClauseAttr,
+ thirdClauseLength, thirdClauseAttr,
+ caretStart, caretLength);
+}
+
+/**
+ * Synthesize a query selected text event.
+ *
+ * @param aWindow Optional (If null, current |window| will be used)
+ * @return An nsIQueryContentEventResult object. If this failed,
+ * the result might be null.
+ */
+function synthesizeQuerySelectedText(aWindow)
+{
+ var utils = _getDOMWindowUtils(aWindow);
+ if (!utils) {
+ return null;
+ }
+
+ return utils.sendQueryContentEvent(utils.QUERY_SELECTED_TEXT, 0, 0, 0, 0);
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/arrays.js b/services/sync/tps/extensions/mozmill/resource/stdlib/arrays.js
new file mode 100644
index 000000000..c70a262c9
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/arrays.js
@@ -0,0 +1,78 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['inArray', 'getSet', 'indexOf',
+ 'remove', 'rindexOf', 'compare'];
+
+
+function remove(array, from, to) {
+ var rest = array.slice((to || from) + 1 || array.length);
+ array.length = from < 0 ? array.length + from : from;
+
+ return array.push.apply(array, rest);
+}
+
+function inArray(array, value) {
+ for (var i in array) {
+ if (value == array[i]) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+function getSet(array) {
+ var narray = [];
+
+ for (var i in array) {
+ if (!inArray(narray, array[i])) {
+ narray.push(array[i]);
+ }
+ }
+
+ return narray;
+}
+
+function indexOf(array, v, offset) {
+ for (var i in array) {
+ if (offset == undefined || i >= offset) {
+ if (!isNaN(i) && array[i] == v) {
+ return new Number(i);
+ }
+ }
+ }
+
+ return -1;
+}
+
+function rindexOf (array, v) {
+ var l = array.length;
+
+ for (var i in array) {
+ if (!isNaN(i)) {
+ var i = new Number(i);
+ }
+
+ if (!isNaN(i) && array[l - i] == v) {
+ return l - i;
+ }
+ }
+
+ return -1;
+}
+
+function compare (array, carray) {
+ if (array.length != carray.length) {
+ return false;
+ }
+
+ for (var i in array) {
+ if (array[i] != carray[i]) {
+ return false;
+ }
+ }
+
+ return true;
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/dom.js b/services/sync/tps/extensions/mozmill/resource/stdlib/dom.js
new file mode 100644
index 000000000..06bfcb529
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/dom.js
@@ -0,0 +1,24 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['getAttributes'];
+
+
+var getAttributes = function (node) {
+ var attributes = {};
+
+ for (var i in node.attributes) {
+ if (!isNaN(i)) {
+ try {
+ var attr = node.attributes[i];
+ attributes[attr.name] = attr.value;
+ }
+ catch (e) {
+ }
+ }
+ }
+
+ return attributes;
+}
+
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/httpd.js b/services/sync/tps/extensions/mozmill/resource/stdlib/httpd.js
new file mode 100644
index 000000000..c5eea6251
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/httpd.js
@@ -0,0 +1,5355 @@
+/* -*- Mode: JavaScript; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*
+ * An implementation of an HTTP server both as a loadable script and as an XPCOM
+ * component. See the accompanying README file for user documentation on
+ * httpd.js.
+ */
+
+this.EXPORTED_SYMBOLS = [
+ "HTTP_400",
+ "HTTP_401",
+ "HTTP_402",
+ "HTTP_403",
+ "HTTP_404",
+ "HTTP_405",
+ "HTTP_406",
+ "HTTP_407",
+ "HTTP_408",
+ "HTTP_409",
+ "HTTP_410",
+ "HTTP_411",
+ "HTTP_412",
+ "HTTP_413",
+ "HTTP_414",
+ "HTTP_415",
+ "HTTP_417",
+ "HTTP_500",
+ "HTTP_501",
+ "HTTP_502",
+ "HTTP_503",
+ "HTTP_504",
+ "HTTP_505",
+ "HttpError",
+ "HttpServer",
+];
+
+Components.utils.import("resource://gre/modules/XPCOMUtils.jsm");
+
+const Cc = Components.classes;
+const Ci = Components.interfaces;
+const Cr = Components.results;
+const Cu = Components.utils;
+const CC = Components.Constructor;
+
+const PR_UINT32_MAX = Math.pow(2, 32) - 1;
+
+/** True if debugging output is enabled, false otherwise. */
+var DEBUG = false; // non-const *only* so tweakable in server tests
+
+/** True if debugging output should be timestamped. */
+var DEBUG_TIMESTAMP = false; // non-const so tweakable in server tests
+
+var gGlobalObject = this;
+
+/**
+ * Asserts that the given condition holds. If it doesn't, the given message is
+ * dumped, a stack trace is printed, and an exception is thrown to attempt to
+ * stop execution (which unfortunately must rely upon the exception not being
+ * accidentally swallowed by the code that uses it).
+ */
+function NS_ASSERT(cond, msg)
+{
+ if (DEBUG && !cond)
+ {
+ dumpn("###!!!");
+ dumpn("###!!! ASSERTION" + (msg ? ": " + msg : "!"));
+ dumpn("###!!! Stack follows:");
+
+ var stack = new Error().stack.split(/\n/);
+ dumpn(stack.map(function(val) { return "###!!! " + val; }).join("\n"));
+
+ throw Cr.NS_ERROR_ABORT;
+ }
+}
+
+/** Constructs an HTTP error object. */
+this.HttpError = function HttpError(code, description)
+{
+ this.code = code;
+ this.description = description;
+}
+HttpError.prototype =
+{
+ toString: function()
+ {
+ return this.code + " " + this.description;
+ }
+};
+
+/**
+ * Errors thrown to trigger specific HTTP server responses.
+ */
+this.HTTP_400 = new HttpError(400, "Bad Request");
+this.HTTP_401 = new HttpError(401, "Unauthorized");
+this.HTTP_402 = new HttpError(402, "Payment Required");
+this.HTTP_403 = new HttpError(403, "Forbidden");
+this.HTTP_404 = new HttpError(404, "Not Found");
+this.HTTP_405 = new HttpError(405, "Method Not Allowed");
+this.HTTP_406 = new HttpError(406, "Not Acceptable");
+this.HTTP_407 = new HttpError(407, "Proxy Authentication Required");
+this.HTTP_408 = new HttpError(408, "Request Timeout");
+this.HTTP_409 = new HttpError(409, "Conflict");
+this.HTTP_410 = new HttpError(410, "Gone");
+this.HTTP_411 = new HttpError(411, "Length Required");
+this.HTTP_412 = new HttpError(412, "Precondition Failed");
+this.HTTP_413 = new HttpError(413, "Request Entity Too Large");
+this.HTTP_414 = new HttpError(414, "Request-URI Too Long");
+this.HTTP_415 = new HttpError(415, "Unsupported Media Type");
+this.HTTP_417 = new HttpError(417, "Expectation Failed");
+
+this.HTTP_500 = new HttpError(500, "Internal Server Error");
+this.HTTP_501 = new HttpError(501, "Not Implemented");
+this.HTTP_502 = new HttpError(502, "Bad Gateway");
+this.HTTP_503 = new HttpError(503, "Service Unavailable");
+this.HTTP_504 = new HttpError(504, "Gateway Timeout");
+this.HTTP_505 = new HttpError(505, "HTTP Version Not Supported");
+
+/** Creates a hash with fields corresponding to the values in arr. */
+function array2obj(arr)
+{
+ var obj = {};
+ for (var i = 0; i < arr.length; i++)
+ obj[arr[i]] = arr[i];
+ return obj;
+}
+
+/** Returns an array of the integers x through y, inclusive. */
+function range(x, y)
+{
+ var arr = [];
+ for (var i = x; i <= y; i++)
+ arr.push(i);
+ return arr;
+}
+
+/** An object (hash) whose fields are the numbers of all HTTP error codes. */
+const HTTP_ERROR_CODES = array2obj(range(400, 417).concat(range(500, 505)));
+
+
+/**
+ * The character used to distinguish hidden files from non-hidden files, a la
+ * the leading dot in Apache. Since that mechanism also hides files from
+ * easy display in LXR, ls output, etc. however, we choose instead to use a
+ * suffix character. If a requested file ends with it, we append another
+ * when getting the file on the server. If it doesn't, we just look up that
+ * file. Therefore, any file whose name ends with exactly one of the character
+ * is "hidden" and available for use by the server.
+ */
+const HIDDEN_CHAR = "^";
+
+/**
+ * The file name suffix indicating the file containing overridden headers for
+ * a requested file.
+ */
+const HEADERS_SUFFIX = HIDDEN_CHAR + "headers" + HIDDEN_CHAR;
+
+/** Type used to denote SJS scripts for CGI-like functionality. */
+const SJS_TYPE = "sjs";
+
+/** Base for relative timestamps produced by dumpn(). */
+var firstStamp = 0;
+
+/** dump(str) with a trailing "\n" -- only outputs if DEBUG. */
+function dumpn(str)
+{
+ if (DEBUG)
+ {
+ var prefix = "HTTPD-INFO | ";
+ if (DEBUG_TIMESTAMP)
+ {
+ if (firstStamp === 0)
+ firstStamp = Date.now();
+
+ var elapsed = Date.now() - firstStamp; // milliseconds
+ var min = Math.floor(elapsed / 60000);
+ var sec = (elapsed % 60000) / 1000;
+
+ if (sec < 10)
+ prefix += min + ":0" + sec.toFixed(3) + " | ";
+ else
+ prefix += min + ":" + sec.toFixed(3) + " | ";
+ }
+
+ dump(prefix + str + "\n");
+ }
+}
+
+/** Dumps the current JS stack if DEBUG. */
+function dumpStack()
+{
+ // peel off the frames for dumpStack() and Error()
+ var stack = new Error().stack.split(/\n/).slice(2);
+ stack.forEach(dumpn);
+}
+
+
+/** The XPCOM thread manager. */
+var gThreadManager = null;
+
+/** The XPCOM prefs service. */
+var gRootPrefBranch = null;
+function getRootPrefBranch()
+{
+ if (!gRootPrefBranch)
+ {
+ gRootPrefBranch = Cc["@mozilla.org/preferences-service;1"]
+ .getService(Ci.nsIPrefBranch);
+ }
+ return gRootPrefBranch;
+}
+
+/**
+ * JavaScript constructors for commonly-used classes; precreating these is a
+ * speedup over doing the same from base principles. See the docs at
+ * http://developer.mozilla.org/en/docs/Components.Constructor for details.
+ */
+const ServerSocket = CC("@mozilla.org/network/server-socket;1",
+ "nsIServerSocket",
+ "init");
+const ScriptableInputStream = CC("@mozilla.org/scriptableinputstream;1",
+ "nsIScriptableInputStream",
+ "init");
+const Pipe = CC("@mozilla.org/pipe;1",
+ "nsIPipe",
+ "init");
+const FileInputStream = CC("@mozilla.org/network/file-input-stream;1",
+ "nsIFileInputStream",
+ "init");
+const ConverterInputStream = CC("@mozilla.org/intl/converter-input-stream;1",
+ "nsIConverterInputStream",
+ "init");
+const WritablePropertyBag = CC("@mozilla.org/hash-property-bag;1",
+ "nsIWritablePropertyBag2");
+const SupportsString = CC("@mozilla.org/supports-string;1",
+ "nsISupportsString");
+
+/* These two are non-const only so a test can overwrite them. */
+var BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream",
+ "setInputStream");
+var BinaryOutputStream = CC("@mozilla.org/binaryoutputstream;1",
+ "nsIBinaryOutputStream",
+ "setOutputStream");
+
+/**
+ * Returns the RFC 822/1123 representation of a date.
+ *
+ * @param date : Number
+ * the date, in milliseconds from midnight (00:00:00), January 1, 1970 GMT
+ * @returns string
+ * the representation of the given date
+ */
+function toDateString(date)
+{
+ //
+ // rfc1123-date = wkday "," SP date1 SP time SP "GMT"
+ // date1 = 2DIGIT SP month SP 4DIGIT
+ // ; day month year (e.g., 02 Jun 1982)
+ // time = 2DIGIT ":" 2DIGIT ":" 2DIGIT
+ // ; 00:00:00 - 23:59:59
+ // wkday = "Mon" | "Tue" | "Wed"
+ // | "Thu" | "Fri" | "Sat" | "Sun"
+ // month = "Jan" | "Feb" | "Mar" | "Apr"
+ // | "May" | "Jun" | "Jul" | "Aug"
+ // | "Sep" | "Oct" | "Nov" | "Dec"
+ //
+
+ const wkdayStrings = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
+ const monthStrings = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
+ "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
+
+ /**
+ * Processes a date and returns the encoded UTC time as a string according to
+ * the format specified in RFC 2616.
+ *
+ * @param date : Date
+ * the date to process
+ * @returns string
+ * a string of the form "HH:MM:SS", ranging from "00:00:00" to "23:59:59"
+ */
+ function toTime(date)
+ {
+ var hrs = date.getUTCHours();
+ var rv = (hrs < 10) ? "0" + hrs : hrs;
+
+ var mins = date.getUTCMinutes();
+ rv += ":";
+ rv += (mins < 10) ? "0" + mins : mins;
+
+ var secs = date.getUTCSeconds();
+ rv += ":";
+ rv += (secs < 10) ? "0" + secs : secs;
+
+ return rv;
+ }
+
+ /**
+ * Processes a date and returns the encoded UTC date as a string according to
+ * the date1 format specified in RFC 2616.
+ *
+ * @param date : Date
+ * the date to process
+ * @returns string
+ * a string of the form "HH:MM:SS", ranging from "00:00:00" to "23:59:59"
+ */
+ function toDate1(date)
+ {
+ var day = date.getUTCDate();
+ var month = date.getUTCMonth();
+ var year = date.getUTCFullYear();
+
+ var rv = (day < 10) ? "0" + day : day;
+ rv += " " + monthStrings[month];
+ rv += " " + year;
+
+ return rv;
+ }
+
+ date = new Date(date);
+
+ const fmtString = "%wkday%, %date1% %time% GMT";
+ var rv = fmtString.replace("%wkday%", wkdayStrings[date.getUTCDay()]);
+ rv = rv.replace("%time%", toTime(date));
+ return rv.replace("%date1%", toDate1(date));
+}
+
+/**
+ * Prints out a human-readable representation of the object o and its fields,
+ * omitting those whose names begin with "_" if showMembers != true (to ignore
+ * "private" properties exposed via getters/setters).
+ */
+function printObj(o, showMembers)
+{
+ var s = "******************************\n";
+ s += "o = {\n";
+ for (var i in o)
+ {
+ if (typeof(i) != "string" ||
+ (showMembers || (i.length > 0 && i[0] != "_")))
+ s+= " " + i + ": " + o[i] + ",\n";
+ }
+ s += " };\n";
+ s += "******************************";
+ dumpn(s);
+}
+
+/**
+ * Instantiates a new HTTP server.
+ */
+function nsHttpServer()
+{
+ if (!gThreadManager)
+ gThreadManager = Cc["@mozilla.org/thread-manager;1"].getService();
+
+ /** The port on which this server listens. */
+ this._port = undefined;
+
+ /** The socket associated with this. */
+ this._socket = null;
+
+ /** The handler used to process requests to this server. */
+ this._handler = new ServerHandler(this);
+
+ /** Naming information for this server. */
+ this._identity = new ServerIdentity();
+
+ /**
+ * Indicates when the server is to be shut down at the end of the request.
+ */
+ this._doQuit = false;
+
+ /**
+ * True if the socket in this is closed (and closure notifications have been
+ * sent and processed if the socket was ever opened), false otherwise.
+ */
+ this._socketClosed = true;
+
+ /**
+ * Used for tracking existing connections and ensuring that all connections
+ * are properly cleaned up before server shutdown; increases by 1 for every
+ * new incoming connection.
+ */
+ this._connectionGen = 0;
+
+ /**
+ * Hash of all open connections, indexed by connection number at time of
+ * creation.
+ */
+ this._connections = {};
+}
+nsHttpServer.prototype =
+{
+ classID: Components.ID("{54ef6f81-30af-4b1d-ac55-8ba811293e41}"),
+
+ // NSISERVERSOCKETLISTENER
+
+ /**
+ * Processes an incoming request coming in on the given socket and contained
+ * in the given transport.
+ *
+ * @param socket : nsIServerSocket
+ * the socket through which the request was served
+ * @param trans : nsISocketTransport
+ * the transport for the request/response
+ * @see nsIServerSocketListener.onSocketAccepted
+ */
+ onSocketAccepted: function(socket, trans)
+ {
+ dumpn("*** onSocketAccepted(socket=" + socket + ", trans=" + trans + ")");
+
+ dumpn(">>> new connection on " + trans.host + ":" + trans.port);
+
+ const SEGMENT_SIZE = 8192;
+ const SEGMENT_COUNT = 1024;
+ try
+ {
+ var input = trans.openInputStream(0, SEGMENT_SIZE, SEGMENT_COUNT)
+ .QueryInterface(Ci.nsIAsyncInputStream);
+ var output = trans.openOutputStream(0, 0, 0);
+ }
+ catch (e)
+ {
+ dumpn("*** error opening transport streams: " + e);
+ trans.close(Cr.NS_BINDING_ABORTED);
+ return;
+ }
+
+ var connectionNumber = ++this._connectionGen;
+
+ try
+ {
+ var conn = new Connection(input, output, this, socket.port, trans.port,
+ connectionNumber);
+ var reader = new RequestReader(conn);
+
+ // XXX add request timeout functionality here!
+
+ // Note: must use main thread here, or we might get a GC that will cause
+ // threadsafety assertions. We really need to fix XPConnect so that
+ // you can actually do things in multi-threaded JS. :-(
+ input.asyncWait(reader, 0, 0, gThreadManager.mainThread);
+ }
+ catch (e)
+ {
+ // Assume this connection can't be salvaged and bail on it completely;
+ // don't attempt to close it so that we can assert that any connection
+ // being closed is in this._connections.
+ dumpn("*** error in initial request-processing stages: " + e);
+ trans.close(Cr.NS_BINDING_ABORTED);
+ return;
+ }
+
+ this._connections[connectionNumber] = conn;
+ dumpn("*** starting connection " + connectionNumber);
+ },
+
+ /**
+ * Called when the socket associated with this is closed.
+ *
+ * @param socket : nsIServerSocket
+ * the socket being closed
+ * @param status : nsresult
+ * the reason the socket stopped listening (NS_BINDING_ABORTED if the server
+ * was stopped using nsIHttpServer.stop)
+ * @see nsIServerSocketListener.onStopListening
+ */
+ onStopListening: function(socket, status)
+ {
+ dumpn(">>> shutting down server on port " + socket.port);
+ for (var n in this._connections) {
+ if (!this._connections[n]._requestStarted) {
+ this._connections[n].close();
+ }
+ }
+ this._socketClosed = true;
+ if (this._hasOpenConnections()) {
+ dumpn("*** open connections!!!");
+ }
+ if (!this._hasOpenConnections())
+ {
+ dumpn("*** no open connections, notifying async from onStopListening");
+
+ // Notify asynchronously so that any pending teardown in stop() has a
+ // chance to run first.
+ var self = this;
+ var stopEvent =
+ {
+ run: function()
+ {
+ dumpn("*** _notifyStopped async callback");
+ self._notifyStopped();
+ }
+ };
+ gThreadManager.currentThread
+ .dispatch(stopEvent, Ci.nsIThread.DISPATCH_NORMAL);
+ }
+ },
+
+ // NSIHTTPSERVER
+
+ //
+ // see nsIHttpServer.start
+ //
+ start: function(port)
+ {
+ this._start(port, "localhost")
+ },
+
+ _start: function(port, host)
+ {
+ if (this._socket)
+ throw Cr.NS_ERROR_ALREADY_INITIALIZED;
+
+ this._port = port;
+ this._doQuit = this._socketClosed = false;
+
+ this._host = host;
+
+ // The listen queue needs to be long enough to handle
+ // network.http.max-persistent-connections-per-server or
+ // network.http.max-persistent-connections-per-proxy concurrent
+ // connections, plus a safety margin in case some other process is
+ // talking to the server as well.
+ var prefs = getRootPrefBranch();
+ var maxConnections = 5 + Math.max(
+ prefs.getIntPref("network.http.max-persistent-connections-per-server"),
+ prefs.getIntPref("network.http.max-persistent-connections-per-proxy"));
+
+ try
+ {
+ var loopback = true;
+ if (this._host != "127.0.0.1" && this._host != "localhost") {
+ var loopback = false;
+ }
+
+ // When automatically selecting a port, sometimes the chosen port is
+ // "blocked" from clients. We don't want to use these ports because
+ // tests will intermittently fail. So, we simply keep trying to to
+ // get a server socket until a valid port is obtained. We limit
+ // ourselves to finite attempts just so we don't loop forever.
+ var ios = Cc["@mozilla.org/network/io-service;1"]
+ .getService(Ci.nsIIOService);
+ var socket;
+ for (var i = 100; i; i--)
+ {
+ var temp = new ServerSocket(this._port,
+ loopback, // true = localhost, false = everybody
+ maxConnections);
+
+ var allowed = ios.allowPort(temp.port, "http");
+ if (!allowed)
+ {
+ dumpn(">>>Warning: obtained ServerSocket listens on a blocked " +
+ "port: " + temp.port);
+ }
+
+ if (!allowed && this._port == -1)
+ {
+ dumpn(">>>Throwing away ServerSocket with bad port.");
+ temp.close();
+ continue;
+ }
+
+ socket = temp;
+ break;
+ }
+
+ if (!socket) {
+ throw new Error("No socket server available. Are there no available ports?");
+ }
+
+ dumpn(">>> listening on port " + socket.port + ", " + maxConnections +
+ " pending connections");
+ socket.asyncListen(this);
+ this._port = socket.port;
+ this._identity._initialize(socket.port, host, true);
+ this._socket = socket;
+ }
+ catch (e)
+ {
+ dump("\n!!! could not start server on port " + port + ": " + e + "\n\n");
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+ }
+ },
+
+ //
+ // see nsIHttpServer.stop
+ //
+ stop: function(callback)
+ {
+ if (!callback)
+ throw Cr.NS_ERROR_NULL_POINTER;
+ if (!this._socket)
+ throw Cr.NS_ERROR_UNEXPECTED;
+
+ this._stopCallback = typeof callback === "function"
+ ? callback
+ : function() { callback.onStopped(); };
+
+ dumpn(">>> stopping listening on port " + this._socket.port);
+ this._socket.close();
+ this._socket = null;
+
+ // We can't have this identity any more, and the port on which we're running
+ // this server now could be meaningless the next time around.
+ this._identity._teardown();
+
+ this._doQuit = false;
+
+ // socket-close notification and pending request completion happen async
+ },
+
+ //
+ // see nsIHttpServer.registerFile
+ //
+ registerFile: function(path, file)
+ {
+ if (file && (!file.exists() || file.isDirectory()))
+ throw Cr.NS_ERROR_INVALID_ARG;
+
+ this._handler.registerFile(path, file);
+ },
+
+ //
+ // see nsIHttpServer.registerDirectory
+ //
+ registerDirectory: function(path, directory)
+ {
+ // XXX true path validation!
+ if (path.charAt(0) != "/" ||
+ path.charAt(path.length - 1) != "/" ||
+ (directory &&
+ (!directory.exists() || !directory.isDirectory())))
+ throw Cr.NS_ERROR_INVALID_ARG;
+
+ // XXX determine behavior of nonexistent /foo/bar when a /foo/bar/ mapping
+ // exists!
+
+ this._handler.registerDirectory(path, directory);
+ },
+
+ //
+ // see nsIHttpServer.registerPathHandler
+ //
+ registerPathHandler: function(path, handler)
+ {
+ this._handler.registerPathHandler(path, handler);
+ },
+
+ //
+ // see nsIHttpServer.registerPrefixHandler
+ //
+ registerPrefixHandler: function(prefix, handler)
+ {
+ this._handler.registerPrefixHandler(prefix, handler);
+ },
+
+ //
+ // see nsIHttpServer.registerErrorHandler
+ //
+ registerErrorHandler: function(code, handler)
+ {
+ this._handler.registerErrorHandler(code, handler);
+ },
+
+ //
+ // see nsIHttpServer.setIndexHandler
+ //
+ setIndexHandler: function(handler)
+ {
+ this._handler.setIndexHandler(handler);
+ },
+
+ //
+ // see nsIHttpServer.registerContentType
+ //
+ registerContentType: function(ext, type)
+ {
+ this._handler.registerContentType(ext, type);
+ },
+
+ //
+ // see nsIHttpServer.serverIdentity
+ //
+ get identity()
+ {
+ return this._identity;
+ },
+
+ //
+ // see nsIHttpServer.getState
+ //
+ getState: function(path, k)
+ {
+ return this._handler._getState(path, k);
+ },
+
+ //
+ // see nsIHttpServer.setState
+ //
+ setState: function(path, k, v)
+ {
+ return this._handler._setState(path, k, v);
+ },
+
+ //
+ // see nsIHttpServer.getSharedState
+ //
+ getSharedState: function(k)
+ {
+ return this._handler._getSharedState(k);
+ },
+
+ //
+ // see nsIHttpServer.setSharedState
+ //
+ setSharedState: function(k, v)
+ {
+ return this._handler._setSharedState(k, v);
+ },
+
+ //
+ // see nsIHttpServer.getObjectState
+ //
+ getObjectState: function(k)
+ {
+ return this._handler._getObjectState(k);
+ },
+
+ //
+ // see nsIHttpServer.setObjectState
+ //
+ setObjectState: function(k, v)
+ {
+ return this._handler._setObjectState(k, v);
+ },
+
+
+ // NSISUPPORTS
+
+ //
+ // see nsISupports.QueryInterface
+ //
+ QueryInterface: function(iid)
+ {
+ if (iid.equals(Ci.nsIHttpServer) ||
+ iid.equals(Ci.nsIServerSocketListener) ||
+ iid.equals(Ci.nsISupports))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+
+ // NON-XPCOM PUBLIC API
+
+ /**
+ * Returns true iff this server is not running (and is not in the process of
+ * serving any requests still to be processed when the server was last
+ * stopped after being run).
+ */
+ isStopped: function()
+ {
+ return this._socketClosed && !this._hasOpenConnections();
+ },
+
+ // PRIVATE IMPLEMENTATION
+
+ /** True if this server has any open connections to it, false otherwise. */
+ _hasOpenConnections: function()
+ {
+ //
+ // If we have any open connections, they're tracked as numeric properties on
+ // |this._connections|. The non-standard __count__ property could be used
+ // to check whether there are any properties, but standard-wise, even
+ // looking forward to ES5, there's no less ugly yet still O(1) way to do
+ // this.
+ //
+ for (var n in this._connections)
+ return true;
+ return false;
+ },
+
+ /** Calls the server-stopped callback provided when stop() was called. */
+ _notifyStopped: function()
+ {
+ NS_ASSERT(this._stopCallback !== null, "double-notifying?");
+ NS_ASSERT(!this._hasOpenConnections(), "should be done serving by now");
+
+ //
+ // NB: We have to grab this now, null out the member, *then* call the
+ // callback here, or otherwise the callback could (indirectly) futz with
+ // this._stopCallback by starting and immediately stopping this, at
+ // which point we'd be nulling out a field we no longer have a right to
+ // modify.
+ //
+ var callback = this._stopCallback;
+ this._stopCallback = null;
+ try
+ {
+ callback();
+ }
+ catch (e)
+ {
+ // not throwing because this is specified as being usually (but not
+ // always) asynchronous
+ dump("!!! error running onStopped callback: " + e + "\n");
+ }
+ },
+
+ /**
+ * Notifies this server that the given connection has been closed.
+ *
+ * @param connection : Connection
+ * the connection that was closed
+ */
+ _connectionClosed: function(connection)
+ {
+ NS_ASSERT(connection.number in this._connections,
+ "closing a connection " + this + " that we never added to the " +
+ "set of open connections?");
+ NS_ASSERT(this._connections[connection.number] === connection,
+ "connection number mismatch? " +
+ this._connections[connection.number]);
+ delete this._connections[connection.number];
+
+ // Fire a pending server-stopped notification if it's our responsibility.
+ if (!this._hasOpenConnections() && this._socketClosed)
+ this._notifyStopped();
+ // Bug 508125: Add a GC here else we'll use gigabytes of memory running
+ // mochitests. We can't rely on xpcshell doing an automated GC, as that
+ // would interfere with testing GC stuff...
+ Components.utils.forceGC();
+ },
+
+ /**
+ * Requests that the server be shut down when possible.
+ */
+ _requestQuit: function()
+ {
+ dumpn(">>> requesting a quit");
+ dumpStack();
+ this._doQuit = true;
+ }
+};
+
+this.HttpServer = nsHttpServer;
+
+//
+// RFC 2396 section 3.2.2:
+//
+// host = hostname | IPv4address
+// hostname = *( domainlabel "." ) toplabel [ "." ]
+// domainlabel = alphanum | alphanum *( alphanum | "-" ) alphanum
+// toplabel = alpha | alpha *( alphanum | "-" ) alphanum
+// IPv4address = 1*digit "." 1*digit "." 1*digit "." 1*digit
+//
+
+const HOST_REGEX =
+ new RegExp("^(?:" +
+ // *( domainlabel "." )
+ "(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)*" +
+ // toplabel
+ "[a-z](?:[a-z0-9-]*[a-z0-9])?" +
+ "|" +
+ // IPv4 address
+ "\\d+\\.\\d+\\.\\d+\\.\\d+" +
+ ")$",
+ "i");
+
+
+/**
+ * Represents the identity of a server. An identity consists of a set of
+ * (scheme, host, port) tuples denoted as locations (allowing a single server to
+ * serve multiple sites or to be used behind both HTTP and HTTPS proxies for any
+ * host/port). Any incoming request must be to one of these locations, or it
+ * will be rejected with an HTTP 400 error. One location, denoted as the
+ * primary location, is the location assigned in contexts where a location
+ * cannot otherwise be endogenously derived, such as for HTTP/1.0 requests.
+ *
+ * A single identity may contain at most one location per unique host/port pair;
+ * other than that, no restrictions are placed upon what locations may
+ * constitute an identity.
+ */
+function ServerIdentity()
+{
+ /** The scheme of the primary location. */
+ this._primaryScheme = "http";
+
+ /** The hostname of the primary location. */
+ this._primaryHost = "127.0.0.1"
+
+ /** The port number of the primary location. */
+ this._primaryPort = -1;
+
+ /**
+ * The current port number for the corresponding server, stored so that a new
+ * primary location can always be set if the current one is removed.
+ */
+ this._defaultPort = -1;
+
+ /**
+ * Maps hosts to maps of ports to schemes, e.g. the following would represent
+ * https://example.com:789/ and http://example.org/:
+ *
+ * {
+ * "xexample.com": { 789: "https" },
+ * "xexample.org": { 80: "http" }
+ * }
+ *
+ * Note the "x" prefix on hostnames, which prevents collisions with special
+ * JS names like "prototype".
+ */
+ this._locations = { "xlocalhost": {} };
+}
+ServerIdentity.prototype =
+{
+ // NSIHTTPSERVERIDENTITY
+
+ //
+ // see nsIHttpServerIdentity.primaryScheme
+ //
+ get primaryScheme()
+ {
+ if (this._primaryPort === -1)
+ throw Cr.NS_ERROR_NOT_INITIALIZED;
+ return this._primaryScheme;
+ },
+
+ //
+ // see nsIHttpServerIdentity.primaryHost
+ //
+ get primaryHost()
+ {
+ if (this._primaryPort === -1)
+ throw Cr.NS_ERROR_NOT_INITIALIZED;
+ return this._primaryHost;
+ },
+
+ //
+ // see nsIHttpServerIdentity.primaryPort
+ //
+ get primaryPort()
+ {
+ if (this._primaryPort === -1)
+ throw Cr.NS_ERROR_NOT_INITIALIZED;
+ return this._primaryPort;
+ },
+
+ //
+ // see nsIHttpServerIdentity.add
+ //
+ add: function(scheme, host, port)
+ {
+ this._validate(scheme, host, port);
+
+ var entry = this._locations["x" + host];
+ if (!entry)
+ this._locations["x" + host] = entry = {};
+
+ entry[port] = scheme;
+ },
+
+ //
+ // see nsIHttpServerIdentity.remove
+ //
+ remove: function(scheme, host, port)
+ {
+ this._validate(scheme, host, port);
+
+ var entry = this._locations["x" + host];
+ if (!entry)
+ return false;
+
+ var present = port in entry;
+ delete entry[port];
+
+ if (this._primaryScheme == scheme &&
+ this._primaryHost == host &&
+ this._primaryPort == port &&
+ this._defaultPort !== -1)
+ {
+ // Always keep at least one identity in existence at any time, unless
+ // we're in the process of shutting down (the last condition above).
+ this._primaryPort = -1;
+ this._initialize(this._defaultPort, host, false);
+ }
+
+ return present;
+ },
+
+ //
+ // see nsIHttpServerIdentity.has
+ //
+ has: function(scheme, host, port)
+ {
+ this._validate(scheme, host, port);
+
+ return "x" + host in this._locations &&
+ scheme === this._locations["x" + host][port];
+ },
+
+ //
+ // see nsIHttpServerIdentity.has
+ //
+ getScheme: function(host, port)
+ {
+ this._validate("http", host, port);
+
+ var entry = this._locations["x" + host];
+ if (!entry)
+ return "";
+
+ return entry[port] || "";
+ },
+
+ //
+ // see nsIHttpServerIdentity.setPrimary
+ //
+ setPrimary: function(scheme, host, port)
+ {
+ this._validate(scheme, host, port);
+
+ this.add(scheme, host, port);
+
+ this._primaryScheme = scheme;
+ this._primaryHost = host;
+ this._primaryPort = port;
+ },
+
+
+ // NSISUPPORTS
+
+ //
+ // see nsISupports.QueryInterface
+ //
+ QueryInterface: function(iid)
+ {
+ if (iid.equals(Ci.nsIHttpServerIdentity) || iid.equals(Ci.nsISupports))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+
+ // PRIVATE IMPLEMENTATION
+
+ /**
+ * Initializes the primary name for the corresponding server, based on the
+ * provided port number.
+ */
+ _initialize: function(port, host, addSecondaryDefault)
+ {
+ this._host = host;
+ if (this._primaryPort !== -1)
+ this.add("http", host, port);
+ else
+ this.setPrimary("http", "localhost", port);
+ this._defaultPort = port;
+
+ // Only add this if we're being called at server startup
+ if (addSecondaryDefault && host != "127.0.0.1")
+ this.add("http", "127.0.0.1", port);
+ },
+
+ /**
+ * Called at server shutdown time, unsets the primary location only if it was
+ * the default-assigned location and removes the default location from the
+ * set of locations used.
+ */
+ _teardown: function()
+ {
+ if (this._host != "127.0.0.1") {
+ // Not the default primary location, nothing special to do here
+ this.remove("http", "127.0.0.1", this._defaultPort);
+ }
+
+ // This is a *very* tricky bit of reasoning here; make absolutely sure the
+ // tests for this code pass before you commit changes to it.
+ if (this._primaryScheme == "http" &&
+ this._primaryHost == this._host &&
+ this._primaryPort == this._defaultPort)
+ {
+ // Make sure we don't trigger the readding logic in .remove(), then remove
+ // the default location.
+ var port = this._defaultPort;
+ this._defaultPort = -1;
+ this.remove("http", this._host, port);
+
+ // Ensure a server start triggers the setPrimary() path in ._initialize()
+ this._primaryPort = -1;
+ }
+ else
+ {
+ // No reason not to remove directly as it's not our primary location
+ this.remove("http", this._host, this._defaultPort);
+ }
+ },
+
+ /**
+ * Ensures scheme, host, and port are all valid with respect to RFC 2396.
+ *
+ * @throws NS_ERROR_ILLEGAL_VALUE
+ * if any argument doesn't match the corresponding production
+ */
+ _validate: function(scheme, host, port)
+ {
+ if (scheme !== "http" && scheme !== "https")
+ {
+ dumpn("*** server only supports http/https schemes: '" + scheme + "'");
+ dumpStack();
+ throw Cr.NS_ERROR_ILLEGAL_VALUE;
+ }
+ if (!HOST_REGEX.test(host))
+ {
+ dumpn("*** unexpected host: '" + host + "'");
+ throw Cr.NS_ERROR_ILLEGAL_VALUE;
+ }
+ if (port < 0 || port > 65535)
+ {
+ dumpn("*** unexpected port: '" + port + "'");
+ throw Cr.NS_ERROR_ILLEGAL_VALUE;
+ }
+ }
+};
+
+
+/**
+ * Represents a connection to the server (and possibly in the future the thread
+ * on which the connection is processed).
+ *
+ * @param input : nsIInputStream
+ * stream from which incoming data on the connection is read
+ * @param output : nsIOutputStream
+ * stream to write data out the connection
+ * @param server : nsHttpServer
+ * the server handling the connection
+ * @param port : int
+ * the port on which the server is running
+ * @param outgoingPort : int
+ * the outgoing port used by this connection
+ * @param number : uint
+ * a serial number used to uniquely identify this connection
+ */
+function Connection(input, output, server, port, outgoingPort, number)
+{
+ dumpn("*** opening new connection " + number + " on port " + outgoingPort);
+
+ /** Stream of incoming data. */
+ this.input = input;
+
+ /** Stream for outgoing data. */
+ this.output = output;
+
+ /** The server associated with this request. */
+ this.server = server;
+
+ /** The port on which the server is running. */
+ this.port = port;
+
+ /** The outgoing poort used by this connection. */
+ this._outgoingPort = outgoingPort;
+
+ /** The serial number of this connection. */
+ this.number = number;
+
+ /**
+ * The request for which a response is being generated, null if the
+ * incoming request has not been fully received or if it had errors.
+ */
+ this.request = null;
+
+ /** This allows a connection to disambiguate between a peer initiating a
+ * close and the socket being forced closed on shutdown.
+ */
+ this._closed = false;
+
+ /** State variable for debugging. */
+ this._processed = false;
+
+ /** whether or not 1st line of request has been received */
+ this._requestStarted = false;
+}
+Connection.prototype =
+{
+ /** Closes this connection's input/output streams. */
+ close: function()
+ {
+ if (this._closed)
+ return;
+
+ dumpn("*** closing connection " + this.number +
+ " on port " + this._outgoingPort);
+
+ this.input.close();
+ this.output.close();
+ this._closed = true;
+
+ var server = this.server;
+ server._connectionClosed(this);
+
+ // If an error triggered a server shutdown, act on it now
+ if (server._doQuit)
+ server.stop(function() { /* not like we can do anything better */ });
+ },
+
+ /**
+ * Initiates processing of this connection, using the data in the given
+ * request.
+ *
+ * @param request : Request
+ * the request which should be processed
+ */
+ process: function(request)
+ {
+ NS_ASSERT(!this._closed && !this._processed);
+
+ this._processed = true;
+
+ this.request = request;
+ this.server._handler.handleResponse(this);
+ },
+
+ /**
+ * Initiates processing of this connection, generating a response with the
+ * given HTTP error code.
+ *
+ * @param code : uint
+ * an HTTP code, so in the range [0, 1000)
+ * @param request : Request
+ * incomplete data about the incoming request (since there were errors
+ * during its processing
+ */
+ processError: function(code, request)
+ {
+ NS_ASSERT(!this._closed && !this._processed);
+
+ this._processed = true;
+ this.request = request;
+ this.server._handler.handleError(code, this);
+ },
+
+ /** Converts this to a string for debugging purposes. */
+ toString: function()
+ {
+ return "<Connection(" + this.number +
+ (this.request ? ", " + this.request.path : "") +"): " +
+ (this._closed ? "closed" : "open") + ">";
+ },
+
+ requestStarted: function()
+ {
+ this._requestStarted = true;
+ }
+};
+
+
+
+/** Returns an array of count bytes from the given input stream. */
+function readBytes(inputStream, count)
+{
+ return new BinaryInputStream(inputStream).readByteArray(count);
+}
+
+
+
+/** Request reader processing states; see RequestReader for details. */
+const READER_IN_REQUEST_LINE = 0;
+const READER_IN_HEADERS = 1;
+const READER_IN_BODY = 2;
+const READER_FINISHED = 3;
+
+
+/**
+ * Reads incoming request data asynchronously, does any necessary preprocessing,
+ * and forwards it to the request handler. Processing occurs in three states:
+ *
+ * READER_IN_REQUEST_LINE Reading the request's status line
+ * READER_IN_HEADERS Reading headers in the request
+ * READER_IN_BODY Reading the body of the request
+ * READER_FINISHED Entire request has been read and processed
+ *
+ * During the first two stages, initial metadata about the request is gathered
+ * into a Request object. Once the status line and headers have been processed,
+ * we start processing the body of the request into the Request. Finally, when
+ * the entire body has been read, we create a Response and hand it off to the
+ * ServerHandler to be given to the appropriate request handler.
+ *
+ * @param connection : Connection
+ * the connection for the request being read
+ */
+function RequestReader(connection)
+{
+ /** Connection metadata for this request. */
+ this._connection = connection;
+
+ /**
+ * A container providing line-by-line access to the raw bytes that make up the
+ * data which has been read from the connection but has not yet been acted
+ * upon (by passing it to the request handler or by extracting request
+ * metadata from it).
+ */
+ this._data = new LineData();
+
+ /**
+ * The amount of data remaining to be read from the body of this request.
+ * After all headers in the request have been read this is the value in the
+ * Content-Length header, but as the body is read its value decreases to zero.
+ */
+ this._contentLength = 0;
+
+ /** The current state of parsing the incoming request. */
+ this._state = READER_IN_REQUEST_LINE;
+
+ /** Metadata constructed from the incoming request for the request handler. */
+ this._metadata = new Request(connection.port);
+
+ /**
+ * Used to preserve state if we run out of line data midway through a
+ * multi-line header. _lastHeaderName stores the name of the header, while
+ * _lastHeaderValue stores the value we've seen so far for the header.
+ *
+ * These fields are always either both undefined or both strings.
+ */
+ this._lastHeaderName = this._lastHeaderValue = undefined;
+}
+RequestReader.prototype =
+{
+ // NSIINPUTSTREAMCALLBACK
+
+ /**
+ * Called when more data from the incoming request is available. This method
+ * then reads the available data from input and deals with that data as
+ * necessary, depending upon the syntax of already-downloaded data.
+ *
+ * @param input : nsIAsyncInputStream
+ * the stream of incoming data from the connection
+ */
+ onInputStreamReady: function(input)
+ {
+ dumpn("*** onInputStreamReady(input=" + input + ") on thread " +
+ gThreadManager.currentThread + " (main is " +
+ gThreadManager.mainThread + ")");
+ dumpn("*** this._state == " + this._state);
+
+ // Handle cases where we get more data after a request error has been
+ // discovered but *before* we can close the connection.
+ var data = this._data;
+ if (!data)
+ return;
+
+ try
+ {
+ data.appendBytes(readBytes(input, input.available()));
+ }
+ catch (e)
+ {
+ if (streamClosed(e))
+ {
+ dumpn("*** WARNING: unexpected error when reading from socket; will " +
+ "be treated as if the input stream had been closed");
+ dumpn("*** WARNING: actual error was: " + e);
+ }
+
+ // We've lost a race -- input has been closed, but we're still expecting
+ // to read more data. available() will throw in this case, and since
+ // we're dead in the water now, destroy the connection.
+ dumpn("*** onInputStreamReady called on a closed input, destroying " +
+ "connection");
+ this._connection.close();
+ return;
+ }
+
+ switch (this._state)
+ {
+ default:
+ NS_ASSERT(false, "invalid state: " + this._state);
+ break;
+
+ case READER_IN_REQUEST_LINE:
+ if (!this._processRequestLine())
+ break;
+ /* fall through */
+
+ case READER_IN_HEADERS:
+ if (!this._processHeaders())
+ break;
+ /* fall through */
+
+ case READER_IN_BODY:
+ this._processBody();
+ }
+
+ if (this._state != READER_FINISHED)
+ input.asyncWait(this, 0, 0, gThreadManager.currentThread);
+ },
+
+ //
+ // see nsISupports.QueryInterface
+ //
+ QueryInterface: function(aIID)
+ {
+ if (aIID.equals(Ci.nsIInputStreamCallback) ||
+ aIID.equals(Ci.nsISupports))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+
+ // PRIVATE API
+
+ /**
+ * Processes unprocessed, downloaded data as a request line.
+ *
+ * @returns boolean
+ * true iff the request line has been fully processed
+ */
+ _processRequestLine: function()
+ {
+ NS_ASSERT(this._state == READER_IN_REQUEST_LINE);
+
+ // Servers SHOULD ignore any empty line(s) received where a Request-Line
+ // is expected (section 4.1).
+ var data = this._data;
+ var line = {};
+ var readSuccess;
+ while ((readSuccess = data.readLine(line)) && line.value == "")
+ dumpn("*** ignoring beginning blank line...");
+
+ // if we don't have a full line, wait until we do
+ if (!readSuccess)
+ return false;
+
+ // we have the first non-blank line
+ try
+ {
+ this._parseRequestLine(line.value);
+ this._state = READER_IN_HEADERS;
+ this._connection.requestStarted();
+ return true;
+ }
+ catch (e)
+ {
+ this._handleError(e);
+ return false;
+ }
+ },
+
+ /**
+ * Processes stored data, assuming it is either at the beginning or in
+ * the middle of processing request headers.
+ *
+ * @returns boolean
+ * true iff header data in the request has been fully processed
+ */
+ _processHeaders: function()
+ {
+ NS_ASSERT(this._state == READER_IN_HEADERS);
+
+ // XXX things to fix here:
+ //
+ // - need to support RFC 2047-encoded non-US-ASCII characters
+
+ try
+ {
+ var done = this._parseHeaders();
+ if (done)
+ {
+ var request = this._metadata;
+
+ // XXX this is wrong for requests with transfer-encodings applied to
+ // them, particularly chunked (which by its nature can have no
+ // meaningful Content-Length header)!
+ this._contentLength = request.hasHeader("Content-Length")
+ ? parseInt(request.getHeader("Content-Length"), 10)
+ : 0;
+ dumpn("_processHeaders, Content-length=" + this._contentLength);
+
+ this._state = READER_IN_BODY;
+ }
+ return done;
+ }
+ catch (e)
+ {
+ this._handleError(e);
+ return false;
+ }
+ },
+
+ /**
+ * Processes stored data, assuming it is either at the beginning or in
+ * the middle of processing the request body.
+ *
+ * @returns boolean
+ * true iff the request body has been fully processed
+ */
+ _processBody: function()
+ {
+ NS_ASSERT(this._state == READER_IN_BODY);
+
+ // XXX handle chunked transfer-coding request bodies!
+
+ try
+ {
+ if (this._contentLength > 0)
+ {
+ var data = this._data.purge();
+ var count = Math.min(data.length, this._contentLength);
+ dumpn("*** loading data=" + data + " len=" + data.length +
+ " excess=" + (data.length - count));
+
+ var bos = new BinaryOutputStream(this._metadata._bodyOutputStream);
+ bos.writeByteArray(data, count);
+ this._contentLength -= count;
+ }
+
+ dumpn("*** remaining body data len=" + this._contentLength);
+ if (this._contentLength == 0)
+ {
+ this._validateRequest();
+ this._state = READER_FINISHED;
+ this._handleResponse();
+ return true;
+ }
+
+ return false;
+ }
+ catch (e)
+ {
+ this._handleError(e);
+ return false;
+ }
+ },
+
+ /**
+ * Does various post-header checks on the data in this request.
+ *
+ * @throws : HttpError
+ * if the request was malformed in some way
+ */
+ _validateRequest: function()
+ {
+ NS_ASSERT(this._state == READER_IN_BODY);
+
+ dumpn("*** _validateRequest");
+
+ var metadata = this._metadata;
+ var headers = metadata._headers;
+
+ // 19.6.1.1 -- servers MUST report 400 to HTTP/1.1 requests w/o Host header
+ var identity = this._connection.server.identity;
+ if (metadata._httpVersion.atLeast(nsHttpVersion.HTTP_1_1))
+ {
+ if (!headers.hasHeader("Host"))
+ {
+ dumpn("*** malformed HTTP/1.1 or greater request with no Host header!");
+ throw HTTP_400;
+ }
+
+ // If the Request-URI wasn't absolute, then we need to determine our host.
+ // We have to determine what scheme was used to access us based on the
+ // server identity data at this point, because the request just doesn't
+ // contain enough data on its own to do this, sadly.
+ if (!metadata._host)
+ {
+ var host, port;
+ var hostPort = headers.getHeader("Host");
+ var colon = hostPort.indexOf(":");
+ if (colon < 0)
+ {
+ host = hostPort;
+ port = "";
+ }
+ else
+ {
+ host = hostPort.substring(0, colon);
+ port = hostPort.substring(colon + 1);
+ }
+
+ // NB: We allow an empty port here because, oddly, a colon may be
+ // present even without a port number, e.g. "example.com:"; in this
+ // case the default port applies.
+ if (!HOST_REGEX.test(host) || !/^\d*$/.test(port))
+ {
+ dumpn("*** malformed hostname (" + hostPort + ") in Host " +
+ "header, 400 time");
+ throw HTTP_400;
+ }
+
+ // If we're not given a port, we're stuck, because we don't know what
+ // scheme to use to look up the correct port here, in general. Since
+ // the HTTPS case requires a tunnel/proxy and thus requires that the
+ // requested URI be absolute (and thus contain the necessary
+ // information), let's assume HTTP will prevail and use that.
+ port = +port || 80;
+
+ var scheme = identity.getScheme(host, port);
+ if (!scheme)
+ {
+ dumpn("*** unrecognized hostname (" + hostPort + ") in Host " +
+ "header, 400 time");
+ throw HTTP_400;
+ }
+
+ metadata._scheme = scheme;
+ metadata._host = host;
+ metadata._port = port;
+ }
+ }
+ else
+ {
+ NS_ASSERT(metadata._host === undefined,
+ "HTTP/1.0 doesn't allow absolute paths in the request line!");
+
+ metadata._scheme = identity.primaryScheme;
+ metadata._host = identity.primaryHost;
+ metadata._port = identity.primaryPort;
+ }
+
+ NS_ASSERT(identity.has(metadata._scheme, metadata._host, metadata._port),
+ "must have a location we recognize by now!");
+ },
+
+ /**
+ * Handles responses in case of error, either in the server or in the request.
+ *
+ * @param e
+ * the specific error encountered, which is an HttpError in the case where
+ * the request is in some way invalid or cannot be fulfilled; if this isn't
+ * an HttpError we're going to be paranoid and shut down, because that
+ * shouldn't happen, ever
+ */
+ _handleError: function(e)
+ {
+ // Don't fall back into normal processing!
+ this._state = READER_FINISHED;
+
+ var server = this._connection.server;
+ if (e instanceof HttpError)
+ {
+ var code = e.code;
+ }
+ else
+ {
+ dumpn("!!! UNEXPECTED ERROR: " + e +
+ (e.lineNumber ? ", line " + e.lineNumber : ""));
+
+ // no idea what happened -- be paranoid and shut down
+ code = 500;
+ server._requestQuit();
+ }
+
+ // make attempted reuse of data an error
+ this._data = null;
+
+ this._connection.processError(code, this._metadata);
+ },
+
+ /**
+ * Now that we've read the request line and headers, we can actually hand off
+ * the request to be handled.
+ *
+ * This method is called once per request, after the request line and all
+ * headers and the body, if any, have been received.
+ */
+ _handleResponse: function()
+ {
+ NS_ASSERT(this._state == READER_FINISHED);
+
+ // We don't need the line-based data any more, so make attempted reuse an
+ // error.
+ this._data = null;
+
+ this._connection.process(this._metadata);
+ },
+
+
+ // PARSING
+
+ /**
+ * Parses the request line for the HTTP request associated with this.
+ *
+ * @param line : string
+ * the request line
+ */
+ _parseRequestLine: function(line)
+ {
+ NS_ASSERT(this._state == READER_IN_REQUEST_LINE);
+
+ dumpn("*** _parseRequestLine('" + line + "')");
+
+ var metadata = this._metadata;
+
+ // clients and servers SHOULD accept any amount of SP or HT characters
+ // between fields, even though only a single SP is required (section 19.3)
+ var request = line.split(/[ \t]+/);
+ if (!request || request.length != 3)
+ {
+ dumpn("*** No request in line");
+ throw HTTP_400;
+ }
+
+ metadata._method = request[0];
+
+ // get the HTTP version
+ var ver = request[2];
+ var match = ver.match(/^HTTP\/(\d+\.\d+)$/);
+ if (!match)
+ {
+ dumpn("*** No HTTP version in line");
+ throw HTTP_400;
+ }
+
+ // determine HTTP version
+ try
+ {
+ metadata._httpVersion = new nsHttpVersion(match[1]);
+ if (!metadata._httpVersion.atLeast(nsHttpVersion.HTTP_1_0))
+ throw "unsupported HTTP version";
+ }
+ catch (e)
+ {
+ // we support HTTP/1.0 and HTTP/1.1 only
+ throw HTTP_501;
+ }
+
+
+ var fullPath = request[1];
+ var serverIdentity = this._connection.server.identity;
+
+ var scheme, host, port;
+
+ if (fullPath.charAt(0) != "/")
+ {
+ // No absolute paths in the request line in HTTP prior to 1.1
+ if (!metadata._httpVersion.atLeast(nsHttpVersion.HTTP_1_1))
+ {
+ dumpn("*** Metadata version too low");
+ throw HTTP_400;
+ }
+
+ try
+ {
+ var uri = Cc["@mozilla.org/network/io-service;1"]
+ .getService(Ci.nsIIOService)
+ .newURI(fullPath, null, null);
+ fullPath = uri.path;
+ scheme = uri.scheme;
+ host = metadata._host = uri.asciiHost;
+ port = uri.port;
+ if (port === -1)
+ {
+ if (scheme === "http")
+ {
+ port = 80;
+ }
+ else if (scheme === "https")
+ {
+ port = 443;
+ }
+ else
+ {
+ dumpn("*** Unknown scheme: " + scheme);
+ throw HTTP_400;
+ }
+ }
+ }
+ catch (e)
+ {
+ // If the host is not a valid host on the server, the response MUST be a
+ // 400 (Bad Request) error message (section 5.2). Alternately, the URI
+ // is malformed.
+ dumpn("*** Threw when dealing with URI: " + e);
+ throw HTTP_400;
+ }
+
+ if (!serverIdentity.has(scheme, host, port) || fullPath.charAt(0) != "/")
+ {
+ dumpn("*** serverIdentity unknown or path does not start with '/'");
+ throw HTTP_400;
+ }
+ }
+
+ var splitter = fullPath.indexOf("?");
+ if (splitter < 0)
+ {
+ // _queryString already set in ctor
+ metadata._path = fullPath;
+ }
+ else
+ {
+ metadata._path = fullPath.substring(0, splitter);
+ metadata._queryString = fullPath.substring(splitter + 1);
+ }
+
+ metadata._scheme = scheme;
+ metadata._host = host;
+ metadata._port = port;
+ },
+
+ /**
+ * Parses all available HTTP headers in this until the header-ending CRLFCRLF,
+ * adding them to the store of headers in the request.
+ *
+ * @throws
+ * HTTP_400 if the headers are malformed
+ * @returns boolean
+ * true if all headers have now been processed, false otherwise
+ */
+ _parseHeaders: function()
+ {
+ NS_ASSERT(this._state == READER_IN_HEADERS);
+
+ dumpn("*** _parseHeaders");
+
+ var data = this._data;
+
+ var headers = this._metadata._headers;
+ var lastName = this._lastHeaderName;
+ var lastVal = this._lastHeaderValue;
+
+ var line = {};
+ while (true)
+ {
+ dumpn("*** Last name: '" + lastName + "'");
+ dumpn("*** Last val: '" + lastVal + "'");
+ NS_ASSERT(!((lastVal === undefined) ^ (lastName === undefined)),
+ lastName === undefined ?
+ "lastVal without lastName? lastVal: '" + lastVal + "'" :
+ "lastName without lastVal? lastName: '" + lastName + "'");
+
+ if (!data.readLine(line))
+ {
+ // save any data we have from the header we might still be processing
+ this._lastHeaderName = lastName;
+ this._lastHeaderValue = lastVal;
+ return false;
+ }
+
+ var lineText = line.value;
+ dumpn("*** Line text: '" + lineText + "'");
+ var firstChar = lineText.charAt(0);
+
+ // blank line means end of headers
+ if (lineText == "")
+ {
+ // we're finished with the previous header
+ if (lastName)
+ {
+ try
+ {
+ headers.setHeader(lastName, lastVal, true);
+ }
+ catch (e)
+ {
+ dumpn("*** setHeader threw on last header, e == " + e);
+ throw HTTP_400;
+ }
+ }
+ else
+ {
+ // no headers in request -- valid for HTTP/1.0 requests
+ }
+
+ // either way, we're done processing headers
+ this._state = READER_IN_BODY;
+ return true;
+ }
+ else if (firstChar == " " || firstChar == "\t")
+ {
+ // multi-line header if we've already seen a header line
+ if (!lastName)
+ {
+ dumpn("We don't have a header to continue!");
+ throw HTTP_400;
+ }
+
+ // append this line's text to the value; starts with SP/HT, so no need
+ // for separating whitespace
+ lastVal += lineText;
+ }
+ else
+ {
+ // we have a new header, so set the old one (if one existed)
+ if (lastName)
+ {
+ try
+ {
+ headers.setHeader(lastName, lastVal, true);
+ }
+ catch (e)
+ {
+ dumpn("*** setHeader threw on a header, e == " + e);
+ throw HTTP_400;
+ }
+ }
+
+ var colon = lineText.indexOf(":"); // first colon must be splitter
+ if (colon < 1)
+ {
+ dumpn("*** No colon or missing header field-name");
+ throw HTTP_400;
+ }
+
+ // set header name, value (to be set in the next loop, usually)
+ lastName = lineText.substring(0, colon);
+ lastVal = lineText.substring(colon + 1);
+ } // empty, continuation, start of header
+ } // while (true)
+ }
+};
+
+
+/** The character codes for CR and LF. */
+const CR = 0x0D, LF = 0x0A;
+
+/**
+ * Calculates the number of characters before the first CRLF pair in array, or
+ * -1 if the array contains no CRLF pair.
+ *
+ * @param array : Array
+ * an array of numbers in the range [0, 256), each representing a single
+ * character; the first CRLF is the lowest index i where
+ * |array[i] == "\r".charCodeAt(0)| and |array[i+1] == "\n".charCodeAt(0)|,
+ * if such an |i| exists, and -1 otherwise
+ * @param start : uint
+ * start index from which to begin searching in array
+ * @returns int
+ * the index of the first CRLF if any were present, -1 otherwise
+ */
+function findCRLF(array, start)
+{
+ for (var i = array.indexOf(CR, start); i >= 0; i = array.indexOf(CR, i + 1))
+ {
+ if (array[i + 1] == LF)
+ return i;
+ }
+ return -1;
+}
+
+
+/**
+ * A container which provides line-by-line access to the arrays of bytes with
+ * which it is seeded.
+ */
+function LineData()
+{
+ /** An array of queued bytes from which to get line-based characters. */
+ this._data = [];
+
+ /** Start index from which to search for CRLF. */
+ this._start = 0;
+}
+LineData.prototype =
+{
+ /**
+ * Appends the bytes in the given array to the internal data cache maintained
+ * by this.
+ */
+ appendBytes: function(bytes)
+ {
+ var count = bytes.length;
+ var quantum = 262144; // just above half SpiderMonkey's argument-count limit
+ if (count < quantum)
+ {
+ Array.prototype.push.apply(this._data, bytes);
+ return;
+ }
+
+ // Large numbers of bytes may cause Array.prototype.push to be called with
+ // more arguments than the JavaScript engine supports. In that case append
+ // bytes in fixed-size amounts until all bytes are appended.
+ for (var start = 0; start < count; start += quantum)
+ {
+ var slice = bytes.slice(start, Math.min(start + quantum, count));
+ Array.prototype.push.apply(this._data, slice);
+ }
+ },
+
+ /**
+ * Removes and returns a line of data, delimited by CRLF, from this.
+ *
+ * @param out
+ * an object whose "value" property will be set to the first line of text
+ * present in this, sans CRLF, if this contains a full CRLF-delimited line
+ * of text; if this doesn't contain enough data, the value of the property
+ * is undefined
+ * @returns boolean
+ * true if a full line of data could be read from the data in this, false
+ * otherwise
+ */
+ readLine: function(out)
+ {
+ var data = this._data;
+ var length = findCRLF(data, this._start);
+ if (length < 0)
+ {
+ this._start = data.length;
+
+ // But if our data ends in a CR, we have to back up one, because
+ // the first byte in the next packet might be an LF and if we
+ // start looking at data.length we won't find it.
+ if (data.length > 0 && data[data.length - 1] === CR)
+ --this._start;
+
+ return false;
+ }
+
+ // Reset for future lines.
+ this._start = 0;
+
+ //
+ // We have the index of the CR, so remove all the characters, including
+ // CRLF, from the array with splice, and convert the removed array
+ // (excluding the trailing CRLF characters) into the corresponding string.
+ //
+ var leading = data.splice(0, length + 2);
+ var quantum = 262144;
+ var line = "";
+ for (var start = 0; start < length; start += quantum)
+ {
+ var slice = leading.slice(start, Math.min(start + quantum, length));
+ line += String.fromCharCode.apply(null, slice);
+ }
+
+ out.value = line;
+ return true;
+ },
+
+ /**
+ * Removes the bytes currently within this and returns them in an array.
+ *
+ * @returns Array
+ * the bytes within this when this method is called
+ */
+ purge: function()
+ {
+ var data = this._data;
+ this._data = [];
+ return data;
+ }
+};
+
+
+
+/**
+ * Creates a request-handling function for an nsIHttpRequestHandler object.
+ */
+function createHandlerFunc(handler)
+{
+ return function(metadata, response) { handler.handle(metadata, response); };
+}
+
+
+/**
+ * The default handler for directories; writes an HTML response containing a
+ * slightly-formatted directory listing.
+ */
+function defaultIndexHandler(metadata, response)
+{
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var path = htmlEscape(decodeURI(metadata.path));
+
+ //
+ // Just do a very basic bit of directory listings -- no need for too much
+ // fanciness, especially since we don't have a style sheet in which we can
+ // stick rules (don't want to pollute the default path-space).
+ //
+
+ var body = '<html>\
+ <head>\
+ <title>' + path + '</title>\
+ </head>\
+ <body>\
+ <h1>' + path + '</h1>\
+ <ol style="list-style-type: none">';
+
+ var directory = metadata.getProperty("directory");
+ NS_ASSERT(directory && directory.isDirectory());
+
+ var fileList = [];
+ var files = directory.directoryEntries;
+ while (files.hasMoreElements())
+ {
+ var f = files.getNext().QueryInterface(Ci.nsIFile);
+ var name = f.leafName;
+ if (!f.isHidden() &&
+ (name.charAt(name.length - 1) != HIDDEN_CHAR ||
+ name.charAt(name.length - 2) == HIDDEN_CHAR))
+ fileList.push(f);
+ }
+
+ fileList.sort(fileSort);
+
+ for (var i = 0; i < fileList.length; i++)
+ {
+ var file = fileList[i];
+ try
+ {
+ var name = file.leafName;
+ if (name.charAt(name.length - 1) == HIDDEN_CHAR)
+ name = name.substring(0, name.length - 1);
+ var sep = file.isDirectory() ? "/" : "";
+
+ // Note: using " to delimit the attribute here because encodeURIComponent
+ // passes through '.
+ var item = '<li><a href="' + encodeURIComponent(name) + sep + '">' +
+ htmlEscape(name) + sep +
+ '</a></li>';
+
+ body += item;
+ }
+ catch (e) { /* some file system error, ignore the file */ }
+ }
+
+ body += ' </ol>\
+ </body>\
+ </html>';
+
+ response.bodyOutputStream.write(body, body.length);
+}
+
+/**
+ * Sorts a and b (nsIFile objects) into an aesthetically pleasing order.
+ */
+function fileSort(a, b)
+{
+ var dira = a.isDirectory(), dirb = b.isDirectory();
+
+ if (dira && !dirb)
+ return -1;
+ if (dirb && !dira)
+ return 1;
+
+ var namea = a.leafName.toLowerCase(), nameb = b.leafName.toLowerCase();
+ return nameb > namea ? -1 : 1;
+}
+
+
+/**
+ * Converts an externally-provided path into an internal path for use in
+ * determining file mappings.
+ *
+ * @param path
+ * the path to convert
+ * @param encoded
+ * true if the given path should be passed through decodeURI prior to
+ * conversion
+ * @throws URIError
+ * if path is incorrectly encoded
+ */
+function toInternalPath(path, encoded)
+{
+ if (encoded)
+ path = decodeURI(path);
+
+ var comps = path.split("/");
+ for (var i = 0, sz = comps.length; i < sz; i++)
+ {
+ var comp = comps[i];
+ if (comp.charAt(comp.length - 1) == HIDDEN_CHAR)
+ comps[i] = comp + HIDDEN_CHAR;
+ }
+ return comps.join("/");
+}
+
+const PERMS_READONLY = (4 << 6) | (4 << 3) | 4;
+
+/**
+ * Adds custom-specified headers for the given file to the given response, if
+ * any such headers are specified.
+ *
+ * @param file
+ * the file on the disk which is to be written
+ * @param metadata
+ * metadata about the incoming request
+ * @param response
+ * the Response to which any specified headers/data should be written
+ * @throws HTTP_500
+ * if an error occurred while processing custom-specified headers
+ */
+function maybeAddHeaders(file, metadata, response)
+{
+ var name = file.leafName;
+ if (name.charAt(name.length - 1) == HIDDEN_CHAR)
+ name = name.substring(0, name.length - 1);
+
+ var headerFile = file.parent;
+ headerFile.append(name + HEADERS_SUFFIX);
+
+ if (!headerFile.exists())
+ return;
+
+ const PR_RDONLY = 0x01;
+ var fis = new FileInputStream(headerFile, PR_RDONLY, PERMS_READONLY,
+ Ci.nsIFileInputStream.CLOSE_ON_EOF);
+
+ try
+ {
+ var lis = new ConverterInputStream(fis, "UTF-8", 1024, 0x0);
+ lis.QueryInterface(Ci.nsIUnicharLineInputStream);
+
+ var line = {value: ""};
+ var more = lis.readLine(line);
+
+ if (!more && line.value == "")
+ return;
+
+
+ // request line
+
+ var status = line.value;
+ if (status.indexOf("HTTP ") == 0)
+ {
+ status = status.substring(5);
+ var space = status.indexOf(" ");
+ var code, description;
+ if (space < 0)
+ {
+ code = status;
+ description = "";
+ }
+ else
+ {
+ code = status.substring(0, space);
+ description = status.substring(space + 1, status.length);
+ }
+
+ response.setStatusLine(metadata.httpVersion, parseInt(code, 10), description);
+
+ line.value = "";
+ more = lis.readLine(line);
+ }
+
+ // headers
+ while (more || line.value != "")
+ {
+ var header = line.value;
+ var colon = header.indexOf(":");
+
+ response.setHeader(header.substring(0, colon),
+ header.substring(colon + 1, header.length),
+ false); // allow overriding server-set headers
+
+ line.value = "";
+ more = lis.readLine(line);
+ }
+ }
+ catch (e)
+ {
+ dumpn("WARNING: error in headers for " + metadata.path + ": " + e);
+ throw HTTP_500;
+ }
+ finally
+ {
+ fis.close();
+ }
+}
+
+
+/**
+ * An object which handles requests for a server, executing default and
+ * overridden behaviors as instructed by the code which uses and manipulates it.
+ * Default behavior includes the paths / and /trace (diagnostics), with some
+ * support for HTTP error pages for various codes and fallback to HTTP 500 if
+ * those codes fail for any reason.
+ *
+ * @param server : nsHttpServer
+ * the server in which this handler is being used
+ */
+function ServerHandler(server)
+{
+ // FIELDS
+
+ /**
+ * The nsHttpServer instance associated with this handler.
+ */
+ this._server = server;
+
+ /**
+ * A FileMap object containing the set of path->nsILocalFile mappings for
+ * all directory mappings set in the server (e.g., "/" for /var/www/html/,
+ * "/foo/bar/" for /local/path/, and "/foo/bar/baz/" for /local/path2).
+ *
+ * Note carefully: the leading and trailing "/" in each path (not file) are
+ * removed before insertion to simplify the code which uses this. You have
+ * been warned!
+ */
+ this._pathDirectoryMap = new FileMap();
+
+ /**
+ * Custom request handlers for the server in which this resides. Path-handler
+ * pairs are stored as property-value pairs in this property.
+ *
+ * @see ServerHandler.prototype._defaultPaths
+ */
+ this._overridePaths = {};
+
+ /**
+ * Custom request handlers for the path prefixes on the server in which this
+ * resides. Path-handler pairs are stored as property-value pairs in this
+ * property.
+ *
+ * @see ServerHandler.prototype._defaultPaths
+ */
+ this._overridePrefixes = {};
+
+ /**
+ * Custom request handlers for the error handlers in the server in which this
+ * resides. Path-handler pairs are stored as property-value pairs in this
+ * property.
+ *
+ * @see ServerHandler.prototype._defaultErrors
+ */
+ this._overrideErrors = {};
+
+ /**
+ * Maps file extensions to their MIME types in the server, overriding any
+ * mapping that might or might not exist in the MIME service.
+ */
+ this._mimeMappings = {};
+
+ /**
+ * The default handler for requests for directories, used to serve directories
+ * when no index file is present.
+ */
+ this._indexHandler = defaultIndexHandler;
+
+ /** Per-path state storage for the server. */
+ this._state = {};
+
+ /** Entire-server state storage. */
+ this._sharedState = {};
+
+ /** Entire-server state storage for nsISupports values. */
+ this._objectState = {};
+}
+ServerHandler.prototype =
+{
+ // PUBLIC API
+
+ /**
+ * Handles a request to this server, responding to the request appropriately
+ * and initiating server shutdown if necessary.
+ *
+ * This method never throws an exception.
+ *
+ * @param connection : Connection
+ * the connection for this request
+ */
+ handleResponse: function(connection)
+ {
+ var request = connection.request;
+ var response = new Response(connection);
+
+ var path = request.path;
+ dumpn("*** path == " + path);
+
+ try
+ {
+ try
+ {
+ if (path in this._overridePaths)
+ {
+ // explicit paths first, then files based on existing directory mappings,
+ // then (if the file doesn't exist) built-in server default paths
+ dumpn("calling override for " + path);
+ this._overridePaths[path](request, response);
+ }
+ else
+ {
+ var longestPrefix = "";
+ for (let prefix in this._overridePrefixes) {
+ if (prefix.length > longestPrefix.length &&
+ path.substr(0, prefix.length) == prefix)
+ {
+ longestPrefix = prefix;
+ }
+ }
+ if (longestPrefix.length > 0)
+ {
+ dumpn("calling prefix override for " + longestPrefix);
+ this._overridePrefixes[longestPrefix](request, response);
+ }
+ else
+ {
+ this._handleDefault(request, response);
+ }
+ }
+ }
+ catch (e)
+ {
+ if (response.partiallySent())
+ {
+ response.abort(e);
+ return;
+ }
+
+ if (!(e instanceof HttpError))
+ {
+ dumpn("*** unexpected error: e == " + e);
+ throw HTTP_500;
+ }
+ if (e.code !== 404)
+ throw e;
+
+ dumpn("*** default: " + (path in this._defaultPaths));
+
+ response = new Response(connection);
+ if (path in this._defaultPaths)
+ this._defaultPaths[path](request, response);
+ else
+ throw HTTP_404;
+ }
+ }
+ catch (e)
+ {
+ if (response.partiallySent())
+ {
+ response.abort(e);
+ return;
+ }
+
+ var errorCode = "internal";
+
+ try
+ {
+ if (!(e instanceof HttpError))
+ throw e;
+
+ errorCode = e.code;
+ dumpn("*** errorCode == " + errorCode);
+
+ response = new Response(connection);
+ if (e.customErrorHandling)
+ e.customErrorHandling(response);
+ this._handleError(errorCode, request, response);
+ return;
+ }
+ catch (e2)
+ {
+ dumpn("*** error handling " + errorCode + " error: " +
+ "e2 == " + e2 + ", shutting down server");
+
+ connection.server._requestQuit();
+ response.abort(e2);
+ return;
+ }
+ }
+
+ response.complete();
+ },
+
+ //
+ // see nsIHttpServer.registerFile
+ //
+ registerFile: function(path, file)
+ {
+ if (!file)
+ {
+ dumpn("*** unregistering '" + path + "' mapping");
+ delete this._overridePaths[path];
+ return;
+ }
+
+ dumpn("*** registering '" + path + "' as mapping to " + file.path);
+ file = file.clone();
+
+ var self = this;
+ this._overridePaths[path] =
+ function(request, response)
+ {
+ if (!file.exists())
+ throw HTTP_404;
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ self._writeFileResponse(request, file, response, 0, file.fileSize);
+ };
+ },
+
+ //
+ // see nsIHttpServer.registerPathHandler
+ //
+ registerPathHandler: function(path, handler)
+ {
+ // XXX true path validation!
+ if (path.charAt(0) != "/")
+ throw Cr.NS_ERROR_INVALID_ARG;
+
+ this._handlerToField(handler, this._overridePaths, path);
+ },
+
+ //
+ // see nsIHttpServer.registerPrefixHandler
+ //
+ registerPrefixHandler: function(path, handler)
+ {
+ // XXX true path validation!
+ if (path.charAt(0) != "/" || path.charAt(path.length - 1) != "/")
+ throw Cr.NS_ERROR_INVALID_ARG;
+
+ this._handlerToField(handler, this._overridePrefixes, path);
+ },
+
+ //
+ // see nsIHttpServer.registerDirectory
+ //
+ registerDirectory: function(path, directory)
+ {
+ // strip off leading and trailing '/' so that we can use lastIndexOf when
+ // determining exactly how a path maps onto a mapped directory --
+ // conditional is required here to deal with "/".substring(1, 0) being
+ // converted to "/".substring(0, 1) per the JS specification
+ var key = path.length == 1 ? "" : path.substring(1, path.length - 1);
+
+ // the path-to-directory mapping code requires that the first character not
+ // be "/", or it will go into an infinite loop
+ if (key.charAt(0) == "/")
+ throw Cr.NS_ERROR_INVALID_ARG;
+
+ key = toInternalPath(key, false);
+
+ if (directory)
+ {
+ dumpn("*** mapping '" + path + "' to the location " + directory.path);
+ this._pathDirectoryMap.put(key, directory);
+ }
+ else
+ {
+ dumpn("*** removing mapping for '" + path + "'");
+ this._pathDirectoryMap.put(key, null);
+ }
+ },
+
+ //
+ // see nsIHttpServer.registerErrorHandler
+ //
+ registerErrorHandler: function(err, handler)
+ {
+ if (!(err in HTTP_ERROR_CODES))
+ dumpn("*** WARNING: registering non-HTTP/1.1 error code " +
+ "(" + err + ") handler -- was this intentional?");
+
+ this._handlerToField(handler, this._overrideErrors, err);
+ },
+
+ //
+ // see nsIHttpServer.setIndexHandler
+ //
+ setIndexHandler: function(handler)
+ {
+ if (!handler)
+ handler = defaultIndexHandler;
+ else if (typeof(handler) != "function")
+ handler = createHandlerFunc(handler);
+
+ this._indexHandler = handler;
+ },
+
+ //
+ // see nsIHttpServer.registerContentType
+ //
+ registerContentType: function(ext, type)
+ {
+ if (!type)
+ delete this._mimeMappings[ext];
+ else
+ this._mimeMappings[ext] = headerUtils.normalizeFieldValue(type);
+ },
+
+ // PRIVATE API
+
+ /**
+ * Sets or remove (if handler is null) a handler in an object with a key.
+ *
+ * @param handler
+ * a handler, either function or an nsIHttpRequestHandler
+ * @param dict
+ * The object to attach the handler to.
+ * @param key
+ * The field name of the handler.
+ */
+ _handlerToField: function(handler, dict, key)
+ {
+ // for convenience, handler can be a function if this is run from xpcshell
+ if (typeof(handler) == "function")
+ dict[key] = handler;
+ else if (handler)
+ dict[key] = createHandlerFunc(handler);
+ else
+ delete dict[key];
+ },
+
+ /**
+ * Handles a request which maps to a file in the local filesystem (if a base
+ * path has already been set; otherwise the 404 error is thrown).
+ *
+ * @param metadata : Request
+ * metadata for the incoming request
+ * @param response : Response
+ * an uninitialized Response to the given request, to be initialized by a
+ * request handler
+ * @throws HTTP_###
+ * if an HTTP error occurred (usually HTTP_404); note that in this case the
+ * calling code must handle post-processing of the response
+ */
+ _handleDefault: function(metadata, response)
+ {
+ dumpn("*** _handleDefault()");
+
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+
+ var path = metadata.path;
+ NS_ASSERT(path.charAt(0) == "/", "invalid path: <" + path + ">");
+
+ // determine the actual on-disk file; this requires finding the deepest
+ // path-to-directory mapping in the requested URL
+ var file = this._getFileForPath(path);
+
+ // the "file" might be a directory, in which case we either serve the
+ // contained index.html or make the index handler write the response
+ if (file.exists() && file.isDirectory())
+ {
+ file.append("index.html"); // make configurable?
+ if (!file.exists() || file.isDirectory())
+ {
+ metadata._ensurePropertyBag();
+ metadata._bag.setPropertyAsInterface("directory", file.parent);
+ this._indexHandler(metadata, response);
+ return;
+ }
+ }
+
+ // alternately, the file might not exist
+ if (!file.exists())
+ throw HTTP_404;
+
+ var start, end;
+ if (metadata._httpVersion.atLeast(nsHttpVersion.HTTP_1_1) &&
+ metadata.hasHeader("Range") &&
+ this._getTypeFromFile(file) !== SJS_TYPE)
+ {
+ var rangeMatch = metadata.getHeader("Range").match(/^bytes=(\d+)?-(\d+)?$/);
+ if (!rangeMatch)
+ {
+ dumpn("*** Range header bogosity: '" + metadata.getHeader("Range") + "'");
+ throw HTTP_400;
+ }
+
+ if (rangeMatch[1] !== undefined)
+ start = parseInt(rangeMatch[1], 10);
+
+ if (rangeMatch[2] !== undefined)
+ end = parseInt(rangeMatch[2], 10);
+
+ if (start === undefined && end === undefined)
+ {
+ dumpn("*** More Range header bogosity: '" + metadata.getHeader("Range") + "'");
+ throw HTTP_400;
+ }
+
+ // No start given, so the end is really the count of bytes from the
+ // end of the file.
+ if (start === undefined)
+ {
+ start = Math.max(0, file.fileSize - end);
+ end = file.fileSize - 1;
+ }
+
+ // start and end are inclusive
+ if (end === undefined || end >= file.fileSize)
+ end = file.fileSize - 1;
+
+ if (start !== undefined && start >= file.fileSize) {
+ var HTTP_416 = new HttpError(416, "Requested Range Not Satisfiable");
+ HTTP_416.customErrorHandling = function(errorResponse)
+ {
+ maybeAddHeaders(file, metadata, errorResponse);
+ };
+ throw HTTP_416;
+ }
+
+ if (end < start)
+ {
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ start = 0;
+ end = file.fileSize - 1;
+ }
+ else
+ {
+ response.setStatusLine(metadata.httpVersion, 206, "Partial Content");
+ var contentRange = "bytes " + start + "-" + end + "/" + file.fileSize;
+ response.setHeader("Content-Range", contentRange);
+ }
+ }
+ else
+ {
+ start = 0;
+ end = file.fileSize - 1;
+ }
+
+ // finally...
+ dumpn("*** handling '" + path + "' as mapping to " + file.path + " from " +
+ start + " to " + end + " inclusive");
+ this._writeFileResponse(metadata, file, response, start, end - start + 1);
+ },
+
+ /**
+ * Writes an HTTP response for the given file, including setting headers for
+ * file metadata.
+ *
+ * @param metadata : Request
+ * the Request for which a response is being generated
+ * @param file : nsILocalFile
+ * the file which is to be sent in the response
+ * @param response : Response
+ * the response to which the file should be written
+ * @param offset: uint
+ * the byte offset to skip to when writing
+ * @param count: uint
+ * the number of bytes to write
+ */
+ _writeFileResponse: function(metadata, file, response, offset, count)
+ {
+ const PR_RDONLY = 0x01;
+
+ var type = this._getTypeFromFile(file);
+ if (type === SJS_TYPE)
+ {
+ var fis = new FileInputStream(file, PR_RDONLY, PERMS_READONLY,
+ Ci.nsIFileInputStream.CLOSE_ON_EOF);
+
+ try
+ {
+ var sis = new ScriptableInputStream(fis);
+ var s = Cu.Sandbox(gGlobalObject);
+ s.importFunction(dump, "dump");
+
+ // Define a basic key-value state-preservation API across requests, with
+ // keys initially corresponding to the empty string.
+ var self = this;
+ var path = metadata.path;
+ s.importFunction(function getState(k)
+ {
+ return self._getState(path, k);
+ });
+ s.importFunction(function setState(k, v)
+ {
+ self._setState(path, k, v);
+ });
+ s.importFunction(function getSharedState(k)
+ {
+ return self._getSharedState(k);
+ });
+ s.importFunction(function setSharedState(k, v)
+ {
+ self._setSharedState(k, v);
+ });
+ s.importFunction(function getObjectState(k, callback)
+ {
+ callback(self._getObjectState(k));
+ });
+ s.importFunction(function setObjectState(k, v)
+ {
+ self._setObjectState(k, v);
+ });
+ s.importFunction(function registerPathHandler(p, h)
+ {
+ self.registerPathHandler(p, h);
+ });
+
+ // Make it possible for sjs files to access their location
+ this._setState(path, "__LOCATION__", file.path);
+
+ try
+ {
+ // Alas, the line number in errors dumped to console when calling the
+ // request handler is simply an offset from where we load the SJS file.
+ // Work around this in a reasonably non-fragile way by dynamically
+ // getting the line number where we evaluate the SJS file. Don't
+ // separate these two lines!
+ var line = new Error().lineNumber;
+ Cu.evalInSandbox(sis.read(file.fileSize), s, "latest");
+ }
+ catch (e)
+ {
+ dumpn("*** syntax error in SJS at " + file.path + ": " + e);
+ throw HTTP_500;
+ }
+
+ try
+ {
+ s.handleRequest(metadata, response);
+ }
+ catch (e)
+ {
+ dump("*** error running SJS at " + file.path + ": " +
+ e + " on line " +
+ (e instanceof Error
+ ? e.lineNumber + " in httpd.js"
+ : (e.lineNumber - line)) + "\n");
+ throw HTTP_500;
+ }
+ }
+ finally
+ {
+ fis.close();
+ }
+ }
+ else
+ {
+ try
+ {
+ response.setHeader("Last-Modified",
+ toDateString(file.lastModifiedTime),
+ false);
+ }
+ catch (e) { /* lastModifiedTime threw, ignore */ }
+
+ response.setHeader("Content-Type", type, false);
+ maybeAddHeaders(file, metadata, response);
+ response.setHeader("Content-Length", "" + count, false);
+
+ var fis = new FileInputStream(file, PR_RDONLY, PERMS_READONLY,
+ Ci.nsIFileInputStream.CLOSE_ON_EOF);
+
+ offset = offset || 0;
+ count = count || file.fileSize;
+ NS_ASSERT(offset === 0 || offset < file.fileSize, "bad offset");
+ NS_ASSERT(count >= 0, "bad count");
+ NS_ASSERT(offset + count <= file.fileSize, "bad total data size");
+
+ try
+ {
+ if (offset !== 0)
+ {
+ // Seek (or read, if seeking isn't supported) to the correct offset so
+ // the data sent to the client matches the requested range.
+ if (fis instanceof Ci.nsISeekableStream)
+ fis.seek(Ci.nsISeekableStream.NS_SEEK_SET, offset);
+ else
+ new ScriptableInputStream(fis).read(offset);
+ }
+ }
+ catch (e)
+ {
+ fis.close();
+ throw e;
+ }
+
+ function writeMore()
+ {
+ gThreadManager.currentThread
+ .dispatch(writeData, Ci.nsIThread.DISPATCH_NORMAL);
+ }
+
+ var input = new BinaryInputStream(fis);
+ var output = new BinaryOutputStream(response.bodyOutputStream);
+ var writeData =
+ {
+ run: function()
+ {
+ var chunkSize = Math.min(65536, count);
+ count -= chunkSize;
+ NS_ASSERT(count >= 0, "underflow");
+
+ try
+ {
+ var data = input.readByteArray(chunkSize);
+ NS_ASSERT(data.length === chunkSize,
+ "incorrect data returned? got " + data.length +
+ ", expected " + chunkSize);
+ output.writeByteArray(data, data.length);
+ if (count === 0)
+ {
+ fis.close();
+ response.finish();
+ }
+ else
+ {
+ writeMore();
+ }
+ }
+ catch (e)
+ {
+ try
+ {
+ fis.close();
+ }
+ finally
+ {
+ response.finish();
+ }
+ throw e;
+ }
+ }
+ };
+
+ writeMore();
+
+ // Now that we know copying will start, flag the response as async.
+ response.processAsync();
+ }
+ },
+
+ /**
+ * Get the value corresponding to a given key for the given path for SJS state
+ * preservation across requests.
+ *
+ * @param path : string
+ * the path from which the given state is to be retrieved
+ * @param k : string
+ * the key whose corresponding value is to be returned
+ * @returns string
+ * the corresponding value, which is initially the empty string
+ */
+ _getState: function(path, k)
+ {
+ var state = this._state;
+ if (path in state && k in state[path])
+ return state[path][k];
+ return "";
+ },
+
+ /**
+ * Set the value corresponding to a given key for the given path for SJS state
+ * preservation across requests.
+ *
+ * @param path : string
+ * the path from which the given state is to be retrieved
+ * @param k : string
+ * the key whose corresponding value is to be set
+ * @param v : string
+ * the value to be set
+ */
+ _setState: function(path, k, v)
+ {
+ if (typeof v !== "string")
+ throw new Error("non-string value passed");
+ var state = this._state;
+ if (!(path in state))
+ state[path] = {};
+ state[path][k] = v;
+ },
+
+ /**
+ * Get the value corresponding to a given key for SJS state preservation
+ * across requests.
+ *
+ * @param k : string
+ * the key whose corresponding value is to be returned
+ * @returns string
+ * the corresponding value, which is initially the empty string
+ */
+ _getSharedState: function(k)
+ {
+ var state = this._sharedState;
+ if (k in state)
+ return state[k];
+ return "";
+ },
+
+ /**
+ * Set the value corresponding to a given key for SJS state preservation
+ * across requests.
+ *
+ * @param k : string
+ * the key whose corresponding value is to be set
+ * @param v : string
+ * the value to be set
+ */
+ _setSharedState: function(k, v)
+ {
+ if (typeof v !== "string")
+ throw new Error("non-string value passed");
+ this._sharedState[k] = v;
+ },
+
+ /**
+ * Returns the object associated with the given key in the server for SJS
+ * state preservation across requests.
+ *
+ * @param k : string
+ * the key whose corresponding object is to be returned
+ * @returns nsISupports
+ * the corresponding object, or null if none was present
+ */
+ _getObjectState: function(k)
+ {
+ if (typeof k !== "string")
+ throw new Error("non-string key passed");
+ return this._objectState[k] || null;
+ },
+
+ /**
+ * Sets the object associated with the given key in the server for SJS
+ * state preservation across requests.
+ *
+ * @param k : string
+ * the key whose corresponding object is to be set
+ * @param v : nsISupports
+ * the object to be associated with the given key; may be null
+ */
+ _setObjectState: function(k, v)
+ {
+ if (typeof k !== "string")
+ throw new Error("non-string key passed");
+ if (typeof v !== "object")
+ throw new Error("non-object value passed");
+ if (v && !("QueryInterface" in v))
+ {
+ throw new Error("must pass an nsISupports; use wrappedJSObject to ease " +
+ "pain when using the server from JS");
+ }
+
+ this._objectState[k] = v;
+ },
+
+ /**
+ * Gets a content-type for the given file, first by checking for any custom
+ * MIME-types registered with this handler for the file's extension, second by
+ * asking the global MIME service for a content-type, and finally by failing
+ * over to application/octet-stream.
+ *
+ * @param file : nsIFile
+ * the nsIFile for which to get a file type
+ * @returns string
+ * the best content-type which can be determined for the file
+ */
+ _getTypeFromFile: function(file)
+ {
+ try
+ {
+ var name = file.leafName;
+ var dot = name.lastIndexOf(".");
+ if (dot > 0)
+ {
+ var ext = name.slice(dot + 1);
+ if (ext in this._mimeMappings)
+ return this._mimeMappings[ext];
+ }
+ return Cc["@mozilla.org/uriloader/external-helper-app-service;1"]
+ .getService(Ci.nsIMIMEService)
+ .getTypeFromFile(file);
+ }
+ catch (e)
+ {
+ return "application/octet-stream";
+ }
+ },
+
+ /**
+ * Returns the nsILocalFile which corresponds to the path, as determined using
+ * all registered path->directory mappings and any paths which are explicitly
+ * overridden.
+ *
+ * @param path : string
+ * the server path for which a file should be retrieved, e.g. "/foo/bar"
+ * @throws HttpError
+ * when the correct action is the corresponding HTTP error (i.e., because no
+ * mapping was found for a directory in path, the referenced file doesn't
+ * exist, etc.)
+ * @returns nsILocalFile
+ * the file to be sent as the response to a request for the path
+ */
+ _getFileForPath: function(path)
+ {
+ // decode and add underscores as necessary
+ try
+ {
+ path = toInternalPath(path, true);
+ }
+ catch (e)
+ {
+ dumpn("*** toInternalPath threw " + e);
+ throw HTTP_400; // malformed path
+ }
+
+ // next, get the directory which contains this path
+ var pathMap = this._pathDirectoryMap;
+
+ // An example progression of tmp for a path "/foo/bar/baz/" might be:
+ // "foo/bar/baz/", "foo/bar/baz", "foo/bar", "foo", ""
+ var tmp = path.substring(1);
+ while (true)
+ {
+ // do we have a match for current head of the path?
+ var file = pathMap.get(tmp);
+ if (file)
+ {
+ // XXX hack; basically disable showing mapping for /foo/bar/ when the
+ // requested path was /foo/bar, because relative links on the page
+ // will all be incorrect -- we really need the ability to easily
+ // redirect here instead
+ if (tmp == path.substring(1) &&
+ tmp.length != 0 &&
+ tmp.charAt(tmp.length - 1) != "/")
+ file = null;
+ else
+ break;
+ }
+
+ // if we've finished trying all prefixes, exit
+ if (tmp == "")
+ break;
+
+ tmp = tmp.substring(0, tmp.lastIndexOf("/"));
+ }
+
+ // no mapping applies, so 404
+ if (!file)
+ throw HTTP_404;
+
+
+ // last, get the file for the path within the determined directory
+ var parentFolder = file.parent;
+ var dirIsRoot = (parentFolder == null);
+
+ // Strategy here is to append components individually, making sure we
+ // never move above the given directory; this allows paths such as
+ // "<file>/foo/../bar" but prevents paths such as "<file>/../base-sibling";
+ // this component-wise approach also means the code works even on platforms
+ // which don't use "/" as the directory separator, such as Windows
+ var leafPath = path.substring(tmp.length + 1);
+ var comps = leafPath.split("/");
+ for (var i = 0, sz = comps.length; i < sz; i++)
+ {
+ var comp = comps[i];
+
+ if (comp == "..")
+ file = file.parent;
+ else if (comp == "." || comp == "")
+ continue;
+ else
+ file.append(comp);
+
+ if (!dirIsRoot && file.equals(parentFolder))
+ throw HTTP_403;
+ }
+
+ return file;
+ },
+
+ /**
+ * Writes the error page for the given HTTP error code over the given
+ * connection.
+ *
+ * @param errorCode : uint
+ * the HTTP error code to be used
+ * @param connection : Connection
+ * the connection on which the error occurred
+ */
+ handleError: function(errorCode, connection)
+ {
+ var response = new Response(connection);
+
+ dumpn("*** error in request: " + errorCode);
+
+ this._handleError(errorCode, new Request(connection.port), response);
+ },
+
+ /**
+ * Handles a request which generates the given error code, using the
+ * user-defined error handler if one has been set, gracefully falling back to
+ * the x00 status code if the code has no handler, and failing to status code
+ * 500 if all else fails.
+ *
+ * @param errorCode : uint
+ * the HTTP error which is to be returned
+ * @param metadata : Request
+ * metadata for the request, which will often be incomplete since this is an
+ * error
+ * @param response : Response
+ * an uninitialized Response should be initialized when this method
+ * completes with information which represents the desired error code in the
+ * ideal case or a fallback code in abnormal circumstances (i.e., 500 is a
+ * fallback for 505, per HTTP specs)
+ */
+ _handleError: function(errorCode, metadata, response)
+ {
+ if (!metadata)
+ throw Cr.NS_ERROR_NULL_POINTER;
+
+ var errorX00 = errorCode - (errorCode % 100);
+
+ try
+ {
+ if (!(errorCode in HTTP_ERROR_CODES))
+ dumpn("*** WARNING: requested invalid error: " + errorCode);
+
+ // RFC 2616 says that we should try to handle an error by its class if we
+ // can't otherwise handle it -- if that fails, we revert to handling it as
+ // a 500 internal server error, and if that fails we throw and shut down
+ // the server
+
+ // actually handle the error
+ try
+ {
+ if (errorCode in this._overrideErrors)
+ this._overrideErrors[errorCode](metadata, response);
+ else
+ this._defaultErrors[errorCode](metadata, response);
+ }
+ catch (e)
+ {
+ if (response.partiallySent())
+ {
+ response.abort(e);
+ return;
+ }
+
+ // don't retry the handler that threw
+ if (errorX00 == errorCode)
+ throw HTTP_500;
+
+ dumpn("*** error in handling for error code " + errorCode + ", " +
+ "falling back to " + errorX00 + "...");
+ response = new Response(response._connection);
+ if (errorX00 in this._overrideErrors)
+ this._overrideErrors[errorX00](metadata, response);
+ else if (errorX00 in this._defaultErrors)
+ this._defaultErrors[errorX00](metadata, response);
+ else
+ throw HTTP_500;
+ }
+ }
+ catch (e)
+ {
+ if (response.partiallySent())
+ {
+ response.abort();
+ return;
+ }
+
+ // we've tried everything possible for a meaningful error -- now try 500
+ dumpn("*** error in handling for error code " + errorX00 + ", falling " +
+ "back to 500...");
+
+ try
+ {
+ response = new Response(response._connection);
+ if (500 in this._overrideErrors)
+ this._overrideErrors[500](metadata, response);
+ else
+ this._defaultErrors[500](metadata, response);
+ }
+ catch (e2)
+ {
+ dumpn("*** multiple errors in default error handlers!");
+ dumpn("*** e == " + e + ", e2 == " + e2);
+ response.abort(e2);
+ return;
+ }
+ }
+
+ response.complete();
+ },
+
+ // FIELDS
+
+ /**
+ * This object contains the default handlers for the various HTTP error codes.
+ */
+ _defaultErrors:
+ {
+ 400: function(metadata, response)
+ {
+ // none of the data in metadata is reliable, so hard-code everything here
+ response.setStatusLine("1.1", 400, "Bad Request");
+ response.setHeader("Content-Type", "text/plain;charset=utf-8", false);
+
+ var body = "Bad request\n";
+ response.bodyOutputStream.write(body, body.length);
+ },
+ 403: function(metadata, response)
+ {
+ response.setStatusLine(metadata.httpVersion, 403, "Forbidden");
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var body = "<html>\
+ <head><title>403 Forbidden</title></head>\
+ <body>\
+ <h1>403 Forbidden</h1>\
+ </body>\
+ </html>";
+ response.bodyOutputStream.write(body, body.length);
+ },
+ 404: function(metadata, response)
+ {
+ response.setStatusLine(metadata.httpVersion, 404, "Not Found");
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var body = "<html>\
+ <head><title>404 Not Found</title></head>\
+ <body>\
+ <h1>404 Not Found</h1>\
+ <p>\
+ <span style='font-family: monospace;'>" +
+ htmlEscape(metadata.path) +
+ "</span> was not found.\
+ </p>\
+ </body>\
+ </html>";
+ response.bodyOutputStream.write(body, body.length);
+ },
+ 416: function(metadata, response)
+ {
+ response.setStatusLine(metadata.httpVersion,
+ 416,
+ "Requested Range Not Satisfiable");
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var body = "<html>\
+ <head>\
+ <title>416 Requested Range Not Satisfiable</title></head>\
+ <body>\
+ <h1>416 Requested Range Not Satisfiable</h1>\
+ <p>The byte range was not valid for the\
+ requested resource.\
+ </p>\
+ </body>\
+ </html>";
+ response.bodyOutputStream.write(body, body.length);
+ },
+ 500: function(metadata, response)
+ {
+ response.setStatusLine(metadata.httpVersion,
+ 500,
+ "Internal Server Error");
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var body = "<html>\
+ <head><title>500 Internal Server Error</title></head>\
+ <body>\
+ <h1>500 Internal Server Error</h1>\
+ <p>Something's broken in this server and\
+ needs to be fixed.</p>\
+ </body>\
+ </html>";
+ response.bodyOutputStream.write(body, body.length);
+ },
+ 501: function(metadata, response)
+ {
+ response.setStatusLine(metadata.httpVersion, 501, "Not Implemented");
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var body = "<html>\
+ <head><title>501 Not Implemented</title></head>\
+ <body>\
+ <h1>501 Not Implemented</h1>\
+ <p>This server is not (yet) Apache.</p>\
+ </body>\
+ </html>";
+ response.bodyOutputStream.write(body, body.length);
+ },
+ 505: function(metadata, response)
+ {
+ response.setStatusLine("1.1", 505, "HTTP Version Not Supported");
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var body = "<html>\
+ <head><title>505 HTTP Version Not Supported</title></head>\
+ <body>\
+ <h1>505 HTTP Version Not Supported</h1>\
+ <p>This server only supports HTTP/1.0 and HTTP/1.1\
+ connections.</p>\
+ </body>\
+ </html>";
+ response.bodyOutputStream.write(body, body.length);
+ }
+ },
+
+ /**
+ * Contains handlers for the default set of URIs contained in this server.
+ */
+ _defaultPaths:
+ {
+ "/": function(metadata, response)
+ {
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "text/html;charset=utf-8", false);
+
+ var body = "<html>\
+ <head><title>httpd.js</title></head>\
+ <body>\
+ <h1>httpd.js</h1>\
+ <p>If you're seeing this page, httpd.js is up and\
+ serving requests! Now set a base path and serve some\
+ files!</p>\
+ </body>\
+ </html>";
+
+ response.bodyOutputStream.write(body, body.length);
+ },
+
+ "/trace": function(metadata, response)
+ {
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "text/plain;charset=utf-8", false);
+
+ var body = "Request-URI: " +
+ metadata.scheme + "://" + metadata.host + ":" + metadata.port +
+ metadata.path + "\n\n";
+ body += "Request (semantically equivalent, slightly reformatted):\n\n";
+ body += metadata.method + " " + metadata.path;
+
+ if (metadata.queryString)
+ body += "?" + metadata.queryString;
+
+ body += " HTTP/" + metadata.httpVersion + "\r\n";
+
+ var headEnum = metadata.headers;
+ while (headEnum.hasMoreElements())
+ {
+ var fieldName = headEnum.getNext()
+ .QueryInterface(Ci.nsISupportsString)
+ .data;
+ body += fieldName + ": " + metadata.getHeader(fieldName) + "\r\n";
+ }
+
+ response.bodyOutputStream.write(body, body.length);
+ }
+ }
+};
+
+
+/**
+ * Maps absolute paths to files on the local file system (as nsILocalFiles).
+ */
+function FileMap()
+{
+ /** Hash which will map paths to nsILocalFiles. */
+ this._map = {};
+}
+FileMap.prototype =
+{
+ // PUBLIC API
+
+ /**
+ * Maps key to a clone of the nsILocalFile value if value is non-null;
+ * otherwise, removes any extant mapping for key.
+ *
+ * @param key : string
+ * string to which a clone of value is mapped
+ * @param value : nsILocalFile
+ * the file to map to key, or null to remove a mapping
+ */
+ put: function(key, value)
+ {
+ if (value)
+ this._map[key] = value.clone();
+ else
+ delete this._map[key];
+ },
+
+ /**
+ * Returns a clone of the nsILocalFile mapped to key, or null if no such
+ * mapping exists.
+ *
+ * @param key : string
+ * key to which the returned file maps
+ * @returns nsILocalFile
+ * a clone of the mapped file, or null if no mapping exists
+ */
+ get: function(key)
+ {
+ var val = this._map[key];
+ return val ? val.clone() : null;
+ }
+};
+
+
+// Response CONSTANTS
+
+// token = *<any CHAR except CTLs or separators>
+// CHAR = <any US-ASCII character (0-127)>
+// CTL = <any US-ASCII control character (0-31) and DEL (127)>
+// separators = "(" | ")" | "<" | ">" | "@"
+// | "," | ";" | ":" | "\" | <">
+// | "/" | "[" | "]" | "?" | "="
+// | "{" | "}" | SP | HT
+const IS_TOKEN_ARRAY =
+ [0, 0, 0, 0, 0, 0, 0, 0, // 0
+ 0, 0, 0, 0, 0, 0, 0, 0, // 8
+ 0, 0, 0, 0, 0, 0, 0, 0, // 16
+ 0, 0, 0, 0, 0, 0, 0, 0, // 24
+
+ 0, 1, 0, 1, 1, 1, 1, 1, // 32
+ 0, 0, 1, 1, 0, 1, 1, 0, // 40
+ 1, 1, 1, 1, 1, 1, 1, 1, // 48
+ 1, 1, 0, 0, 0, 0, 0, 0, // 56
+
+ 0, 1, 1, 1, 1, 1, 1, 1, // 64
+ 1, 1, 1, 1, 1, 1, 1, 1, // 72
+ 1, 1, 1, 1, 1, 1, 1, 1, // 80
+ 1, 1, 1, 0, 0, 0, 1, 1, // 88
+
+ 1, 1, 1, 1, 1, 1, 1, 1, // 96
+ 1, 1, 1, 1, 1, 1, 1, 1, // 104
+ 1, 1, 1, 1, 1, 1, 1, 1, // 112
+ 1, 1, 1, 0, 1, 0, 1]; // 120
+
+
+/**
+ * Determines whether the given character code is a CTL.
+ *
+ * @param code : uint
+ * the character code
+ * @returns boolean
+ * true if code is a CTL, false otherwise
+ */
+function isCTL(code)
+{
+ return (code >= 0 && code <= 31) || (code == 127);
+}
+
+/**
+ * Represents a response to an HTTP request, encapsulating all details of that
+ * response. This includes all headers, the HTTP version, status code and
+ * explanation, and the entity itself.
+ *
+ * @param connection : Connection
+ * the connection over which this response is to be written
+ */
+function Response(connection)
+{
+ /** The connection over which this response will be written. */
+ this._connection = connection;
+
+ /**
+ * The HTTP version of this response; defaults to 1.1 if not set by the
+ * handler.
+ */
+ this._httpVersion = nsHttpVersion.HTTP_1_1;
+
+ /**
+ * The HTTP code of this response; defaults to 200.
+ */
+ this._httpCode = 200;
+
+ /**
+ * The description of the HTTP code in this response; defaults to "OK".
+ */
+ this._httpDescription = "OK";
+
+ /**
+ * An nsIHttpHeaders object in which the headers in this response should be
+ * stored. This property is null after the status line and headers have been
+ * written to the network, and it may be modified up until it is cleared,
+ * except if this._finished is set first (in which case headers are written
+ * asynchronously in response to a finish() call not preceded by
+ * flushHeaders()).
+ */
+ this._headers = new nsHttpHeaders();
+
+ /**
+ * Set to true when this response is ended (completely constructed if possible
+ * and the connection closed); further actions on this will then fail.
+ */
+ this._ended = false;
+
+ /**
+ * A stream used to hold data written to the body of this response.
+ */
+ this._bodyOutputStream = null;
+
+ /**
+ * A stream containing all data that has been written to the body of this
+ * response so far. (Async handlers make the data contained in this
+ * unreliable as a way of determining content length in general, but auxiliary
+ * saved information can sometimes be used to guarantee reliability.)
+ */
+ this._bodyInputStream = null;
+
+ /**
+ * A stream copier which copies data to the network. It is initially null
+ * until replaced with a copier for response headers; when headers have been
+ * fully sent it is replaced with a copier for the response body, remaining
+ * so for the duration of response processing.
+ */
+ this._asyncCopier = null;
+
+ /**
+ * True if this response has been designated as being processed
+ * asynchronously rather than for the duration of a single call to
+ * nsIHttpRequestHandler.handle.
+ */
+ this._processAsync = false;
+
+ /**
+ * True iff finish() has been called on this, signaling that no more changes
+ * to this may be made.
+ */
+ this._finished = false;
+
+ /**
+ * True iff powerSeized() has been called on this, signaling that this
+ * response is to be handled manually by the response handler (which may then
+ * send arbitrary data in response, even non-HTTP responses).
+ */
+ this._powerSeized = false;
+}
+Response.prototype =
+{
+ // PUBLIC CONSTRUCTION API
+
+ //
+ // see nsIHttpResponse.bodyOutputStream
+ //
+ get bodyOutputStream()
+ {
+ if (this._finished)
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+
+ if (!this._bodyOutputStream)
+ {
+ var pipe = new Pipe(true, false, Response.SEGMENT_SIZE, PR_UINT32_MAX,
+ null);
+ this._bodyOutputStream = pipe.outputStream;
+ this._bodyInputStream = pipe.inputStream;
+ if (this._processAsync || this._powerSeized)
+ this._startAsyncProcessor();
+ }
+
+ return this._bodyOutputStream;
+ },
+
+ //
+ // see nsIHttpResponse.write
+ //
+ write: function(data)
+ {
+ if (this._finished)
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+
+ var dataAsString = String(data);
+ this.bodyOutputStream.write(dataAsString, dataAsString.length);
+ },
+
+ //
+ // see nsIHttpResponse.setStatusLine
+ //
+ setStatusLine: function(httpVersion, code, description)
+ {
+ if (!this._headers || this._finished || this._powerSeized)
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+ this._ensureAlive();
+
+ if (!(code >= 0 && code < 1000))
+ throw Cr.NS_ERROR_INVALID_ARG;
+
+ try
+ {
+ var httpVer;
+ // avoid version construction for the most common cases
+ if (!httpVersion || httpVersion == "1.1")
+ httpVer = nsHttpVersion.HTTP_1_1;
+ else if (httpVersion == "1.0")
+ httpVer = nsHttpVersion.HTTP_1_0;
+ else
+ httpVer = new nsHttpVersion(httpVersion);
+ }
+ catch (e)
+ {
+ throw Cr.NS_ERROR_INVALID_ARG;
+ }
+
+ // Reason-Phrase = *<TEXT, excluding CR, LF>
+ // TEXT = <any OCTET except CTLs, but including LWS>
+ //
+ // XXX this ends up disallowing octets which aren't Unicode, I think -- not
+ // much to do if description is IDL'd as string
+ if (!description)
+ description = "";
+ for (var i = 0; i < description.length; i++)
+ if (isCTL(description.charCodeAt(i)) && description.charAt(i) != "\t")
+ throw Cr.NS_ERROR_INVALID_ARG;
+
+ // set the values only after validation to preserve atomicity
+ this._httpDescription = description;
+ this._httpCode = code;
+ this._httpVersion = httpVer;
+ },
+
+ //
+ // see nsIHttpResponse.setHeader
+ //
+ setHeader: function(name, value, merge)
+ {
+ if (!this._headers || this._finished || this._powerSeized)
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+ this._ensureAlive();
+
+ this._headers.setHeader(name, value, merge);
+ },
+
+ //
+ // see nsIHttpResponse.processAsync
+ //
+ processAsync: function()
+ {
+ if (this._finished)
+ throw Cr.NS_ERROR_UNEXPECTED;
+ if (this._powerSeized)
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+ if (this._processAsync)
+ return;
+ this._ensureAlive();
+
+ dumpn("*** processing connection " + this._connection.number + " async");
+ this._processAsync = true;
+
+ /*
+ * Either the bodyOutputStream getter or this method is responsible for
+ * starting the asynchronous processor and catching writes of data to the
+ * response body of async responses as they happen, for the purpose of
+ * forwarding those writes to the actual connection's output stream.
+ * If bodyOutputStream is accessed first, calling this method will create
+ * the processor (when it first is clear that body data is to be written
+ * immediately, not buffered). If this method is called first, accessing
+ * bodyOutputStream will create the processor. If only this method is
+ * called, we'll write nothing, neither headers nor the nonexistent body,
+ * until finish() is called. Since that delay is easily avoided by simply
+ * getting bodyOutputStream or calling write(""), we don't worry about it.
+ */
+ if (this._bodyOutputStream && !this._asyncCopier)
+ this._startAsyncProcessor();
+ },
+
+ //
+ // see nsIHttpResponse.seizePower
+ //
+ seizePower: function()
+ {
+ if (this._processAsync)
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+ if (this._finished)
+ throw Cr.NS_ERROR_UNEXPECTED;
+ if (this._powerSeized)
+ return;
+ this._ensureAlive();
+
+ dumpn("*** forcefully seizing power over connection " +
+ this._connection.number + "...");
+
+ // Purge any already-written data without sending it. We could as easily
+ // swap out the streams entirely, but that makes it possible to acquire and
+ // unknowingly use a stale reference, so we require there only be one of
+ // each stream ever for any response to avoid this complication.
+ if (this._asyncCopier)
+ this._asyncCopier.cancel(Cr.NS_BINDING_ABORTED);
+ this._asyncCopier = null;
+ if (this._bodyOutputStream)
+ {
+ var input = new BinaryInputStream(this._bodyInputStream);
+ var avail;
+ while ((avail = input.available()) > 0)
+ input.readByteArray(avail);
+ }
+
+ this._powerSeized = true;
+ if (this._bodyOutputStream)
+ this._startAsyncProcessor();
+ },
+
+ //
+ // see nsIHttpResponse.finish
+ //
+ finish: function()
+ {
+ if (!this._processAsync && !this._powerSeized)
+ throw Cr.NS_ERROR_UNEXPECTED;
+ if (this._finished)
+ return;
+
+ dumpn("*** finishing connection " + this._connection.number);
+ this._startAsyncProcessor(); // in case bodyOutputStream was never accessed
+ if (this._bodyOutputStream)
+ this._bodyOutputStream.close();
+ this._finished = true;
+ },
+
+
+ // NSISUPPORTS
+
+ //
+ // see nsISupports.QueryInterface
+ //
+ QueryInterface: function(iid)
+ {
+ if (iid.equals(Ci.nsIHttpResponse) || iid.equals(Ci.nsISupports))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+
+ // POST-CONSTRUCTION API (not exposed externally)
+
+ /**
+ * The HTTP version number of this, as a string (e.g. "1.1").
+ */
+ get httpVersion()
+ {
+ this._ensureAlive();
+ return this._httpVersion.toString();
+ },
+
+ /**
+ * The HTTP status code of this response, as a string of three characters per
+ * RFC 2616.
+ */
+ get httpCode()
+ {
+ this._ensureAlive();
+
+ var codeString = (this._httpCode < 10 ? "0" : "") +
+ (this._httpCode < 100 ? "0" : "") +
+ this._httpCode;
+ return codeString;
+ },
+
+ /**
+ * The description of the HTTP status code of this response, or "" if none is
+ * set.
+ */
+ get httpDescription()
+ {
+ this._ensureAlive();
+
+ return this._httpDescription;
+ },
+
+ /**
+ * The headers in this response, as an nsHttpHeaders object.
+ */
+ get headers()
+ {
+ this._ensureAlive();
+
+ return this._headers;
+ },
+
+ //
+ // see nsHttpHeaders.getHeader
+ //
+ getHeader: function(name)
+ {
+ this._ensureAlive();
+
+ return this._headers.getHeader(name);
+ },
+
+ /**
+ * Determines whether this response may be abandoned in favor of a newly
+ * constructed response. A response may be abandoned only if it is not being
+ * sent asynchronously and if raw control over it has not been taken from the
+ * server.
+ *
+ * @returns boolean
+ * true iff no data has been written to the network
+ */
+ partiallySent: function()
+ {
+ dumpn("*** partiallySent()");
+ return this._processAsync || this._powerSeized;
+ },
+
+ /**
+ * If necessary, kicks off the remaining request processing needed to be done
+ * after a request handler performs its initial work upon this response.
+ */
+ complete: function()
+ {
+ dumpn("*** complete()");
+ if (this._processAsync || this._powerSeized)
+ {
+ NS_ASSERT(this._processAsync ^ this._powerSeized,
+ "can't both send async and relinquish power");
+ return;
+ }
+
+ NS_ASSERT(!this.partiallySent(), "completing a partially-sent response?");
+
+ this._startAsyncProcessor();
+
+ // Now make sure we finish processing this request!
+ if (this._bodyOutputStream)
+ this._bodyOutputStream.close();
+ },
+
+ /**
+ * Abruptly ends processing of this response, usually due to an error in an
+ * incoming request but potentially due to a bad error handler. Since we
+ * cannot handle the error in the usual way (giving an HTTP error page in
+ * response) because data may already have been sent (or because the response
+ * might be expected to have been generated asynchronously or completely from
+ * scratch by the handler), we stop processing this response and abruptly
+ * close the connection.
+ *
+ * @param e : Error
+ * the exception which precipitated this abort, or null if no such exception
+ * was generated
+ */
+ abort: function(e)
+ {
+ dumpn("*** abort(<" + e + ">)");
+
+ // This response will be ended by the processor if one was created.
+ var copier = this._asyncCopier;
+ if (copier)
+ {
+ // We dispatch asynchronously here so that any pending writes of data to
+ // the connection will be deterministically written. This makes it easier
+ // to specify exact behavior, and it makes observable behavior more
+ // predictable for clients. Note that the correctness of this depends on
+ // callbacks in response to _waitToReadData in WriteThroughCopier
+ // happening asynchronously with respect to the actual writing of data to
+ // bodyOutputStream, as they currently do; if they happened synchronously,
+ // an event which ran before this one could write more data to the
+ // response body before we get around to canceling the copier. We have
+ // tests for this in test_seizepower.js, however, and I can't think of a
+ // way to handle both cases without removing bodyOutputStream access and
+ // moving its effective write(data, length) method onto Response, which
+ // would be slower and require more code than this anyway.
+ gThreadManager.currentThread.dispatch({
+ run: function()
+ {
+ dumpn("*** canceling copy asynchronously...");
+ copier.cancel(Cr.NS_ERROR_UNEXPECTED);
+ }
+ }, Ci.nsIThread.DISPATCH_NORMAL);
+ }
+ else
+ {
+ this.end();
+ }
+ },
+
+ /**
+ * Closes this response's network connection, marks the response as finished,
+ * and notifies the server handler that the request is done being processed.
+ */
+ end: function()
+ {
+ NS_ASSERT(!this._ended, "ending this response twice?!?!");
+
+ this._connection.close();
+ if (this._bodyOutputStream)
+ this._bodyOutputStream.close();
+
+ this._finished = true;
+ this._ended = true;
+ },
+
+ // PRIVATE IMPLEMENTATION
+
+ /**
+ * Sends the status line and headers of this response if they haven't been
+ * sent and initiates the process of copying data written to this response's
+ * body to the network.
+ */
+ _startAsyncProcessor: function()
+ {
+ dumpn("*** _startAsyncProcessor()");
+
+ // Handle cases where we're being called a second time. The former case
+ // happens when this is triggered both by complete() and by processAsync(),
+ // while the latter happens when processAsync() in conjunction with sent
+ // data causes abort() to be called.
+ if (this._asyncCopier || this._ended)
+ {
+ dumpn("*** ignoring second call to _startAsyncProcessor");
+ return;
+ }
+
+ // Send headers if they haven't been sent already and should be sent, then
+ // asynchronously continue to send the body.
+ if (this._headers && !this._powerSeized)
+ {
+ this._sendHeaders();
+ return;
+ }
+
+ this._headers = null;
+ this._sendBody();
+ },
+
+ /**
+ * Signals that all modifications to the response status line and headers are
+ * complete and then sends that data over the network to the client. Once
+ * this method completes, a different response to the request that resulted
+ * in this response cannot be sent -- the only possible action in case of
+ * error is to abort the response and close the connection.
+ */
+ _sendHeaders: function()
+ {
+ dumpn("*** _sendHeaders()");
+
+ NS_ASSERT(this._headers);
+ NS_ASSERT(!this._powerSeized);
+
+ // request-line
+ var statusLine = "HTTP/" + this.httpVersion + " " +
+ this.httpCode + " " +
+ this.httpDescription + "\r\n";
+
+ // header post-processing
+
+ var headers = this._headers;
+ headers.setHeader("Connection", "close", false);
+ headers.setHeader("Server", "httpd.js", false);
+ if (!headers.hasHeader("Date"))
+ headers.setHeader("Date", toDateString(Date.now()), false);
+
+ // Any response not being processed asynchronously must have an associated
+ // Content-Length header for reasons of backwards compatibility with the
+ // initial server, which fully buffered every response before sending it.
+ // Beyond that, however, it's good to do this anyway because otherwise it's
+ // impossible to test behaviors that depend on the presence or absence of a
+ // Content-Length header.
+ if (!this._processAsync)
+ {
+ dumpn("*** non-async response, set Content-Length");
+
+ var bodyStream = this._bodyInputStream;
+ var avail = bodyStream ? bodyStream.available() : 0;
+
+ // XXX assumes stream will always report the full amount of data available
+ headers.setHeader("Content-Length", "" + avail, false);
+ }
+
+
+ // construct and send response
+ dumpn("*** header post-processing completed, sending response head...");
+
+ // request-line
+ var preambleData = [statusLine];
+
+ // headers
+ var headEnum = headers.enumerator;
+ while (headEnum.hasMoreElements())
+ {
+ var fieldName = headEnum.getNext()
+ .QueryInterface(Ci.nsISupportsString)
+ .data;
+ var values = headers.getHeaderValues(fieldName);
+ for (var i = 0, sz = values.length; i < sz; i++)
+ preambleData.push(fieldName + ": " + values[i] + "\r\n");
+ }
+
+ // end request-line/headers
+ preambleData.push("\r\n");
+
+ var preamble = preambleData.join("");
+
+ var responseHeadPipe = new Pipe(true, false, 0, PR_UINT32_MAX, null);
+ responseHeadPipe.outputStream.write(preamble, preamble.length);
+
+ var response = this;
+ var copyObserver =
+ {
+ onStartRequest: function(request, cx)
+ {
+ dumpn("*** preamble copying started");
+ },
+
+ onStopRequest: function(request, cx, statusCode)
+ {
+ dumpn("*** preamble copying complete " +
+ "[status=0x" + statusCode.toString(16) + "]");
+
+ if (!Components.isSuccessCode(statusCode))
+ {
+ dumpn("!!! header copying problems: non-success statusCode, " +
+ "ending response");
+
+ response.end();
+ }
+ else
+ {
+ response._sendBody();
+ }
+ },
+
+ QueryInterface: function(aIID)
+ {
+ if (aIID.equals(Ci.nsIRequestObserver) || aIID.equals(Ci.nsISupports))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ }
+ };
+
+ var headerCopier = this._asyncCopier =
+ new WriteThroughCopier(responseHeadPipe.inputStream,
+ this._connection.output,
+ copyObserver, null);
+
+ responseHeadPipe.outputStream.close();
+
+ // Forbid setting any more headers or modifying the request line.
+ this._headers = null;
+ },
+
+ /**
+ * Asynchronously writes the body of the response (or the entire response, if
+ * seizePower() has been called) to the network.
+ */
+ _sendBody: function()
+ {
+ dumpn("*** _sendBody");
+
+ NS_ASSERT(!this._headers, "still have headers around but sending body?");
+
+ // If no body data was written, we're done
+ if (!this._bodyInputStream)
+ {
+ dumpn("*** empty body, response finished");
+ this.end();
+ return;
+ }
+
+ var response = this;
+ var copyObserver =
+ {
+ onStartRequest: function(request, context)
+ {
+ dumpn("*** onStartRequest");
+ },
+
+ onStopRequest: function(request, cx, statusCode)
+ {
+ dumpn("*** onStopRequest [status=0x" + statusCode.toString(16) + "]");
+
+ if (statusCode === Cr.NS_BINDING_ABORTED)
+ {
+ dumpn("*** terminating copy observer without ending the response");
+ }
+ else
+ {
+ if (!Components.isSuccessCode(statusCode))
+ dumpn("*** WARNING: non-success statusCode in onStopRequest");
+
+ response.end();
+ }
+ },
+
+ QueryInterface: function(aIID)
+ {
+ if (aIID.equals(Ci.nsIRequestObserver) || aIID.equals(Ci.nsISupports))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ }
+ };
+
+ dumpn("*** starting async copier of body data...");
+ this._asyncCopier =
+ new WriteThroughCopier(this._bodyInputStream, this._connection.output,
+ copyObserver, null);
+ },
+
+ /** Ensures that this hasn't been ended. */
+ _ensureAlive: function()
+ {
+ NS_ASSERT(!this._ended, "not handling response lifetime correctly");
+ }
+};
+
+/**
+ * Size of the segments in the buffer used in storing response data and writing
+ * it to the socket.
+ */
+Response.SEGMENT_SIZE = 8192;
+
+/** Serves double duty in WriteThroughCopier implementation. */
+function notImplemented()
+{
+ throw Cr.NS_ERROR_NOT_IMPLEMENTED;
+}
+
+/** Returns true iff the given exception represents stream closure. */
+function streamClosed(e)
+{
+ return e === Cr.NS_BASE_STREAM_CLOSED ||
+ (typeof e === "object" && e.result === Cr.NS_BASE_STREAM_CLOSED);
+}
+
+/** Returns true iff the given exception represents a blocked stream. */
+function wouldBlock(e)
+{
+ return e === Cr.NS_BASE_STREAM_WOULD_BLOCK ||
+ (typeof e === "object" && e.result === Cr.NS_BASE_STREAM_WOULD_BLOCK);
+}
+
+/**
+ * Copies data from source to sink as it becomes available, when that data can
+ * be written to sink without blocking.
+ *
+ * @param source : nsIAsyncInputStream
+ * the stream from which data is to be read
+ * @param sink : nsIAsyncOutputStream
+ * the stream to which data is to be copied
+ * @param observer : nsIRequestObserver
+ * an observer which will be notified when the copy starts and finishes
+ * @param context : nsISupports
+ * context passed to observer when notified of start/stop
+ * @throws NS_ERROR_NULL_POINTER
+ * if source, sink, or observer are null
+ */
+function WriteThroughCopier(source, sink, observer, context)
+{
+ if (!source || !sink || !observer)
+ throw Cr.NS_ERROR_NULL_POINTER;
+
+ /** Stream from which data is being read. */
+ this._source = source;
+
+ /** Stream to which data is being written. */
+ this._sink = sink;
+
+ /** Observer watching this copy. */
+ this._observer = observer;
+
+ /** Context for the observer watching this. */
+ this._context = context;
+
+ /**
+ * True iff this is currently being canceled (cancel has been called, the
+ * callback may not yet have been made).
+ */
+ this._canceled = false;
+
+ /**
+ * False until all data has been read from input and written to output, at
+ * which point this copy is completed and cancel() is asynchronously called.
+ */
+ this._completed = false;
+
+ /** Required by nsIRequest, meaningless. */
+ this.loadFlags = 0;
+ /** Required by nsIRequest, meaningless. */
+ this.loadGroup = null;
+ /** Required by nsIRequest, meaningless. */
+ this.name = "response-body-copy";
+
+ /** Status of this request. */
+ this.status = Cr.NS_OK;
+
+ /** Arrays of byte strings waiting to be written to output. */
+ this._pendingData = [];
+
+ // start copying
+ try
+ {
+ observer.onStartRequest(this, context);
+ this._waitToReadData();
+ this._waitForSinkClosure();
+ }
+ catch (e)
+ {
+ dumpn("!!! error starting copy: " + e +
+ ("lineNumber" in e ? ", line " + e.lineNumber : ""));
+ dumpn(e.stack);
+ this.cancel(Cr.NS_ERROR_UNEXPECTED);
+ }
+}
+WriteThroughCopier.prototype =
+{
+ /* nsISupports implementation */
+
+ QueryInterface: function(iid)
+ {
+ if (iid.equals(Ci.nsIInputStreamCallback) ||
+ iid.equals(Ci.nsIOutputStreamCallback) ||
+ iid.equals(Ci.nsIRequest) ||
+ iid.equals(Ci.nsISupports))
+ {
+ return this;
+ }
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+
+ // NSIINPUTSTREAMCALLBACK
+
+ /**
+ * Receives a more-data-in-input notification and writes the corresponding
+ * data to the output.
+ *
+ * @param input : nsIAsyncInputStream
+ * the input stream on whose data we have been waiting
+ */
+ onInputStreamReady: function(input)
+ {
+ if (this._source === null)
+ return;
+
+ dumpn("*** onInputStreamReady");
+
+ //
+ // Ordinarily we'll read a non-zero amount of data from input, queue it up
+ // to be written and then wait for further callbacks. The complications in
+ // this method are the cases where we deviate from that behavior when errors
+ // occur or when copying is drawing to a finish.
+ //
+ // The edge cases when reading data are:
+ //
+ // Zero data is read
+ // If zero data was read, we're at the end of available data, so we can
+ // should stop reading and move on to writing out what we have (or, if
+ // we've already done that, onto notifying of completion).
+ // A stream-closed exception is thrown
+ // This is effectively a less kind version of zero data being read; the
+ // only difference is that we notify of completion with that result
+ // rather than with NS_OK.
+ // Some other exception is thrown
+ // This is the least kind result. We don't know what happened, so we
+ // act as though the stream closed except that we notify of completion
+ // with the result NS_ERROR_UNEXPECTED.
+ //
+
+ var bytesWanted = 0, bytesConsumed = -1;
+ try
+ {
+ input = new BinaryInputStream(input);
+
+ bytesWanted = Math.min(input.available(), Response.SEGMENT_SIZE);
+ dumpn("*** input wanted: " + bytesWanted);
+
+ if (bytesWanted > 0)
+ {
+ var data = input.readByteArray(bytesWanted);
+ bytesConsumed = data.length;
+ this._pendingData.push(String.fromCharCode.apply(String, data));
+ }
+
+ dumpn("*** " + bytesConsumed + " bytes read");
+
+ // Handle the zero-data edge case in the same place as all other edge
+ // cases are handled.
+ if (bytesWanted === 0)
+ throw Cr.NS_BASE_STREAM_CLOSED;
+ }
+ catch (e)
+ {
+ if (streamClosed(e))
+ {
+ dumpn("*** input stream closed");
+ e = bytesWanted === 0 ? Cr.NS_OK : Cr.NS_ERROR_UNEXPECTED;
+ }
+ else
+ {
+ dumpn("!!! unexpected error reading from input, canceling: " + e);
+ e = Cr.NS_ERROR_UNEXPECTED;
+ }
+
+ this._doneReadingSource(e);
+ return;
+ }
+
+ var pendingData = this._pendingData;
+
+ NS_ASSERT(bytesConsumed > 0);
+ NS_ASSERT(pendingData.length > 0, "no pending data somehow?");
+ NS_ASSERT(pendingData[pendingData.length - 1].length > 0,
+ "buffered zero bytes of data?");
+
+ NS_ASSERT(this._source !== null);
+
+ // Reading has gone great, and we've gotten data to write now. What if we
+ // don't have a place to write that data, because output went away just
+ // before this read? Drop everything on the floor, including new data, and
+ // cancel at this point.
+ if (this._sink === null)
+ {
+ pendingData.length = 0;
+ this._doneReadingSource(Cr.NS_ERROR_UNEXPECTED);
+ return;
+ }
+
+ // Okay, we've read the data, and we know we have a place to write it. We
+ // need to queue up the data to be written, but *only* if none is queued
+ // already -- if data's already queued, the code that actually writes the
+ // data will make sure to wait on unconsumed pending data.
+ try
+ {
+ if (pendingData.length === 1)
+ this._waitToWriteData();
+ }
+ catch (e)
+ {
+ dumpn("!!! error waiting to write data just read, swallowing and " +
+ "writing only what we already have: " + e);
+ this._doneWritingToSink(Cr.NS_ERROR_UNEXPECTED);
+ return;
+ }
+
+ // Whee! We successfully read some data, and it's successfully queued up to
+ // be written. All that remains now is to wait for more data to read.
+ try
+ {
+ this._waitToReadData();
+ }
+ catch (e)
+ {
+ dumpn("!!! error waiting to read more data: " + e);
+ this._doneReadingSource(Cr.NS_ERROR_UNEXPECTED);
+ }
+ },
+
+
+ // NSIOUTPUTSTREAMCALLBACK
+
+ /**
+ * Callback when data may be written to the output stream without blocking, or
+ * when the output stream has been closed.
+ *
+ * @param output : nsIAsyncOutputStream
+ * the output stream on whose writability we've been waiting, also known as
+ * this._sink
+ */
+ onOutputStreamReady: function(output)
+ {
+ if (this._sink === null)
+ return;
+
+ dumpn("*** onOutputStreamReady");
+
+ var pendingData = this._pendingData;
+ if (pendingData.length === 0)
+ {
+ // There's no pending data to write. The only way this can happen is if
+ // we're waiting on the output stream's closure, so we can respond to a
+ // copying failure as quickly as possible (rather than waiting for data to
+ // be available to read and then fail to be copied). Therefore, we must
+ // be done now -- don't bother to attempt to write anything and wrap
+ // things up.
+ dumpn("!!! output stream closed prematurely, ending copy");
+
+ this._doneWritingToSink(Cr.NS_ERROR_UNEXPECTED);
+ return;
+ }
+
+
+ NS_ASSERT(pendingData[0].length > 0, "queued up an empty quantum?");
+
+ //
+ // Write out the first pending quantum of data. The possible errors here
+ // are:
+ //
+ // The write might fail because we can't write that much data
+ // Okay, we've written what we can now, so re-queue what's left and
+ // finish writing it out later.
+ // The write failed because the stream was closed
+ // Discard pending data that we can no longer write, stop reading, and
+ // signal that copying finished.
+ // Some other error occurred.
+ // Same as if the stream were closed, but notify with the status
+ // NS_ERROR_UNEXPECTED so the observer knows something was wonky.
+ //
+
+ try
+ {
+ var quantum = pendingData[0];
+
+ // XXX |quantum| isn't guaranteed to be ASCII, so we're relying on
+ // undefined behavior! We're only using this because writeByteArray
+ // is unusably broken for asynchronous output streams; see bug 532834
+ // for details.
+ var bytesWritten = output.write(quantum, quantum.length);
+ if (bytesWritten === quantum.length)
+ pendingData.shift();
+ else
+ pendingData[0] = quantum.substring(bytesWritten);
+
+ dumpn("*** wrote " + bytesWritten + " bytes of data");
+ }
+ catch (e)
+ {
+ if (wouldBlock(e))
+ {
+ NS_ASSERT(pendingData.length > 0,
+ "stream-blocking exception with no data to write?");
+ NS_ASSERT(pendingData[0].length > 0,
+ "stream-blocking exception with empty quantum?");
+ this._waitToWriteData();
+ return;
+ }
+
+ if (streamClosed(e))
+ dumpn("!!! output stream prematurely closed, signaling error...");
+ else
+ dumpn("!!! unknown error: " + e + ", quantum=" + quantum);
+
+ this._doneWritingToSink(Cr.NS_ERROR_UNEXPECTED);
+ return;
+ }
+
+ // The day is ours! Quantum written, now let's see if we have more data
+ // still to write.
+ try
+ {
+ if (pendingData.length > 0)
+ {
+ this._waitToWriteData();
+ return;
+ }
+ }
+ catch (e)
+ {
+ dumpn("!!! unexpected error waiting to write pending data: " + e);
+ this._doneWritingToSink(Cr.NS_ERROR_UNEXPECTED);
+ return;
+ }
+
+ // Okay, we have no more pending data to write -- but might we get more in
+ // the future?
+ if (this._source !== null)
+ {
+ /*
+ * If we might, then wait for the output stream to be closed. (We wait
+ * only for closure because we have no data to write -- and if we waited
+ * for a specific amount of data, we would get repeatedly notified for no
+ * reason if over time the output stream permitted more and more data to
+ * be written to it without blocking.)
+ */
+ this._waitForSinkClosure();
+ }
+ else
+ {
+ /*
+ * On the other hand, if we can't have more data because the input
+ * stream's gone away, then it's time to notify of copy completion.
+ * Victory!
+ */
+ this._sink = null;
+ this._cancelOrDispatchCancelCallback(Cr.NS_OK);
+ }
+ },
+
+
+ // NSIREQUEST
+
+ /** Returns true if the cancel observer hasn't been notified yet. */
+ isPending: function()
+ {
+ return !this._completed;
+ },
+
+ /** Not implemented, don't use! */
+ suspend: notImplemented,
+ /** Not implemented, don't use! */
+ resume: notImplemented,
+
+ /**
+ * Cancels data reading from input, asynchronously writes out any pending
+ * data, and causes the observer to be notified with the given error code when
+ * all writing has finished.
+ *
+ * @param status : nsresult
+ * the status to pass to the observer when data copying has been canceled
+ */
+ cancel: function(status)
+ {
+ dumpn("*** cancel(" + status.toString(16) + ")");
+
+ if (this._canceled)
+ {
+ dumpn("*** suppressing a late cancel");
+ return;
+ }
+
+ this._canceled = true;
+ this.status = status;
+
+ // We could be in the middle of absolutely anything at this point. Both
+ // input and output might still be around, we might have pending data to
+ // write, and in general we know nothing about the state of the world. We
+ // therefore must assume everything's in progress and take everything to its
+ // final steady state (or so far as it can go before we need to finish
+ // writing out remaining data).
+
+ this._doneReadingSource(status);
+ },
+
+
+ // PRIVATE IMPLEMENTATION
+
+ /**
+ * Stop reading input if we haven't already done so, passing e as the status
+ * when closing the stream, and kick off a copy-completion notice if no more
+ * data remains to be written.
+ *
+ * @param e : nsresult
+ * the status to be used when closing the input stream
+ */
+ _doneReadingSource: function(e)
+ {
+ dumpn("*** _doneReadingSource(0x" + e.toString(16) + ")");
+
+ this._finishSource(e);
+ if (this._pendingData.length === 0)
+ this._sink = null;
+ else
+ NS_ASSERT(this._sink !== null, "null output?");
+
+ // If we've written out all data read up to this point, then it's time to
+ // signal completion.
+ if (this._sink === null)
+ {
+ NS_ASSERT(this._pendingData.length === 0, "pending data still?");
+ this._cancelOrDispatchCancelCallback(e);
+ }
+ },
+
+ /**
+ * Stop writing output if we haven't already done so, discard any data that
+ * remained to be sent, close off input if it wasn't already closed, and kick
+ * off a copy-completion notice.
+ *
+ * @param e : nsresult
+ * the status to be used when closing input if it wasn't already closed
+ */
+ _doneWritingToSink: function(e)
+ {
+ dumpn("*** _doneWritingToSink(0x" + e.toString(16) + ")");
+
+ this._pendingData.length = 0;
+ this._sink = null;
+ this._doneReadingSource(e);
+ },
+
+ /**
+ * Completes processing of this copy: either by canceling the copy if it
+ * hasn't already been canceled using the provided status, or by dispatching
+ * the cancel callback event (with the originally provided status, of course)
+ * if it already has been canceled.
+ *
+ * @param status : nsresult
+ * the status code to use to cancel this, if this hasn't already been
+ * canceled
+ */
+ _cancelOrDispatchCancelCallback: function(status)
+ {
+ dumpn("*** _cancelOrDispatchCancelCallback(" + status + ")");
+
+ NS_ASSERT(this._source === null, "should have finished input");
+ NS_ASSERT(this._sink === null, "should have finished output");
+ NS_ASSERT(this._pendingData.length === 0, "should have no pending data");
+
+ if (!this._canceled)
+ {
+ this.cancel(status);
+ return;
+ }
+
+ var self = this;
+ var event =
+ {
+ run: function()
+ {
+ dumpn("*** onStopRequest async callback");
+
+ self._completed = true;
+ try
+ {
+ self._observer.onStopRequest(self, self._context, self.status);
+ }
+ catch (e)
+ {
+ NS_ASSERT(false,
+ "how are we throwing an exception here? we control " +
+ "all the callers! " + e);
+ }
+ }
+ };
+
+ gThreadManager.currentThread.dispatch(event, Ci.nsIThread.DISPATCH_NORMAL);
+ },
+
+ /**
+ * Kicks off another wait for more data to be available from the input stream.
+ */
+ _waitToReadData: function()
+ {
+ dumpn("*** _waitToReadData");
+ this._source.asyncWait(this, 0, Response.SEGMENT_SIZE,
+ gThreadManager.mainThread);
+ },
+
+ /**
+ * Kicks off another wait until data can be written to the output stream.
+ */
+ _waitToWriteData: function()
+ {
+ dumpn("*** _waitToWriteData");
+
+ var pendingData = this._pendingData;
+ NS_ASSERT(pendingData.length > 0, "no pending data to write?");
+ NS_ASSERT(pendingData[0].length > 0, "buffered an empty write?");
+
+ this._sink.asyncWait(this, 0, pendingData[0].length,
+ gThreadManager.mainThread);
+ },
+
+ /**
+ * Kicks off a wait for the sink to which data is being copied to be closed.
+ * We wait for stream closure when we don't have any data to be copied, rather
+ * than waiting to write a specific amount of data. We can't wait to write
+ * data because the sink might be infinitely writable, and if no data appears
+ * in the source for a long time we might have to spin quite a bit waiting to
+ * write, waiting to write again, &c. Waiting on stream closure instead means
+ * we'll get just one notification if the sink dies. Note that when data
+ * starts arriving from the sink we'll resume waiting for data to be written,
+ * dropping this closure-only callback entirely.
+ */
+ _waitForSinkClosure: function()
+ {
+ dumpn("*** _waitForSinkClosure");
+
+ this._sink.asyncWait(this, Ci.nsIAsyncOutputStream.WAIT_CLOSURE_ONLY, 0,
+ gThreadManager.mainThread);
+ },
+
+ /**
+ * Closes input with the given status, if it hasn't already been closed;
+ * otherwise a no-op.
+ *
+ * @param status : nsresult
+ * status code use to close the source stream if necessary
+ */
+ _finishSource: function(status)
+ {
+ dumpn("*** _finishSource(" + status.toString(16) + ")");
+
+ if (this._source !== null)
+ {
+ this._source.closeWithStatus(status);
+ this._source = null;
+ }
+ }
+};
+
+
+/**
+ * A container for utility functions used with HTTP headers.
+ */
+const headerUtils =
+{
+ /**
+ * Normalizes fieldName (by converting it to lowercase) and ensures it is a
+ * valid header field name (although not necessarily one specified in RFC
+ * 2616).
+ *
+ * @throws NS_ERROR_INVALID_ARG
+ * if fieldName does not match the field-name production in RFC 2616
+ * @returns string
+ * fieldName converted to lowercase if it is a valid header, for characters
+ * where case conversion is possible
+ */
+ normalizeFieldName: function(fieldName)
+ {
+ if (fieldName == "")
+ {
+ dumpn("*** Empty fieldName");
+ throw Cr.NS_ERROR_INVALID_ARG;
+ }
+
+ for (var i = 0, sz = fieldName.length; i < sz; i++)
+ {
+ if (!IS_TOKEN_ARRAY[fieldName.charCodeAt(i)])
+ {
+ dumpn(fieldName + " is not a valid header field name!");
+ throw Cr.NS_ERROR_INVALID_ARG;
+ }
+ }
+
+ return fieldName.toLowerCase();
+ },
+
+ /**
+ * Ensures that fieldValue is a valid header field value (although not
+ * necessarily as specified in RFC 2616 if the corresponding field name is
+ * part of the HTTP protocol), normalizes the value if it is, and
+ * returns the normalized value.
+ *
+ * @param fieldValue : string
+ * a value to be normalized as an HTTP header field value
+ * @throws NS_ERROR_INVALID_ARG
+ * if fieldValue does not match the field-value production in RFC 2616
+ * @returns string
+ * fieldValue as a normalized HTTP header field value
+ */
+ normalizeFieldValue: function(fieldValue)
+ {
+ // field-value = *( field-content | LWS )
+ // field-content = <the OCTETs making up the field-value
+ // and consisting of either *TEXT or combinations
+ // of token, separators, and quoted-string>
+ // TEXT = <any OCTET except CTLs,
+ // but including LWS>
+ // LWS = [CRLF] 1*( SP | HT )
+ //
+ // quoted-string = ( <"> *(qdtext | quoted-pair ) <"> )
+ // qdtext = <any TEXT except <">>
+ // quoted-pair = "\" CHAR
+ // CHAR = <any US-ASCII character (octets 0 - 127)>
+
+ // Any LWS that occurs between field-content MAY be replaced with a single
+ // SP before interpreting the field value or forwarding the message
+ // downstream (section 4.2); we replace 1*LWS with a single SP
+ var val = fieldValue.replace(/(?:(?:\r\n)?[ \t]+)+/g, " ");
+
+ // remove leading/trailing LWS (which has been converted to SP)
+ val = val.replace(/^ +/, "").replace(/ +$/, "");
+
+ // that should have taken care of all CTLs, so val should contain no CTLs
+ dumpn("*** Normalized value: '" + val + "'");
+ for (var i = 0, len = val.length; i < len; i++)
+ if (isCTL(val.charCodeAt(i)))
+ {
+ dump("*** Char " + i + " has charcode " + val.charCodeAt(i));
+ throw Cr.NS_ERROR_INVALID_ARG;
+ }
+
+ // XXX disallows quoted-pair where CHAR is a CTL -- will not invalidly
+ // normalize, however, so this can be construed as a tightening of the
+ // spec and not entirely as a bug
+ return val;
+ }
+};
+
+
+
+/**
+ * Converts the given string into a string which is safe for use in an HTML
+ * context.
+ *
+ * @param str : string
+ * the string to make HTML-safe
+ * @returns string
+ * an HTML-safe version of str
+ */
+function htmlEscape(str)
+{
+ // this is naive, but it'll work
+ var s = "";
+ for (var i = 0; i < str.length; i++)
+ s += "&#" + str.charCodeAt(i) + ";";
+ return s;
+}
+
+
+/**
+ * Constructs an object representing an HTTP version (see section 3.1).
+ *
+ * @param versionString
+ * a string of the form "#.#", where # is an non-negative decimal integer with
+ * or without leading zeros
+ * @throws
+ * if versionString does not specify a valid HTTP version number
+ */
+function nsHttpVersion(versionString)
+{
+ var matches = /^(\d+)\.(\d+)$/.exec(versionString);
+ if (!matches)
+ throw "Not a valid HTTP version!";
+
+ /** The major version number of this, as a number. */
+ this.major = parseInt(matches[1], 10);
+
+ /** The minor version number of this, as a number. */
+ this.minor = parseInt(matches[2], 10);
+
+ if (isNaN(this.major) || isNaN(this.minor) ||
+ this.major < 0 || this.minor < 0)
+ throw "Not a valid HTTP version!";
+}
+nsHttpVersion.prototype =
+{
+ /**
+ * Returns the standard string representation of the HTTP version represented
+ * by this (e.g., "1.1").
+ */
+ toString: function ()
+ {
+ return this.major + "." + this.minor;
+ },
+
+ /**
+ * Returns true if this represents the same HTTP version as otherVersion,
+ * false otherwise.
+ *
+ * @param otherVersion : nsHttpVersion
+ * the version to compare against this
+ */
+ equals: function (otherVersion)
+ {
+ return this.major == otherVersion.major &&
+ this.minor == otherVersion.minor;
+ },
+
+ /** True if this >= otherVersion, false otherwise. */
+ atLeast: function(otherVersion)
+ {
+ return this.major > otherVersion.major ||
+ (this.major == otherVersion.major &&
+ this.minor >= otherVersion.minor);
+ }
+};
+
+nsHttpVersion.HTTP_1_0 = new nsHttpVersion("1.0");
+nsHttpVersion.HTTP_1_1 = new nsHttpVersion("1.1");
+
+
+/**
+ * An object which stores HTTP headers for a request or response.
+ *
+ * Note that since headers are case-insensitive, this object converts headers to
+ * lowercase before storing them. This allows the getHeader and hasHeader
+ * methods to work correctly for any case of a header, but it means that the
+ * values returned by .enumerator may not be equal case-sensitively to the
+ * values passed to setHeader when adding headers to this.
+ */
+function nsHttpHeaders()
+{
+ /**
+ * A hash of headers, with header field names as the keys and header field
+ * values as the values. Header field names are case-insensitive, but upon
+ * insertion here they are converted to lowercase. Header field values are
+ * normalized upon insertion to contain no leading or trailing whitespace.
+ *
+ * Note also that per RFC 2616, section 4.2, two headers with the same name in
+ * a message may be treated as one header with the same field name and a field
+ * value consisting of the separate field values joined together with a "," in
+ * their original order. This hash stores multiple headers with the same name
+ * in this manner.
+ */
+ this._headers = {};
+}
+nsHttpHeaders.prototype =
+{
+ /**
+ * Sets the header represented by name and value in this.
+ *
+ * @param name : string
+ * the header name
+ * @param value : string
+ * the header value
+ * @throws NS_ERROR_INVALID_ARG
+ * if name or value is not a valid header component
+ */
+ setHeader: function(fieldName, fieldValue, merge)
+ {
+ var name = headerUtils.normalizeFieldName(fieldName);
+ var value = headerUtils.normalizeFieldValue(fieldValue);
+
+ // The following three headers are stored as arrays because their real-world
+ // syntax prevents joining individual headers into a single header using
+ // ",". See also <http://hg.mozilla.org/mozilla-central/diff/9b2a99adc05e/netwerk/protocol/http/src/nsHttpHeaderArray.cpp#l77>
+ if (merge && name in this._headers)
+ {
+ if (name === "www-authenticate" ||
+ name === "proxy-authenticate" ||
+ name === "set-cookie")
+ {
+ this._headers[name].push(value);
+ }
+ else
+ {
+ this._headers[name][0] += "," + value;
+ NS_ASSERT(this._headers[name].length === 1,
+ "how'd a non-special header have multiple values?")
+ }
+ }
+ else
+ {
+ this._headers[name] = [value];
+ }
+ },
+
+ /**
+ * Returns the value for the header specified by this.
+ *
+ * @throws NS_ERROR_INVALID_ARG
+ * if fieldName does not constitute a valid header field name
+ * @throws NS_ERROR_NOT_AVAILABLE
+ * if the given header does not exist in this
+ * @returns string
+ * the field value for the given header, possibly with non-semantic changes
+ * (i.e., leading/trailing whitespace stripped, whitespace runs replaced
+ * with spaces, etc.) at the option of the implementation; multiple
+ * instances of the header will be combined with a comma, except for
+ * the three headers noted in the description of getHeaderValues
+ */
+ getHeader: function(fieldName)
+ {
+ return this.getHeaderValues(fieldName).join("\n");
+ },
+
+ /**
+ * Returns the value for the header specified by fieldName as an array.
+ *
+ * @throws NS_ERROR_INVALID_ARG
+ * if fieldName does not constitute a valid header field name
+ * @throws NS_ERROR_NOT_AVAILABLE
+ * if the given header does not exist in this
+ * @returns [string]
+ * an array of all the header values in this for the given
+ * header name. Header values will generally be collapsed
+ * into a single header by joining all header values together
+ * with commas, but certain headers (Proxy-Authenticate,
+ * WWW-Authenticate, and Set-Cookie) violate the HTTP spec
+ * and cannot be collapsed in this manner. For these headers
+ * only, the returned array may contain multiple elements if
+ * that header has been added more than once.
+ */
+ getHeaderValues: function(fieldName)
+ {
+ var name = headerUtils.normalizeFieldName(fieldName);
+
+ if (name in this._headers)
+ return this._headers[name];
+ else
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+ },
+
+ /**
+ * Returns true if a header with the given field name exists in this, false
+ * otherwise.
+ *
+ * @param fieldName : string
+ * the field name whose existence is to be determined in this
+ * @throws NS_ERROR_INVALID_ARG
+ * if fieldName does not constitute a valid header field name
+ * @returns boolean
+ * true if the header's present, false otherwise
+ */
+ hasHeader: function(fieldName)
+ {
+ var name = headerUtils.normalizeFieldName(fieldName);
+ return (name in this._headers);
+ },
+
+ /**
+ * Returns a new enumerator over the field names of the headers in this, as
+ * nsISupportsStrings. The names returned will be in lowercase, regardless of
+ * how they were input using setHeader (header names are case-insensitive per
+ * RFC 2616).
+ */
+ get enumerator()
+ {
+ var headers = [];
+ for (var i in this._headers)
+ {
+ var supports = new SupportsString();
+ supports.data = i;
+ headers.push(supports);
+ }
+
+ return new nsSimpleEnumerator(headers);
+ }
+};
+
+
+/**
+ * Constructs an nsISimpleEnumerator for the given array of items.
+ *
+ * @param items : Array
+ * the items, which must all implement nsISupports
+ */
+function nsSimpleEnumerator(items)
+{
+ this._items = items;
+ this._nextIndex = 0;
+}
+nsSimpleEnumerator.prototype =
+{
+ hasMoreElements: function()
+ {
+ return this._nextIndex < this._items.length;
+ },
+ getNext: function()
+ {
+ if (!this.hasMoreElements())
+ throw Cr.NS_ERROR_NOT_AVAILABLE;
+
+ return this._items[this._nextIndex++];
+ },
+ QueryInterface: function(aIID)
+ {
+ if (Ci.nsISimpleEnumerator.equals(aIID) ||
+ Ci.nsISupports.equals(aIID))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ }
+};
+
+
+/**
+ * A representation of the data in an HTTP request.
+ *
+ * @param port : uint
+ * the port on which the server receiving this request runs
+ */
+function Request(port)
+{
+ /** Method of this request, e.g. GET or POST. */
+ this._method = "";
+
+ /** Path of the requested resource; empty paths are converted to '/'. */
+ this._path = "";
+
+ /** Query string, if any, associated with this request (not including '?'). */
+ this._queryString = "";
+
+ /** Scheme of requested resource, usually http, always lowercase. */
+ this._scheme = "http";
+
+ /** Hostname on which the requested resource resides. */
+ this._host = undefined;
+
+ /** Port number over which the request was received. */
+ this._port = port;
+
+ var bodyPipe = new Pipe(false, false, 0, PR_UINT32_MAX, null);
+
+ /** Stream from which data in this request's body may be read. */
+ this._bodyInputStream = bodyPipe.inputStream;
+
+ /** Stream to which data in this request's body is written. */
+ this._bodyOutputStream = bodyPipe.outputStream;
+
+ /**
+ * The headers in this request.
+ */
+ this._headers = new nsHttpHeaders();
+
+ /**
+ * For the addition of ad-hoc properties and new functionality without having
+ * to change nsIHttpRequest every time; currently lazily created, as its only
+ * use is in directory listings.
+ */
+ this._bag = null;
+}
+Request.prototype =
+{
+ // SERVER METADATA
+
+ //
+ // see nsIHttpRequest.scheme
+ //
+ get scheme()
+ {
+ return this._scheme;
+ },
+
+ //
+ // see nsIHttpRequest.host
+ //
+ get host()
+ {
+ return this._host;
+ },
+
+ //
+ // see nsIHttpRequest.port
+ //
+ get port()
+ {
+ return this._port;
+ },
+
+ // REQUEST LINE
+
+ //
+ // see nsIHttpRequest.method
+ //
+ get method()
+ {
+ return this._method;
+ },
+
+ //
+ // see nsIHttpRequest.httpVersion
+ //
+ get httpVersion()
+ {
+ return this._httpVersion.toString();
+ },
+
+ //
+ // see nsIHttpRequest.path
+ //
+ get path()
+ {
+ return this._path;
+ },
+
+ //
+ // see nsIHttpRequest.queryString
+ //
+ get queryString()
+ {
+ return this._queryString;
+ },
+
+ // HEADERS
+
+ //
+ // see nsIHttpRequest.getHeader
+ //
+ getHeader: function(name)
+ {
+ return this._headers.getHeader(name);
+ },
+
+ //
+ // see nsIHttpRequest.hasHeader
+ //
+ hasHeader: function(name)
+ {
+ return this._headers.hasHeader(name);
+ },
+
+ //
+ // see nsIHttpRequest.headers
+ //
+ get headers()
+ {
+ return this._headers.enumerator;
+ },
+
+ //
+ // see nsIPropertyBag.enumerator
+ //
+ get enumerator()
+ {
+ this._ensurePropertyBag();
+ return this._bag.enumerator;
+ },
+
+ //
+ // see nsIHttpRequest.headers
+ //
+ get bodyInputStream()
+ {
+ return this._bodyInputStream;
+ },
+
+ //
+ // see nsIPropertyBag.getProperty
+ //
+ getProperty: function(name)
+ {
+ this._ensurePropertyBag();
+ return this._bag.getProperty(name);
+ },
+
+
+ // NSISUPPORTS
+
+ //
+ // see nsISupports.QueryInterface
+ //
+ QueryInterface: function(iid)
+ {
+ if (iid.equals(Ci.nsIHttpRequest) || iid.equals(Ci.nsISupports))
+ return this;
+
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+
+ // PRIVATE IMPLEMENTATION
+
+ /** Ensures a property bag has been created for ad-hoc behaviors. */
+ _ensurePropertyBag: function()
+ {
+ if (!this._bag)
+ this._bag = new WritablePropertyBag();
+ }
+};
+
+
+// XPCOM trappings
+
+this.NSGetFactory = XPCOMUtils.generateNSGetFactory([nsHttpServer]);
+
+/**
+ * Creates a new HTTP server listening for loopback traffic on the given port,
+ * starts it, and runs the server until the server processes a shutdown request,
+ * spinning an event loop so that events posted by the server's socket are
+ * processed.
+ *
+ * This method is primarily intended for use in running this script from within
+ * xpcshell and running a functional HTTP server without having to deal with
+ * non-essential details.
+ *
+ * Note that running multiple servers using variants of this method probably
+ * doesn't work, simply due to how the internal event loop is spun and stopped.
+ *
+ * @note
+ * This method only works with Mozilla 1.9 (i.e., Firefox 3 or trunk code);
+ * you should use this server as a component in Mozilla 1.8.
+ * @param port
+ * the port on which the server will run, or -1 if there exists no preference
+ * for a specific port; note that attempting to use some values for this
+ * parameter (particularly those below 1024) may cause this method to throw or
+ * may result in the server being prematurely shut down
+ * @param basePath
+ * a local directory from which requests will be served (i.e., if this is
+ * "/home/jwalden/" then a request to /index.html will load
+ * /home/jwalden/index.html); if this is omitted, only the default URLs in
+ * this server implementation will be functional
+ */
+function server(port, basePath)
+{
+ if (basePath)
+ {
+ var lp = Cc["@mozilla.org/file/local;1"]
+ .createInstance(Ci.nsILocalFile);
+ lp.initWithPath(basePath);
+ }
+
+ // if you're running this, you probably want to see debugging info
+ DEBUG = true;
+
+ var srv = new nsHttpServer();
+ if (lp)
+ srv.registerDirectory("/", lp);
+ srv.registerContentType("sjs", SJS_TYPE);
+ srv.identity.setPrimary("http", "localhost", port);
+ srv.start(port);
+
+ var thread = gThreadManager.currentThread;
+ while (!srv.isStopped())
+ thread.processNextEvent(true);
+
+ // get rid of any pending requests
+ while (thread.hasPendingEvents())
+ thread.processNextEvent(true);
+
+ DEBUG = false;
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/json2.js b/services/sync/tps/extensions/mozmill/resource/stdlib/json2.js
new file mode 100644
index 000000000..281a7f713
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/json2.js
@@ -0,0 +1,469 @@
+/*
+ http://www.JSON.org/json2.js
+ 2008-05-25
+
+ Public Domain.
+
+ NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+
+ See http://www.JSON.org/js.html
+
+ This file creates a global JSON object containing two methods: stringify
+ and parse.
+
+ JSON.stringify(value, replacer, space)
+ value any JavaScript value, usually an object or array.
+
+ replacer an optional parameter that determines how object
+ values are stringified for objects without a toJSON
+ method. It can be a function or an array.
+
+ space an optional parameter that specifies the indentation
+ of nested structures. If it is omitted, the text will
+ be packed without extra whitespace. If it is a number,
+ it will specify the number of spaces to indent at each
+ level. If it is a string (such as '\t' or '&nbsp;'),
+ it contains the characters used to indent at each level.
+
+ This method produces a JSON text from a JavaScript value.
+
+ When an object value is found, if the object contains a toJSON
+ method, its toJSON method will be called and the result will be
+ stringified. A toJSON method does not serialize: it returns the
+ value represented by the name/value pair that should be serialized,
+ or undefined if nothing should be serialized. The toJSON method
+ will be passed the key associated with the value, and this will be
+ bound to the object holding the key.
+
+ For example, this would serialize Dates as ISO strings.
+
+ Date.prototype.toJSON = function (key) {
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ return this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z';
+ };
+
+ You can provide an optional replacer method. It will be passed the
+ key and value of each member, with this bound to the containing
+ object. The value that is returned from your method will be
+ serialized. If your method returns undefined, then the member will
+ be excluded from the serialization.
+
+ If the replacer parameter is an array, then it will be used to
+ select the members to be serialized. It filters the results such
+ that only members with keys listed in the replacer array are
+ stringified.
+
+ Values that do not have JSON representations, such as undefined or
+ functions, will not be serialized. Such values in objects will be
+ dropped; in arrays they will be replaced with null. You can use
+ a replacer function to replace those with JSON values.
+ JSON.stringify(undefined) returns undefined.
+
+ The optional space parameter produces a stringification of the
+ value that is filled with line breaks and indentation to make it
+ easier to read.
+
+ If the space parameter is a non-empty string, then that string will
+ be used for indentation. If the space parameter is a number, then
+ the indentation will be that many spaces.
+
+ Example:
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}]);
+ // text is '["e",{"pluribus":"unum"}]'
+
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
+ // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
+
+ text = JSON.stringify([new Date()], function (key, value) {
+ return this[key] instanceof Date ?
+ 'Date(' + this[key] + ')' : value;
+ });
+ // text is '["Date(---current time---)"]'
+
+
+ JSON.parse(text, reviver)
+ This method parses a JSON text to produce an object or array.
+ It can throw a SyntaxError exception.
+
+ The optional reviver parameter is a function that can filter and
+ transform the results. It receives each of the keys and values,
+ and its return value is used instead of the original value.
+ If it returns what it received, then the structure is not modified.
+ If it returns undefined then the member is deleted.
+
+ Example:
+
+ // Parse the text. Values that look like ISO date strings will
+ // be converted to Date objects.
+
+ myData = JSON.parse(text, function (key, value) {
+ var a;
+ if (typeof value === 'string') {
+ a =
+/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
+ if (a) {
+ return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
+ +a[5], +a[6]));
+ }
+ }
+ return value;
+ });
+
+ myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
+ var d;
+ if (typeof value === 'string' &&
+ value.slice(0, 5) === 'Date(' &&
+ value.slice(-1) === ')') {
+ d = new Date(value.slice(5, -1));
+ if (d) {
+ return d;
+ }
+ }
+ return value;
+ });
+
+
+ This is a reference implementation. You are free to copy, modify, or
+ redistribute.
+
+ This code should be minified before deployment.
+ See http://javascript.crockford.com/jsmin.html
+
+ USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
+ NOT CONTROL.
+*/
+
+/*jslint evil: true */
+
+/*global JSON */
+
+/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", call,
+ charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, getUTCMinutes,
+ getUTCMonth, getUTCSeconds, hasOwnProperty, join, lastIndex, length,
+ parse, propertyIsEnumerable, prototype, push, replace, slice, stringify,
+ test, toJSON, toString
+*/
+
+var EXPORTED_SYMBOLS = ["JSON"];
+
+// Create a JSON object only if one does not already exist. We create the
+// object in a closure to avoid creating global variables.
+
+ JSON = function () {
+
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ Date.prototype.toJSON = function (key) {
+
+ return this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z';
+ };
+
+ var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ escapeable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ gap,
+ indent,
+ meta = { // table of character substitutions
+ '\b': '\\b',
+ '\t': '\\t',
+ '\n': '\\n',
+ '\f': '\\f',
+ '\r': '\\r',
+ '"' : '\\"',
+ '\\': '\\\\'
+ },
+ rep;
+
+
+ function quote(string) {
+
+// If the string contains no control characters, no quote characters, and no
+// backslash characters, then we can safely slap some quotes around it.
+// Otherwise we must also replace the offending characters with safe escape
+// sequences.
+
+ escapeable.lastIndex = 0;
+ return escapeable.test(string) ?
+ '"' + string.replace(escapeable, function (a) {
+ var c = meta[a];
+ if (typeof c === 'string') {
+ return c;
+ }
+ return '\\u' + ('0000' +
+ (+(a.charCodeAt(0))).toString(16)).slice(-4);
+ }) + '"' :
+ '"' + string + '"';
+ }
+
+
+ function str(key, holder) {
+
+// Produce a string from holder[key].
+
+ var i, // The loop counter.
+ k, // The member key.
+ v, // The member value.
+ length,
+ mind = gap,
+ partial,
+ value = holder[key];
+
+// If the value has a toJSON method, call it to obtain a replacement value.
+
+ if (value && typeof value === 'object' &&
+ typeof value.toJSON === 'function') {
+ value = value.toJSON(key);
+ }
+
+// If we were called with a replacer function, then call the replacer to
+// obtain a replacement value.
+
+ if (typeof rep === 'function') {
+ value = rep.call(holder, key, value);
+ }
+
+// What happens next depends on the value's type.
+
+ switch (typeof value) {
+ case 'string':
+ return quote(value);
+
+ case 'number':
+
+// JSON numbers must be finite. Encode non-finite numbers as null.
+
+ return isFinite(value) ? String(value) : 'null';
+
+ case 'boolean':
+ case 'null':
+
+// If the value is a boolean or null, convert it to a string. Note:
+// typeof null does not produce 'null'. The case is included here in
+// the remote chance that this gets fixed someday.
+
+ return String(value);
+
+// If the type is 'object', we might be dealing with an object or an array or
+// null.
+
+ case 'object':
+
+// Due to a specification blunder in ECMAScript, typeof null is 'object',
+// so watch out for that case.
+
+ if (!value) {
+ return 'null';
+ }
+
+// Make an array to hold the partial results of stringifying this object value.
+
+ gap += indent;
+ partial = [];
+
+// If the object has a dontEnum length property, we'll treat it as an array.
+
+ if (typeof value.length === 'number' &&
+ !(value.propertyIsEnumerable('length'))) {
+
+// The object is an array. Stringify every element. Use null as a placeholder
+// for non-JSON values.
+
+ length = value.length;
+ for (i = 0; i < length; i += 1) {
+ partial[i] = str(i, value) || 'null';
+ }
+
+// Join all of the elements together, separated with commas, and wrap them in
+// brackets.
+
+ v = partial.length === 0 ? '[]' :
+ gap ? '[\n' + gap +
+ partial.join(',\n' + gap) + '\n' +
+ mind + ']' :
+ '[' + partial.join(',') + ']';
+ gap = mind;
+ return v;
+ }
+
+// If the replacer is an array, use it to select the members to be stringified.
+
+ if (rep && typeof rep === 'object') {
+ length = rep.length;
+ for (i = 0; i < length; i += 1) {
+ k = rep[i];
+ if (typeof k === 'string') {
+ v = str(k, value, rep);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ } else {
+
+// Otherwise, iterate through all of the keys in the object.
+
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = str(k, value, rep);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ }
+
+// Join all of the member texts together, separated with commas,
+// and wrap them in braces.
+
+ v = partial.length === 0 ? '{}' :
+ gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
+ mind + '}' : '{' + partial.join(',') + '}';
+ gap = mind;
+ return v;
+ }
+ }
+
+// Return the JSON object containing the stringify and parse methods.
+
+ return {
+ stringify: function (value, replacer, space) {
+
+// The stringify method takes a value and an optional replacer, and an optional
+// space parameter, and returns a JSON text. The replacer can be a function
+// that can replace values, or an array of strings that will select the keys.
+// A default replacer method can be provided. Use of the space parameter can
+// produce text that is more easily readable.
+
+ var i;
+ gap = '';
+ indent = '';
+
+// If the space parameter is a number, make an indent string containing that
+// many spaces.
+
+ if (typeof space === 'number') {
+ for (i = 0; i < space; i += 1) {
+ indent += ' ';
+ }
+
+// If the space parameter is a string, it will be used as the indent string.
+
+ } else if (typeof space === 'string') {
+ indent = space;
+ }
+
+// If there is a replacer, it must be a function or an array.
+// Otherwise, throw an error.
+
+ rep = replacer;
+ if (replacer && typeof replacer !== 'function' &&
+ (typeof replacer !== 'object' ||
+ typeof replacer.length !== 'number')) {
+ throw new Error('JSON.stringify');
+ }
+
+// Make a fake root object containing our value under the key of ''.
+// Return the result of stringifying the value.
+
+ return str('', {'': value});
+ },
+
+
+ parse: function (text, reviver) {
+
+// The parse method takes a text and an optional reviver function, and returns
+// a JavaScript value if the text is a valid JSON text.
+
+ var j;
+
+ function walk(holder, key) {
+
+// The walk method is used to recursively walk the resulting structure so
+// that modifications can be made.
+
+ var k, v, value = holder[key];
+ if (value && typeof value === 'object') {
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = walk(value, k);
+ if (v !== undefined) {
+ value[k] = v;
+ } else {
+ delete value[k];
+ }
+ }
+ }
+ }
+ return reviver.call(holder, key, value);
+ }
+
+
+// Parsing happens in four stages. In the first stage, we replace certain
+// Unicode characters with escape sequences. JavaScript handles many characters
+// incorrectly, either silently deleting them, or treating them as line endings.
+
+ cx.lastIndex = 0;
+ if (cx.test(text)) {
+ text = text.replace(cx, function (a) {
+ return '\\u' + ('0000' +
+ (+(a.charCodeAt(0))).toString(16)).slice(-4);
+ });
+ }
+
+// In the second stage, we run the text against regular expressions that look
+// for non-JSON patterns. We are especially concerned with '()' and 'new'
+// because they can cause invocation, and '=' because it can cause mutation.
+// But just to be safe, we want to reject all unexpected forms.
+
+// We split the second stage into 4 regexp operations in order to work around
+// crippling inefficiencies in IE's and Safari's regexp engines. First we
+// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
+// replace all simple value tokens with ']' characters. Third, we delete all
+// open brackets that follow a colon or comma or that begin the text. Finally,
+// we look to see that the remaining characters are only whitespace or ']' or
+// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
+
+ if (/^[\],:{}\s]*$/.
+test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
+replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
+replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
+
+// In the third stage we use the eval function to compile the text into a
+// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
+// in JavaScript: it can begin a block or an object literal. We wrap the text
+// in parens to eliminate the ambiguity.
+
+ j = eval('(' + text + ')');
+
+// In the optional fourth stage, we recursively walk the new structure, passing
+// each name/value pair to a reviver function for possible transformation.
+
+ return typeof reviver === 'function' ?
+ walk({'': j}, '') : j;
+ }
+
+// If the text is not JSON parseable, then a SyntaxError is thrown.
+
+ throw new SyntaxError('JSON.parse');
+ }
+ };
+ }();
+
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/objects.js b/services/sync/tps/extensions/mozmill/resource/stdlib/objects.js
new file mode 100644
index 000000000..576117145
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/objects.js
@@ -0,0 +1,54 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['getLength', ];//'compare'];
+
+var getLength = function (obj) {
+ var len = 0;
+ for (i in obj) {
+ len++;
+ }
+
+ return len;
+}
+
+// var logging = {}; Components.utils.import('resource://mozmill/stdlib/logging.js', logging);
+
+// var objectsLogger = logging.getLogger('objectsLogger');
+
+// var compare = function (obj1, obj2, depth, recursion) {
+// if (depth == undefined) {
+// var depth = 4;
+// }
+// if (recursion == undefined) {
+// var recursion = 0;
+// }
+//
+// if (recursion > depth) {
+// return true;
+// }
+//
+// if (typeof(obj1) != typeof(obj2)) {
+// return false;
+// }
+//
+// if (typeof(obj1) == "object" && typeof(obj2) == "object") {
+// if ([x for (x in obj1)].length != [x for (x in obj2)].length) {
+// return false;
+// }
+// for (i in obj1) {
+// recursion++;
+// var result = compare(obj1[i], obj2[i], depth, recursion);
+// objectsLogger.info(i+' in recursion '+result);
+// if (result == false) {
+// return false;
+// }
+// }
+// } else {
+// if (obj1 != obj2) {
+// return false;
+// }
+// }
+// return true;
+// }
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/os.js b/services/sync/tps/extensions/mozmill/resource/stdlib/os.js
new file mode 100644
index 000000000..ce88bea8a
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/os.js
@@ -0,0 +1,57 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['listDirectory', 'getFileForPath', 'abspath', 'getPlatform'];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/Services.jsm");
+
+function listDirectory(file) {
+ // file is the given directory (nsIFile)
+ var entries = file.directoryEntries;
+ var array = [];
+
+ while (entries.hasMoreElements()) {
+ var entry = entries.getNext();
+ entry.QueryInterface(Ci.nsIFile);
+ array.push(entry);
+ }
+
+ return array;
+}
+
+function getFileForPath(path) {
+ var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsILocalFile);
+ file.initWithPath(path);
+ return file;
+}
+
+function abspath(rel, file) {
+ var relSplit = rel.split('/');
+
+ if (relSplit[0] == '..' && !file.isDirectory()) {
+ file = file.parent;
+ }
+
+ for (var p of relSplit) {
+ if (p == '..') {
+ file = file.parent;
+ } else if (p == '.') {
+ if (!file.isDirectory()) {
+ file = file.parent;
+ }
+ } else {
+ file.append(p);
+ }
+ }
+
+ return file.path;
+}
+
+function getPlatform() {
+ return Services.appinfo.OS.toLowerCase();
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js b/services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js
new file mode 100644
index 000000000..2648afd27
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/securable-module.js
@@ -0,0 +1,370 @@
+/* ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Jetpack.
+ *
+ * The Initial Developer of the Original Code is Mozilla.
+ * Portions created by the Initial Developer are Copyright (C) 2007
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ * Atul Varma <atul@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+(function(global) {
+ const Cc = Components.classes;
+ const Ci = Components.interfaces;
+ const Cu = Components.utils;
+ const Cr = Components.results;
+
+ Cu.import("resource://gre/modules/NetUtil.jsm");
+
+ var exports = {};
+
+ var ios = Cc['@mozilla.org/network/io-service;1']
+ .getService(Ci.nsIIOService);
+
+ var systemPrincipal = Cc["@mozilla.org/systemprincipal;1"]
+ .createInstance(Ci.nsIPrincipal);
+
+ function resolvePrincipal(principal, defaultPrincipal) {
+ if (principal === undefined)
+ return defaultPrincipal;
+ if (principal == "system")
+ return systemPrincipal;
+ return principal;
+ }
+
+ // The base URI to we use when we're given relative URLs, if any.
+ var baseURI = null;
+ if (global.window)
+ baseURI = ios.newURI(global.location.href, null, null);
+ exports.baseURI = baseURI;
+
+ // The "parent" chrome URI to use if we're loading code that
+ // needs chrome privileges but may not have a filename that
+ // matches any of SpiderMonkey's defined system filename prefixes.
+ // The latter is needed so that wrappers can be automatically
+ // made for the code. For more information on this, see
+ // bug 418356:
+ //
+ // https://bugzilla.mozilla.org/show_bug.cgi?id=418356
+ var parentChromeURIString;
+ if (baseURI)
+ // We're being loaded from a chrome-privileged document, so
+ // use its URL as the parent string.
+ parentChromeURIString = baseURI.spec;
+ else
+ // We're being loaded from a chrome-privileged JS module or
+ // SecurableModule, so use its filename (which may itself
+ // contain a reference to a parent).
+ parentChromeURIString = Components.stack.filename;
+
+ function maybeParentifyFilename(filename) {
+ var doParentifyFilename = true;
+ try {
+ // TODO: Ideally we should just make
+ // nsIChromeRegistry.wrappersEnabled() available from script
+ // and use it here. Until that's in the platform, though,
+ // we'll play it safe and parentify the filename unless
+ // we're absolutely certain things will be ok if we don't.
+ var filenameURI = ios.newURI(options.filename,
+ null,
+ baseURI);
+ if (filenameURI.scheme == 'chrome' &&
+ filenameURI.path.indexOf('/content/') == 0)
+ // Content packages will always have wrappers made for them;
+ // if automatic wrappers have been disabled for the
+ // chrome package via a chrome manifest flag, then
+ // this still works too, to the extent that the
+ // content package is insecure anyways.
+ doParentifyFilename = false;
+ } catch (e) {}
+ if (doParentifyFilename)
+ return parentChromeURIString + " -> " + filename;
+ return filename;
+ }
+
+ function getRootDir(urlStr) {
+ // TODO: This feels hacky, and like there will be edge cases.
+ return urlStr.slice(0, urlStr.lastIndexOf("/") + 1);
+ }
+
+ exports.SandboxFactory = function SandboxFactory(defaultPrincipal) {
+ // Unless specified otherwise, use a principal with limited
+ // privileges.
+ this._defaultPrincipal = resolvePrincipal(defaultPrincipal,
+ "http://www.mozilla.org");
+ },
+
+ exports.SandboxFactory.prototype = {
+ createSandbox: function createSandbox(options) {
+ var principal = resolvePrincipal(options.principal,
+ this._defaultPrincipal);
+
+ return {
+ _sandbox: new Cu.Sandbox(principal),
+ _principal: principal,
+ get globalScope() {
+ return this._sandbox;
+ },
+ defineProperty: function defineProperty(name, value) {
+ this._sandbox[name] = value;
+ },
+ getProperty: function getProperty(name) {
+ return this._sandbox[name];
+ },
+ evaluate: function evaluate(options) {
+ if (typeof(options) == 'string')
+ options = {contents: options};
+ options = {__proto__: options};
+ if (typeof(options.contents) != 'string')
+ throw new Error('Expected string for options.contents');
+ if (options.lineNo === undefined)
+ options.lineNo = 1;
+ if (options.jsVersion === undefined)
+ options.jsVersion = "1.8";
+ if (typeof(options.filename) != 'string')
+ options.filename = '<string>';
+
+ if (this._principal == systemPrincipal)
+ options.filename = maybeParentifyFilename(options.filename);
+
+ return Cu.evalInSandbox(options.contents,
+ this._sandbox,
+ options.jsVersion,
+ options.filename,
+ options.lineNo);
+ }
+ };
+ }
+ };
+
+ exports.Loader = function Loader(options) {
+ options = {__proto__: options};
+ if (options.fs === undefined) {
+ var rootPaths = options.rootPath || options.rootPaths;
+ if (rootPaths) {
+ if (rootPaths.constructor.name != "Array")
+ rootPaths = [rootPaths];
+ var fses = rootPaths.map(path => new exports.LocalFileSystem(path));
+ options.fs = new exports.CompositeFileSystem(fses);
+ } else
+ options.fs = new exports.LocalFileSystem();
+ }
+ if (options.sandboxFactory === undefined)
+ options.sandboxFactory = new exports.SandboxFactory(
+ options.defaultPrincipal
+ );
+ if (options.modules === undefined)
+ options.modules = {};
+ if (options.globals === undefined)
+ options.globals = {};
+
+ this.fs = options.fs;
+ this.sandboxFactory = options.sandboxFactory;
+ this.sandboxes = {};
+ this.modules = options.modules;
+ this.globals = options.globals;
+ };
+
+ exports.Loader.prototype = {
+ _makeRequire: function _makeRequire(rootDir) {
+ var self = this;
+ return function require(module) {
+ if (module == "chrome") {
+ var chrome = { Cc: Components.classes,
+ Ci: Components.interfaces,
+ Cu: Components.utils,
+ Cr: Components.results,
+ Cm: Components.manager,
+ components: Components
+ };
+ return chrome;
+ }
+ var path = self.fs.resolveModule(rootDir, module);
+ if (!path)
+ throw new Error('Module "' + module + '" not found');
+ if (!(path in self.modules)) {
+ var options = self.fs.getFile(path);
+ if (options.filename === undefined)
+ options.filename = path;
+
+ var exports = {};
+ var sandbox = self.sandboxFactory.createSandbox(options);
+ self.sandboxes[path] = sandbox;
+ for (name in self.globals)
+ sandbox.defineProperty(name, self.globals[name]);
+ sandbox.defineProperty('require', self._makeRequire(path));
+ sandbox.evaluate("var exports = {};");
+ let ES5 = self.modules.es5;
+ if (ES5) {
+ let { Object, Array, Function } = sandbox.globalScope;
+ ES5.init(Object, Array, Function);
+ }
+ self.modules[path] = sandbox.getProperty("exports");
+ sandbox.evaluate(options);
+ }
+ return self.modules[path];
+ };
+ },
+
+ // This is only really used by unit tests and other
+ // development-related facilities, allowing access to symbols
+ // defined in the global scope of a module.
+ findSandboxForModule: function findSandboxForModule(module) {
+ var path = this.fs.resolveModule(null, module);
+ if (!path)
+ throw new Error('Module "' + module + '" not found');
+ if (!(path in this.sandboxes))
+ this.require(module);
+ if (!(path in this.sandboxes))
+ throw new Error('Internal error: path not in sandboxes: ' +
+ path);
+ return this.sandboxes[path];
+ },
+
+ require: function require(module) {
+ return (this._makeRequire(null))(module);
+ },
+
+ runScript: function runScript(options, extraOutput) {
+ if (typeof(options) == 'string')
+ options = {contents: options};
+ options = {__proto__: options};
+ var sandbox = this.sandboxFactory.createSandbox(options);
+ if (extraOutput)
+ extraOutput.sandbox = sandbox;
+ for (name in this.globals)
+ sandbox.defineProperty(name, this.globals[name]);
+ sandbox.defineProperty('require', this._makeRequire(null));
+ return sandbox.evaluate(options);
+ }
+ };
+
+ exports.CompositeFileSystem = function CompositeFileSystem(fses) {
+ this.fses = fses;
+ this._pathMap = {};
+ };
+
+ exports.CompositeFileSystem.prototype = {
+ resolveModule: function resolveModule(base, path) {
+ for (var i = 0; i < this.fses.length; i++) {
+ var fs = this.fses[i];
+ var absPath = fs.resolveModule(base, path);
+ if (absPath) {
+ this._pathMap[absPath] = fs;
+ return absPath;
+ }
+ }
+ return null;
+ },
+ getFile: function getFile(path) {
+ return this._pathMap[path].getFile(path);
+ }
+ };
+
+ exports.LocalFileSystem = function LocalFileSystem(root) {
+ if (root === undefined) {
+ if (!baseURI)
+ throw new Error("Need a root path for module filesystem");
+ root = baseURI;
+ }
+ if (typeof(root) == 'string')
+ root = ios.newURI(root, null, baseURI);
+ if (root instanceof Ci.nsIFile)
+ root = ios.newFileURI(root);
+ if (!(root instanceof Ci.nsIURI))
+ throw new Error('Expected nsIFile, nsIURI, or string for root');
+
+ this.root = root.spec;
+ this._rootURI = root;
+ this._rootURIDir = getRootDir(root.spec);
+ };
+
+ exports.LocalFileSystem.prototype = {
+ resolveModule: function resolveModule(base, path) {
+ path = path + ".js";
+
+ var baseURI;
+ if (!base)
+ baseURI = this._rootURI;
+ else
+ baseURI = ios.newURI(base, null, null);
+ var newURI = ios.newURI(path, null, baseURI);
+ var channel = NetUtil.newChannel({
+ uri: newURI,
+ loadUsingSystemPrincipal: true
+ });
+ try {
+ channel.open2().close();
+ } catch (e) {
+ if (e.result != Cr.NS_ERROR_FILE_NOT_FOUND) {
+ throw e;
+ }
+ return null;
+ }
+ return newURI.spec;
+ },
+ getFile: function getFile(path) {
+ var channel = NetUtil.newChannel({
+ uri: path,
+ loadUsingSystemPrincipal: true
+ });
+ var iStream = channel.open2();
+ var ciStream = Cc["@mozilla.org/intl/converter-input-stream;1"].
+ createInstance(Ci.nsIConverterInputStream);
+ var bufLen = 0x8000;
+ ciStream.init(iStream, "UTF-8", bufLen,
+ Ci.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER);
+ var chunk = {};
+ var data = "";
+ while (ciStream.readString(bufLen, chunk) > 0)
+ data += chunk.value;
+ ciStream.close();
+ iStream.close();
+ return {contents: data};
+ }
+ };
+
+ if (global.window) {
+ // We're being loaded in a chrome window, or a web page with
+ // UniversalXPConnect privileges.
+ global.SecurableModule = exports;
+ } else if (global.exports) {
+ // We're being loaded in a SecurableModule.
+ for (name in exports) {
+ global.exports[name] = exports[name];
+ }
+ } else {
+ // We're being loaded in a JS module.
+ global.EXPORTED_SYMBOLS = [];
+ for (name in exports) {
+ global.EXPORTED_SYMBOLS.push(name);
+ global[name] = exports[name];
+ }
+ }
+ })(this);
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/strings.js b/services/sync/tps/extensions/mozmill/resource/stdlib/strings.js
new file mode 100644
index 000000000..24a93d958
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/strings.js
@@ -0,0 +1,17 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ['trim', 'vslice'];
+
+var arrays = {}; Components.utils.import('resource://mozmill/stdlib/arrays.js', arrays);
+
+var trim = function (str) {
+ return (str.replace(/^[\s\xA0]+/, "").replace(/[\s\xA0]+$/, ""));
+}
+
+var vslice = function (str, svalue, evalue) {
+ var sindex = arrays.indexOf(str, svalue);
+ var eindex = arrays.rindexOf(str, evalue);
+ return str.slice(sindex + 1, eindex);
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/utils.js b/services/sync/tps/extensions/mozmill/resource/stdlib/utils.js
new file mode 100644
index 000000000..73e13e11f
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/utils.js
@@ -0,0 +1,455 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, you can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var EXPORTED_SYMBOLS = ["applicationName", "assert", "Copy", "getBrowserObject",
+ "getChromeWindow", "getWindows", "getWindowByTitle",
+ "getWindowByType", "getWindowId", "getMethodInWindows",
+ "getPreference", "saveDataURL", "setPreference",
+ "sleep", "startTimer", "stopTimer", "takeScreenshot",
+ "unwrapNode", "waitFor"
+ ];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+
+
+Cu.import("resource://gre/modules/NetUtil.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+
+const applicationIdMap = {
+ '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}': 'Firefox'
+}
+const applicationName = applicationIdMap[Services.appinfo.ID] || Services.appinfo.name;
+
+var assertions = {}; Cu.import('resource://mozmill/modules/assertions.js', assertions);
+var broker = {}; Cu.import('resource://mozmill/driver/msgbroker.js', broker);
+var errors = {}; Cu.import('resource://mozmill/modules/errors.js', errors);
+
+var assert = new assertions.Assert();
+
+var hwindow = Services.appShell.hiddenDOMWindow;
+
+var uuidgen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
+
+function Copy (obj) {
+ for (var n in obj) {
+ this[n] = obj[n];
+ }
+}
+
+/**
+ * Returns the browser object of the specified window
+ *
+ * @param {Window} aWindow
+ * Window to get the browser element from.
+ *
+ * @returns {Object} The browser element
+ */
+function getBrowserObject(aWindow) {
+ return aWindow.gBrowser;
+}
+
+function getChromeWindow(aWindow) {
+ var chromeWin = aWindow.QueryInterface(Ci.nsIInterfaceRequestor)
+ .getInterface(Ci.nsIWebNavigation)
+ .QueryInterface(Ci.nsIDocShellTreeItem)
+ .rootTreeItem
+ .QueryInterface(Ci.nsIInterfaceRequestor)
+ .getInterface(Ci.nsIDOMWindow)
+ .QueryInterface(Ci.nsIDOMChromeWindow);
+
+ return chromeWin;
+}
+
+function getWindows(type) {
+ if (type == undefined) {
+ type = "";
+ }
+
+ var windows = [];
+ var enumerator = Services.wm.getEnumerator(type);
+
+ while (enumerator.hasMoreElements()) {
+ windows.push(enumerator.getNext());
+ }
+
+ if (type == "") {
+ windows.push(hwindow);
+ }
+
+ return windows;
+}
+
+function getMethodInWindows(methodName) {
+ for (var w of getWindows()) {
+ if (w[methodName] != undefined) {
+ return w[methodName];
+ }
+ }
+
+ throw new Error("Method with name: '" + methodName + "' is not in any open window.");
+}
+
+function getWindowByTitle(title) {
+ for (var w of getWindows()) {
+ if (w.document.title && w.document.title == title) {
+ return w;
+ }
+ }
+
+ throw new Error("Window with title: '" + title + "' not found.");
+}
+
+function getWindowByType(type) {
+ return Services.wm.getMostRecentWindow(type);
+}
+
+/**
+ * Retrieve the outer window id for the given window.
+ *
+ * @param {Number} aWindow
+ * Window to retrieve the id from.
+ * @returns {Boolean} The outer window id
+ **/
+function getWindowId(aWindow) {
+ try {
+ // Normally we can retrieve the id via window utils
+ return aWindow.QueryInterface(Ci.nsIInterfaceRequestor).
+ getInterface(Ci.nsIDOMWindowUtils).
+ outerWindowID;
+ } catch (e) {
+ // ... but for observer notifications we need another interface
+ return aWindow.QueryInterface(Ci.nsISupportsPRUint64).data;
+ }
+}
+
+var checkChrome = function () {
+ var loc = window.document.location.href;
+ try {
+ loc = window.top.document.location.href;
+ } catch (e) {
+ }
+
+ return /^chrome:\/\//.test(loc);
+}
+
+/**
+ * Called to get the state of an individual preference.
+ *
+ * @param aPrefName string The preference to get the state of.
+ * @param aDefaultValue any The default value if preference was not found.
+ *
+ * @returns any The value of the requested preference
+ *
+ * @see setPref
+ * Code by Henrik Skupin: <hskupin@gmail.com>
+ */
+function getPreference(aPrefName, aDefaultValue) {
+ try {
+ var branch = Services.prefs;
+
+ switch (typeof aDefaultValue) {
+ case ('boolean'):
+ return branch.getBoolPref(aPrefName);
+ case ('string'):
+ return branch.getCharPref(aPrefName);
+ case ('number'):
+ return branch.getIntPref(aPrefName);
+ default:
+ return branch.getComplexValue(aPrefName);
+ }
+ } catch (e) {
+ return aDefaultValue;
+ }
+}
+
+/**
+ * Called to set the state of an individual preference.
+ *
+ * @param aPrefName string The preference to set the state of.
+ * @param aValue any The value to set the preference to.
+ *
+ * @returns boolean Returns true if value was successfully set.
+ *
+ * @see getPref
+ * Code by Henrik Skupin: <hskupin@gmail.com>
+ */
+function setPreference(aName, aValue) {
+ try {
+ var branch = Services.prefs;
+
+ switch (typeof aValue) {
+ case ('boolean'):
+ branch.setBoolPref(aName, aValue);
+ break;
+ case ('string'):
+ branch.setCharPref(aName, aValue);
+ break;
+ case ('number'):
+ branch.setIntPref(aName, aValue);
+ break;
+ default:
+ branch.setComplexValue(aName, aValue);
+ }
+ } catch (e) {
+ return false;
+ }
+
+ return true;
+}
+
+/**
+ * Sleep for the given amount of milliseconds
+ *
+ * @param {number} milliseconds
+ * Sleeps the given number of milliseconds
+ */
+function sleep(milliseconds) {
+ var timeup = false;
+
+ hwindow.setTimeout(function () { timeup = true; }, milliseconds);
+ var thread = Services.tm.currentThread;
+
+ while (!timeup) {
+ thread.processNextEvent(true);
+ }
+
+ broker.pass({'function':'utils.sleep()'});
+}
+
+/**
+ * Check if the callback function evaluates to true
+ */
+function assert(callback, message, thisObject) {
+ var result = callback.call(thisObject);
+
+ if (!result) {
+ throw new Error(message || arguments.callee.name + ": Failed for '" + callback + "'");
+ }
+
+ return true;
+}
+
+/**
+ * Unwraps a node which is wrapped into a XPCNativeWrapper or XrayWrapper
+ *
+ * @param {DOMnode} Wrapped DOM node
+ * @returns {DOMNode} Unwrapped DOM node
+ */
+function unwrapNode(aNode) {
+ var node = aNode;
+ if (node) {
+ // unwrap is not available on older branches (3.5 and 3.6) - Bug 533596
+ if ("unwrap" in XPCNativeWrapper) {
+ node = XPCNativeWrapper.unwrap(node);
+ }
+ else if (node.wrappedJSObject != null) {
+ node = node.wrappedJSObject;
+ }
+ }
+
+ return node;
+}
+
+/**
+ * Waits for the callback evaluates to true
+ */
+function waitFor(callback, message, timeout, interval, thisObject) {
+ broker.log({'function': 'utils.waitFor() - DEPRECATED',
+ 'message': 'utils.waitFor() is deprecated. Use assert.waitFor() instead'});
+ assert.waitFor(callback, message, timeout, interval, thisObject);
+}
+
+/**
+ * Calculates the x and y chrome offset for an element
+ * See https://developer.mozilla.org/en/DOM/window.innerHeight
+ *
+ * Note this function will not work if the user has custom toolbars (via extension) at the bottom or left/right of the screen
+ */
+function getChromeOffset(elem) {
+ var win = elem.ownerDocument.defaultView;
+ // Calculate x offset
+ var chromeWidth = 0;
+
+ if (win["name"] != "sidebar") {
+ chromeWidth = win.outerWidth - win.innerWidth;
+ }
+
+ // Calculate y offset
+ var chromeHeight = win.outerHeight - win.innerHeight;
+ // chromeHeight == 0 means elem is already in the chrome and doesn't need the addonbar offset
+ if (chromeHeight > 0) {
+ // window.innerHeight doesn't include the addon or find bar, so account for these if present
+ var addonbar = win.document.getElementById("addon-bar");
+ if (addonbar) {
+ chromeHeight -= addonbar.scrollHeight;
+ }
+
+ var findbar = win.document.getElementById("FindToolbar");
+ if (findbar) {
+ chromeHeight -= findbar.scrollHeight;
+ }
+ }
+
+ return {'x':chromeWidth, 'y':chromeHeight};
+}
+
+/**
+ * Takes a screenshot of the specified DOM node
+ */
+function takeScreenshot(node, highlights) {
+ var rect, win, width, height, left, top, needsOffset;
+ // node can be either a window or an arbitrary DOM node
+ try {
+ // node is an arbitrary DOM node
+ win = node.ownerDocument.defaultView;
+ rect = node.getBoundingClientRect();
+ width = rect.width;
+ height = rect.height;
+ top = rect.top;
+ left = rect.left;
+ // offset for highlights not needed as they will be relative to this node
+ needsOffset = false;
+ } catch (e) {
+ // node is a window
+ win = node;
+ width = win.innerWidth;
+ height = win.innerHeight;
+ top = 0;
+ left = 0;
+ // offset needed for highlights to take 'outerHeight' of window into account
+ needsOffset = true;
+ }
+
+ var canvas = win.document.createElementNS("http://www.w3.org/1999/xhtml", "canvas");
+ canvas.width = width;
+ canvas.height = height;
+
+ var ctx = canvas.getContext("2d");
+ // Draws the DOM contents of the window to the canvas
+ ctx.drawWindow(win, left, top, width, height, "rgb(255,255,255)");
+
+ // This section is for drawing a red rectangle around each element passed in via the highlights array
+ if (highlights) {
+ ctx.lineWidth = "2";
+ ctx.strokeStyle = "red";
+ ctx.save();
+
+ for (var i = 0; i < highlights.length; ++i) {
+ var elem = highlights[i];
+ rect = elem.getBoundingClientRect();
+
+ var offsetY = 0, offsetX = 0;
+ if (needsOffset) {
+ var offset = getChromeOffset(elem);
+ offsetX = offset.x;
+ offsetY = offset.y;
+ } else {
+ // Don't need to offset the window chrome, just make relative to containing node
+ offsetY = -top;
+ offsetX = -left;
+ }
+
+ // Draw the rectangle
+ ctx.strokeRect(rect.left + offsetX, rect.top + offsetY, rect.width, rect.height);
+ }
+ }
+
+ return canvas.toDataURL("image/jpeg", 0.5);
+}
+
+/**
+ * Save the dataURL content to the specified file. It will be stored in either the persisted screenshot or temporary folder.
+ *
+ * @param {String} aDataURL
+ * The dataURL to save
+ * @param {String} aFilename
+ * Target file name without extension
+ *
+ * @returns {Object} The hash containing the path of saved file, and the failure bit
+ */
+function saveDataURL(aDataURL, aFilename) {
+ var frame = {}; Cu.import('resource://mozmill/modules/frame.js', frame);
+ const FILE_PERMISSIONS = parseInt("0644", 8);
+
+ var file;
+ file = Cc['@mozilla.org/file/local;1']
+ .createInstance(Ci.nsILocalFile);
+ file.initWithPath(frame.persisted['screenshots']['path']);
+ file.append(aFilename + ".jpg");
+ file.createUnique(Ci.nsIFile.NORMAL_FILE_TYPE, FILE_PERMISSIONS);
+
+ // Create an output stream to write to file
+ let foStream = Cc["@mozilla.org/network/file-output-stream;1"]
+ .createInstance(Ci.nsIFileOutputStream);
+ foStream.init(file, 0x02 | 0x08 | 0x10, FILE_PERMISSIONS, foStream.DEFER_OPEN);
+
+ let dataURI = NetUtil.newURI(aDataURL, "UTF8", null);
+ if (!dataURI.schemeIs("data")) {
+ throw TypeError("aDataURL parameter has to have 'data'" +
+ " scheme instead of '" + dataURI.scheme + "'");
+ }
+
+ // Write asynchronously to buffer;
+ // Input and output streams are closed after write
+
+ let ready = false;
+ let failure = false;
+
+ function sync(aStatus) {
+ if (!Components.isSuccessCode(aStatus)) {
+ failure = true;
+ }
+ ready = true;
+ }
+
+ NetUtil.asyncFetch(dataURI, function (aInputStream, aAsyncFetchResult) {
+ if (!Components.isSuccessCode(aAsyncFetchResult)) {
+ // An error occurred!
+ sync(aAsyncFetchResult);
+ } else {
+ // Consume the input stream.
+ NetUtil.asyncCopy(aInputStream, foStream, function (aAsyncCopyResult) {
+ sync(aAsyncCopyResult);
+ });
+ }
+ });
+
+ assert.waitFor(function () {
+ return ready;
+ }, "DataURL has been saved to '" + file.path + "'");
+
+ return {filename: file.path, failure: failure};
+}
+
+/**
+ * Some very brain-dead timer functions useful for performance optimizations
+ * This is only enabled in debug mode
+ *
+ **/
+var gutility_mzmltimer = 0;
+/**
+ * Starts timer initializing with current EPOC time in milliseconds
+ *
+ * @returns none
+ **/
+function startTimer(){
+ dump("TIMERCHECK:: starting now: " + Date.now() + "\n");
+ gutility_mzmltimer = Date.now();
+}
+
+/**
+ * Checks the timer and outputs current elapsed time since start of timer. It
+ * will print out a message you provide with its "time check" so you can
+ * correlate in the log file and figure out elapsed time of specific functions.
+ *
+ * @param aMsg string The debug message to print with the timer check
+ *
+ * @returns none
+ **/
+function checkTimer(aMsg){
+ var end = Date.now();
+ dump("TIMERCHECK:: at " + aMsg + " is: " + (end - gutility_mzmltimer) + "\n");
+}
diff --git a/services/sync/tps/extensions/mozmill/resource/stdlib/withs.js b/services/sync/tps/extensions/mozmill/resource/stdlib/withs.js
new file mode 100644
index 000000000..baa3d18d6
--- /dev/null
+++ b/services/sync/tps/extensions/mozmill/resource/stdlib/withs.js
@@ -0,0 +1,146 @@
+/*
+ Copyright (c) 2006 Lawrence Oluyede <l.oluyede@gmail.com>
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+*/
+
+/*
+ startsWith(str, prefix[, start[, end]]) -> bool
+
+ Return true if str ends with the specified prefix, false otherwise.
+ With optional start, test str beginning at that position.
+ With optional end, stop comparing str at that position.
+ prefix can also be an array of strings to try.
+*/
+
+var EXPORTED_SYMBOLS = ['startsWith', 'endsWith'];
+
+function startsWith(str, prefix, start, end) {
+ if (arguments.length < 2) {
+ throw new TypeError('startsWith() requires at least 2 arguments');
+ }
+
+ // check if start and end are null/undefined or a 'number'
+ if ((start == null) || (isNaN(new Number(start)))) {
+ start = 0;
+ }
+ if ((end == null) || (isNaN(new Number(end)))) {
+ end = Number.MAX_VALUE;
+ }
+
+ // if it's an array
+ if (typeof prefix == "object") {
+ for (var i = 0, j = prefix.length; i < j; i++) {
+ var res = _stringTailMatch(str, prefix[i], start, end, true);
+ if (res) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ return _stringTailMatch(str, prefix, start, end, true);
+}
+
+/*
+ endsWith(str, suffix[, start[, end]]) -> bool
+
+ Return true if str ends with the specified suffix, false otherwise.
+ With optional start, test str beginning at that position.
+ With optional end, stop comparing str at that position.
+ suffix can also be an array of strings to try.
+*/
+function endsWith(str, suffix, start, end) {
+ if (arguments.length < 2) {
+ throw new TypeError('endsWith() requires at least 2 arguments');
+ }
+
+ // check if start and end are null/undefined or a 'number'
+ if ((start == null) || (isNaN(new Number(start)))) {
+ start = 0;
+ }
+ if ((end == null) || (isNaN(new Number(end)))) {
+ end = Number.MAX_VALUE;
+ }
+
+ // if it's an array
+ if (typeof suffix == "object") {
+ for (var i = 0, j = suffix.length; i < j; i++) {
+ var res = _stringTailMatch(str, suffix[i], start, end, false);
+ if (res) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ return _stringTailMatch(str, suffix, start, end, false);
+}
+
+/*
+ Matches the end (direction == false) or start (direction == true) of str
+ against substr, using the start and end arguments. Returns false
+ if not found and true if found.
+*/
+function _stringTailMatch(str, substr, start, end, fromStart) {
+ var len = str.length;
+ var slen = substr.length;
+
+ var indices = _adjustIndices(start, end, len);
+ start = indices[0]; end = indices[1]; len = indices[2];
+
+ if (fromStart) {
+ if (start + slen > len) {
+ return false;
+ }
+ } else {
+ if (end - start < slen || start > len) {
+ return false;
+ }
+ if (end - slen > start) {
+ start = end - slen;
+ }
+ }
+
+ if (end - start >= slen) {
+ return str.substr(start, slen) == substr;
+ }
+ return false;
+}
+
+function _adjustIndices(start, end, len)
+{
+ if (end > len) {
+ end = len;
+ } else if (end < 0) {
+ end += len;
+ }
+
+ if (end < 0) {
+ end = 0;
+ }
+ if (start < 0) {
+ start += len;
+ }
+ if (start < 0) {
+ start = 0;
+ }
+
+ return [start, end, len];
+}
diff --git a/services/sync/tps/extensions/tps/chrome.manifest b/services/sync/tps/extensions/tps/chrome.manifest
new file mode 100644
index 000000000..4baf55677
--- /dev/null
+++ b/services/sync/tps/extensions/tps/chrome.manifest
@@ -0,0 +1,5 @@
+resource tps resource/
+
+component {4e5bd3f0-41d3-11df-9879-0800200c9a66} components/tps-cmdline.js
+contract @mozilla.org/commandlinehandler/general-startup;1?type=tps {4e5bd3f0-41d3-11df-9879-0800200c9a66}
+category command-line-handler m-tps @mozilla.org/commandlinehandler/general-startup;1?type=tps
diff --git a/services/sync/tps/extensions/tps/components/tps-cmdline.js b/services/sync/tps/extensions/tps/components/tps-cmdline.js
new file mode 100644
index 000000000..aaa9870ba
--- /dev/null
+++ b/services/sync/tps/extensions/tps/components/tps-cmdline.js
@@ -0,0 +1,150 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+const CC = Components.classes;
+const CI = Components.interfaces;
+
+const TPS_ID = "tps@mozilla.org";
+const TPS_CMDLINE_CONTRACTID = "@mozilla.org/commandlinehandler/general-startup;1?type=tps";
+const TPS_CMDLINE_CLSID = Components.ID('{4e5bd3f0-41d3-11df-9879-0800200c9a66}');
+const CATMAN_CONTRACTID = "@mozilla.org/categorymanager;1";
+const nsISupports = Components.interfaces.nsISupports;
+
+const nsICategoryManager = Components.interfaces.nsICategoryManager;
+const nsICmdLineHandler = Components.interfaces.nsICmdLineHandler;
+const nsICommandLine = Components.interfaces.nsICommandLine;
+const nsICommandLineHandler = Components.interfaces.nsICommandLineHandler;
+const nsIComponentRegistrar = Components.interfaces.nsIComponentRegistrar;
+const nsISupportsString = Components.interfaces.nsISupportsString;
+const nsIWindowWatcher = Components.interfaces.nsIWindowWatcher;
+
+Components.utils.import("resource://gre/modules/XPCOMUtils.jsm");
+
+function TPSCmdLineHandler() {}
+
+TPSCmdLineHandler.prototype = {
+ classDescription: "TPSCmdLineHandler",
+ classID : TPS_CMDLINE_CLSID,
+ contractID : TPS_CMDLINE_CONTRACTID,
+
+ QueryInterface: XPCOMUtils.generateQI([nsISupports,
+ nsICommandLineHandler,
+ nsICmdLineHandler]), /* nsISupports */
+
+ /* nsICmdLineHandler */
+ commandLineArgument : "-tps",
+ prefNameForStartup : "general.startup.tps",
+ helpText : "Run TPS tests with the given test file.",
+ handlesArgs : true,
+ defaultArgs : "",
+ openWindowWithArgs : true,
+
+ /* nsICommandLineHandler */
+ handle : function handler_handle(cmdLine) {
+ let options = {};
+
+ let uristr = cmdLine.handleFlagWithParam("tps", false);
+ if (uristr == null)
+ return;
+ let phase = cmdLine.handleFlagWithParam("tpsphase", false);
+ if (phase == null)
+ throw Error("must specify --tpsphase with --tps");
+ let logfile = cmdLine.handleFlagWithParam("tpslogfile", false);
+ if (logfile == null)
+ logfile = "";
+
+ options.ignoreUnusedEngines = cmdLine.handleFlag("ignore-unused-engines",
+ false);
+
+
+ /* Ignore the platform's online/offline status while running tests. */
+ var ios = Components.classes["@mozilla.org/network/io-service;1"]
+ .getService(Components.interfaces.nsIIOService2);
+ ios.manageOfflineStatus = false;
+ ios.offline = false;
+
+ Components.utils.import("resource://tps/tps.jsm");
+ Components.utils.import("resource://tps/quit.js", TPS);
+ let uri = cmdLine.resolveURI(uristr).spec;
+ TPS.RunTestPhase(uri, phase, logfile, options);
+
+ //cmdLine.preventDefault = true;
+ },
+
+ helpInfo : " --tps <file> Run TPS tests with the given test file.\n" +
+ " --tpsphase <phase> Run the specified phase in the TPS test.\n" +
+ " --tpslogfile <file> Logfile for TPS output.\n" +
+ " --ignore-unused-engines Don't load engines not used in tests.\n",
+};
+
+
+var TPSCmdLineFactory = {
+ createInstance : function(outer, iid) {
+ if (outer != null) {
+ throw new Error(Components.results.NS_ERROR_NO_AGGREGATION);
+ }
+
+ return new TPSCmdLineHandler().QueryInterface(iid);
+ }
+};
+
+
+var TPSCmdLineModule = {
+ registerSelf : function(compMgr, fileSpec, location, type) {
+ compMgr = compMgr.QueryInterface(nsIComponentRegistrar);
+
+ compMgr.registerFactoryLocation(TPS_CMDLINE_CLSID,
+ "TPS CommandLine Service",
+ TPS_CMDLINE_CONTRACTID,
+ fileSpec,
+ location,
+ type);
+
+ var catman = Components.classes[CATMAN_CONTRACTID].getService(nsICategoryManager);
+ catman.addCategoryEntry("command-line-argument-handlers",
+ "TPS command line handler",
+ TPS_CMDLINE_CONTRACTID, true, true);
+ catman.addCategoryEntry("command-line-handler",
+ "m-tps",
+ TPS_CMDLINE_CONTRACTID, true, true);
+ },
+
+ unregisterSelf : function(compMgr, fileSpec, location) {
+ compMgr = compMgr.QueryInterface(nsIComponentRegistrar);
+
+ compMgr.unregisterFactoryLocation(TPS_CMDLINE_CLSID, fileSpec);
+ catman = Components.classes[CATMAN_CONTRACTID].getService(nsICategoryManager);
+ catman.deleteCategoryEntry("command-line-argument-handlers",
+ "TPS command line handler", true);
+ catman.deleteCategoryEntry("command-line-handler",
+ "m-tps", true);
+ },
+
+ getClassObject : function(compMgr, cid, iid) {
+ if (cid.equals(TPS_CMDLINE_CLSID)) {
+ return TPSCmdLineFactory;
+ }
+
+ if (!iid.equals(Components.interfaces.nsIFactory)) {
+ throw new Error(Components.results.NS_ERROR_NOT_IMPLEMENTED);
+ }
+
+ throw new Error(Components.results.NS_ERROR_NO_INTERFACE);
+ },
+
+ canUnload : function(compMgr) {
+ return true;
+ }
+};
+
+/**
+* XPCOMUtils.generateNSGetFactory was introduced in Mozilla 2 (Firefox 4).
+* XPCOMUtils.generateNSGetModule is for Mozilla 1.9.2 (Firefox 3.6).
+*/
+if (XPCOMUtils.generateNSGetFactory)
+ var NSGetFactory = XPCOMUtils.generateNSGetFactory([TPSCmdLineHandler]);
+
+function NSGetModule(compMgr, fileSpec) {
+ return TPSCmdLineModule;
+}
diff --git a/services/sync/tps/extensions/tps/install.rdf b/services/sync/tps/extensions/tps/install.rdf
new file mode 100644
index 000000000..3dcdc5e44
--- /dev/null
+++ b/services/sync/tps/extensions/tps/install.rdf
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>tps@mozilla.org</em:id>
+ <em:version>0.5</em:version>
+
+ <em:targetApplication>
+ <!-- Firefox -->
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
+ <em:minVersion>24.0.*</em:minVersion>
+ <em:maxVersion>31.0.*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+
+ <!-- front-end metadata -->
+ <em:name>TPS</em:name>
+ <em:description>Sync test extension</em:description>
+ <em:creator>Jonathan Griffin</em:creator>
+ <em:contributor>Henrik Skupin</em:contributor>
+ <em:homepageURL>https://developer.mozilla.org/en-US/docs/TPS</em:homepageURL>
+ </Description>
+</RDF>
diff --git a/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm b/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm
new file mode 100644
index 000000000..86d0ed113
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm
@@ -0,0 +1,121 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "Authentication",
+];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/FxAccounts.jsm");
+Cu.import("resource://gre/modules/FxAccountsClient.jsm");
+Cu.import("resource://gre/modules/FxAccountsConfig.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/main.js");
+Cu.import("resource://tps/logger.jsm");
+
+
+/**
+ * Helper object for Firefox Accounts authentication
+ */
+var Authentication = {
+
+ /**
+ * Check if an user has been logged in
+ */
+ get isLoggedIn() {
+ return !!this.getSignedInUser();
+ },
+
+ /**
+ * Wrapper to retrieve the currently signed in user
+ *
+ * @returns Information about the currently signed in user
+ */
+ getSignedInUser: function getSignedInUser() {
+ let cb = Async.makeSpinningCallback();
+
+ fxAccounts.getSignedInUser().then(user => {
+ cb(null, user);
+ }, error => {
+ cb(error);
+ })
+
+ try {
+ return cb.wait();
+ } catch (error) {
+ Logger.logError("getSignedInUser() failed with: " + JSON.stringify(error));
+ throw error;
+ }
+ },
+
+ /**
+ * Wrapper to synchronize the login of a user
+ *
+ * @param account
+ * Account information of the user to login
+ * @param account.username
+ * The username for the account (utf8)
+ * @param account.password
+ * The user's password
+ */
+ signIn: function signIn(account) {
+ let cb = Async.makeSpinningCallback();
+
+ Logger.AssertTrue(account["username"], "Username has been found");
+ Logger.AssertTrue(account["password"], "Password has been found");
+
+ Logger.logInfo("Login user: " + account["username"]);
+
+ // Required here since we don't go through the real login page
+ Async.promiseSpinningly(FxAccountsConfig.ensureConfigured());
+
+ let client = new FxAccountsClient();
+ client.signIn(account["username"], account["password"], true).then(credentials => {
+ return fxAccounts.setSignedInUser(credentials);
+ }).then(() => {
+ cb(null, true);
+ }, error => {
+ cb(error, false);
+ });
+
+ try {
+ cb.wait();
+
+ if (Weave.Status.login !== Weave.LOGIN_SUCCEEDED) {
+ Logger.logInfo("Logging into Weave.");
+ Weave.Service.login();
+ Logger.AssertEqual(Weave.Status.login, Weave.LOGIN_SUCCEEDED,
+ "Weave logged in");
+ }
+
+ return true;
+ } catch (error) {
+ throw new Error("signIn() failed with: " + error.message);
+ }
+ },
+
+ /**
+ * Sign out of Firefox Accounts. It also clears out the device ID, if we find one.
+ */
+ signOut() {
+ if (Authentication.isLoggedIn) {
+ let user = Authentication.getSignedInUser();
+ if (!user) {
+ throw new Error("Failed to get signed in user!");
+ }
+ let fxc = new FxAccountsClient();
+ let { sessionToken, deviceId } = user;
+ if (deviceId) {
+ Logger.logInfo("Destroying device " + deviceId);
+ Async.promiseSpinningly(fxc.signOutAndDestroyDevice(sessionToken, deviceId, { service: "sync" }));
+ } else {
+ Logger.logError("No device found.");
+ Async.promiseSpinningly(fxc.signOut(sessionToken, { service: "sync" }));
+ }
+ }
+ }
+};
diff --git a/services/sync/tps/extensions/tps/resource/auth/sync.jsm b/services/sync/tps/extensions/tps/resource/auth/sync.jsm
new file mode 100644
index 000000000..35ffeb269
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/auth/sync.jsm
@@ -0,0 +1,88 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "Authentication",
+];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-sync/main.js");
+Cu.import("resource://tps/logger.jsm");
+
+
+/**
+ * Helper object for deprecated Firefox Sync authentication
+ */
+var Authentication = {
+
+ /**
+ * Check if an user has been logged in
+ */
+ get isLoggedIn() {
+ return !!this.getSignedInUser();
+ },
+
+ /**
+ * Wrapper to retrieve the currently signed in user
+ *
+ * @returns Information about the currently signed in user
+ */
+ getSignedInUser: function getSignedInUser() {
+ let user = null;
+
+ if (Weave.Service.isLoggedIn) {
+ user = {
+ email: Weave.Service.identity.account,
+ password: Weave.Service.identity.basicPassword,
+ passphrase: Weave.Service.identity.syncKey
+ };
+ }
+
+ return user;
+ },
+
+ /**
+ * Wrapper to synchronize the login of a user
+ *
+ * @param account
+ * Account information of the user to login
+ * @param account.username
+ * The username for the account (utf8)
+ * @param account.password
+ * The user's password
+ * @param account.passphrase
+ * The users's passphrase
+ */
+ signIn: function signIn(account) {
+ Logger.AssertTrue(account["username"], "Username has been found");
+ Logger.AssertTrue(account["password"], "Password has been found");
+ Logger.AssertTrue(account["passphrase"], "Passphrase has been found");
+
+ Logger.logInfo("Logging in user: " + account["username"]);
+
+ Weave.Service.identity.account = account["username"];
+ Weave.Service.identity.basicPassword = account["password"];
+ Weave.Service.identity.syncKey = account["passphrase"];
+
+ if (Weave.Status.login !== Weave.LOGIN_SUCCEEDED) {
+ Logger.logInfo("Logging into Weave.");
+ Weave.Service.login();
+ Logger.AssertEqual(Weave.Status.login, Weave.LOGIN_SUCCEEDED,
+ "Weave logged in");
+
+ // Bug 997279: Temporary workaround until we can ensure that Sync itself
+ // sends this notification for the first login attempt by TPS
+ Weave.Svc.Obs.notify("weave:service:setup-complete");
+ }
+
+ return true;
+ },
+
+ signOut() {
+ Weave.Service.logout();
+ }
+};
diff --git a/services/sync/tps/extensions/tps/resource/logger.jsm b/services/sync/tps/extensions/tps/resource/logger.jsm
new file mode 100644
index 000000000..f4dd4bfb0
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/logger.jsm
@@ -0,0 +1,148 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ Components.utils.import() and acts as a singleton.
+ Only the following listed symbols will exposed on import, and only when
+ and where imported. */
+
+var EXPORTED_SYMBOLS = ["Logger"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+var Logger = {
+ _foStream: null,
+ _converter: null,
+ _potentialError: null,
+
+ init: function (path) {
+ if (this._converter != null) {
+ // we're already open!
+ return;
+ }
+
+ let prefs = Cc["@mozilla.org/preferences-service;1"]
+ .getService(Ci.nsIPrefBranch);
+ if (path) {
+ prefs.setCharPref("tps.logfile", path);
+ }
+ else {
+ path = prefs.getCharPref("tps.logfile");
+ }
+
+ this._file = Cc["@mozilla.org/file/local;1"]
+ .createInstance(Ci.nsILocalFile);
+ this._file.initWithPath(path);
+ var exists = this._file.exists();
+
+ // Make a file output stream and converter to handle it.
+ this._foStream = Cc["@mozilla.org/network/file-output-stream;1"]
+ .createInstance(Ci.nsIFileOutputStream);
+ // If the file already exists, append it, otherwise create it.
+ var fileflags = exists ? 0x02 | 0x08 | 0x10 : 0x02 | 0x08 | 0x20;
+
+ this._foStream.init(this._file, fileflags, 0666, 0);
+ this._converter = Cc["@mozilla.org/intl/converter-output-stream;1"]
+ .createInstance(Ci.nsIConverterOutputStream);
+ this._converter.init(this._foStream, "UTF-8", 0, 0);
+ },
+
+ write: function (data) {
+ if (this._converter == null) {
+ Cu.reportError(
+ "TPS Logger.write called with _converter == null!");
+ return;
+ }
+ this._converter.writeString(data);
+ },
+
+ close: function () {
+ if (this._converter != null) {
+ this._converter.close();
+ this._converter = null;
+ this._foStream = null;
+ }
+ },
+
+ AssertTrue: function(bool, msg, showPotentialError) {
+ if (bool) {
+ return;
+ }
+
+ if (showPotentialError && this._potentialError) {
+ msg += "; " + this._potentialError;
+ this._potentialError = null;
+ }
+ throw new Error("ASSERTION FAILED! " + msg);
+ },
+
+ AssertFalse: function(bool, msg, showPotentialError) {
+ return this.AssertTrue(!bool, msg, showPotentialError);
+ },
+
+ AssertEqual: function(val1, val2, msg) {
+ if (val1 != val2)
+ throw new Error("ASSERTION FAILED! " + msg + "; expected " +
+ JSON.stringify(val2) + ", got " + JSON.stringify(val1));
+ },
+
+ log: function (msg, withoutPrefix) {
+ dump(msg + "\n");
+ if (withoutPrefix) {
+ this.write(msg + "\n");
+ }
+ else {
+ function pad(n, len) {
+ let s = "0000" + n;
+ return s.slice(-len);
+ }
+
+ let now = new Date();
+ let year = pad(now.getFullYear(), 4);
+ let month = pad(now.getMonth() + 1, 2);
+ let day = pad(now.getDate(), 2);
+ let hour = pad(now.getHours(), 2);
+ let minutes = pad(now.getMinutes(), 2);
+ let seconds = pad(now.getSeconds(), 2);
+ let ms = pad(now.getMilliseconds(), 3);
+
+ this.write(year + "-" + month + "-" + day + " " +
+ hour + ":" + minutes + ":" + seconds + "." + ms + " " +
+ msg + "\n");
+ }
+ },
+
+ clearPotentialError: function() {
+ this._potentialError = null;
+ },
+
+ logPotentialError: function(msg) {
+ this._potentialError = msg;
+ },
+
+ logLastPotentialError: function(msg) {
+ var message = msg;
+ if (this._potentialError) {
+ message = this._poentialError;
+ this._potentialError = null;
+ }
+ this.log("CROSSWEAVE ERROR: " + message);
+ },
+
+ logError: function (msg) {
+ this.log("CROSSWEAVE ERROR: " + msg);
+ },
+
+ logInfo: function (msg, withoutPrefix) {
+ if (withoutPrefix)
+ this.log(msg, true);
+ else
+ this.log("CROSSWEAVE INFO: " + msg);
+ },
+
+ logPass: function (msg) {
+ this.log("CROSSWEAVE TEST PASS: " + msg);
+ },
+};
+
diff --git a/services/sync/tps/extensions/tps/resource/modules/addons.jsm b/services/sync/tps/extensions/tps/resource/modules/addons.jsm
new file mode 100644
index 000000000..1570b42b1
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/addons.jsm
@@ -0,0 +1,127 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"use strict";
+
+var EXPORTED_SYMBOLS = ["Addon", "STATE_ENABLED", "STATE_DISABLED"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/AddonManager.jsm");
+Cu.import("resource://gre/modules/addons/AddonRepository.jsm");
+Cu.import("resource://gre/modules/NetUtil.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/addonutils.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://tps/logger.jsm");
+
+const ADDONSGETURL = "http://127.0.0.1:4567/";
+const STATE_ENABLED = 1;
+const STATE_DISABLED = 2;
+
+function GetFileAsText(file) {
+ let channel = NetUtil.newChannel({
+ uri: file,
+ loadUsingSystemPrincipal: true
+ });
+ let inputStream = channel.open2();
+ if (channel instanceof Ci.nsIHttpChannel &&
+ channel.responseStatus != 200) {
+ return "";
+ }
+
+ let streamBuf = "";
+ let sis = Cc["@mozilla.org/scriptableinputstream;1"]
+ .createInstance(Ci.nsIScriptableInputStream);
+ sis.init(inputStream);
+
+ let available;
+ while ((available = sis.available()) != 0) {
+ streamBuf += sis.read(available);
+ }
+
+ inputStream.close();
+ return streamBuf;
+}
+
+function Addon(TPS, id) {
+ this.TPS = TPS;
+ this.id = id;
+}
+
+Addon.prototype = {
+ addon: null,
+
+ uninstall: function uninstall() {
+ // find our addon locally
+ let cb = Async.makeSyncCallback();
+ AddonManager.getAddonByID(this.id, cb);
+ let addon = Async.waitForSyncCallback(cb);
+
+ Logger.AssertTrue(!!addon, 'could not find addon ' + this.id + ' to uninstall');
+
+ cb = Async.makeSpinningCallback();
+ AddonUtils.uninstallAddon(addon, cb);
+ cb.wait();
+ },
+
+ find: function find(state) {
+ let cb = Async.makeSyncCallback();
+ AddonManager.getAddonByID(this.id, cb);
+ let addon = Async.waitForSyncCallback(cb);
+
+ if (!addon) {
+ Logger.logInfo("Could not find add-on with ID: " + this.id);
+ return false;
+ }
+
+ this.addon = addon;
+
+ Logger.logInfo("add-on found: " + addon.id + ", enabled: " +
+ !addon.userDisabled);
+ if (state == STATE_ENABLED) {
+ Logger.AssertFalse(addon.userDisabled, "add-on is disabled: " + addon.id);
+ return true;
+ } else if (state == STATE_DISABLED) {
+ Logger.AssertTrue(addon.userDisabled, "add-on is enabled: " + addon.id);
+ return true;
+ } else if (state) {
+ throw new Error("Don't know how to handle state: " + state);
+ } else {
+ // No state, so just checking that it exists.
+ return true;
+ }
+ },
+
+ install: function install() {
+ // For Install, the id parameter initially passed is really the filename
+ // for the addon's install .xml; we'll read the actual id from the .xml.
+
+ let cb = Async.makeSpinningCallback();
+ AddonUtils.installAddons([{id: this.id, requireSecureURI: false}], cb);
+ let result = cb.wait();
+
+ Logger.AssertEqual(1, result.installedIDs.length, "Exactly 1 add-on was installed.");
+ Logger.AssertEqual(this.id, result.installedIDs[0],
+ "Add-on was installed successfully: " + this.id);
+ },
+
+ setEnabled: function setEnabled(flag) {
+ Logger.AssertTrue(this.find(), "Add-on is available.");
+
+ let userDisabled;
+ if (flag == STATE_ENABLED) {
+ userDisabled = false;
+ } else if (flag == STATE_DISABLED) {
+ userDisabled = true;
+ } else {
+ throw new Error("Unknown flag to setEnabled: " + flag);
+ }
+
+ let cb = Async.makeSpinningCallback();
+ AddonUtils.updateUserDisabled(this.addon, userDisabled, cb);
+ cb.wait();
+
+ return true;
+ }
+};
diff --git a/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm b/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm
new file mode 100644
index 000000000..857c0c1e8
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm
@@ -0,0 +1,1001 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ * Components.utils.import() and acts as a singleton. Only the following
+ * listed symbols will exposed on import, and only when and where imported.
+ */
+
+var EXPORTED_SYMBOLS = ["PlacesItem", "Bookmark", "Separator", "Livemark",
+ "BookmarkFolder", "DumpBookmarks"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/PlacesBackups.jsm");
+Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://tps/logger.jsm");
+
+var DumpBookmarks = function TPS_Bookmarks__DumpBookmarks() {
+ let cb = Async.makeSpinningCallback();
+ PlacesBackups.getBookmarksTree().then(result => {
+ let [bookmarks, count] = result;
+ Logger.logInfo("Dumping Bookmarks...\n" + JSON.stringify(bookmarks) + "\n\n");
+ cb(null);
+ }).then(null, error => {
+ cb(error);
+ });
+ cb.wait();
+};
+
+/**
+ * extend, causes a child object to inherit from a parent
+ */
+function extend(child, supertype)
+{
+ child.prototype.__proto__ = supertype.prototype;
+}
+
+/**
+ * PlacesItemProps object, holds properties for places items
+ */
+function PlacesItemProps(props) {
+ this.location = null;
+ this.uri = null;
+ this.loadInSidebar = null;
+ this.keyword = null;
+ this.title = null;
+ this.description = null;
+ this.after = null;
+ this.before = null;
+ this.folder = null;
+ this.position = null;
+ this.delete = false;
+ this.siteUri = null;
+ this.feedUri = null;
+ this.livemark = null;
+ this.tags = null;
+ this.last_item_pos = null;
+ this.type = null;
+
+ for (var prop in props) {
+ if (prop in this)
+ this[prop] = props[prop];
+ }
+}
+
+/**
+ * PlacesItem object. Base class for places items.
+ */
+function PlacesItem(props) {
+ this.props = new PlacesItemProps(props);
+ if (this.props.location == null)
+ this.props.location = "menu";
+ if ("changes" in props)
+ this.updateProps = new PlacesItemProps(props.changes);
+ else
+ this.updateProps = null;
+}
+
+/**
+ * Instance methods for generic places items.
+ */
+PlacesItem.prototype = {
+ // an array of possible root folders for places items
+ _bookmarkFolders: {
+ "places": "placesRoot",
+ "menu": "bookmarksMenuFolder",
+ "tags": "tagFolder",
+ "unfiled": "unfiledBookmarksFolder",
+ "toolbar": "toolbarFolder",
+ },
+
+ toString: function() {
+ var that = this;
+ var props = ['uri', 'title', 'location', 'folder', 'feedUri', 'siteUri', 'livemark'];
+ var string = (this.props.type ? this.props.type + " " : "") +
+ "(" +
+ (function() {
+ var ret = [];
+ for (var i in props) {
+ if (that.props[props[i]]) {
+ ret.push(props[i] + ": " + that.props[props[i]])
+ }
+ }
+ return ret;
+ })().join(", ") + ")";
+ return string;
+ },
+
+ GetSyncId() {
+ let guid = Async.promiseSpinningly(PlacesUtils.promiseItemGuid(this.props.item_id));
+ return PlacesSyncUtils.bookmarks.guidToSyncId(guid);
+ },
+
+ /**
+ * GetPlacesNodeId
+ *
+ * Finds the id of the an item with the specified properties in the places
+ * database.
+ *
+ * @param folder The id of the folder to search
+ * @param type The type of the item to find, or null to match any item;
+ * this is one of the values listed at
+ * https://developer.mozilla.org/en/nsINavHistoryResultNode#Constants
+ * @param title The title of the item to find, or null to match any title
+ * @param uri The uri of the item to find, or null to match any uri
+ *
+ * @return the node id if the item was found, otherwise -1
+ */
+ GetPlacesNodeId: function (folder, type, title, uri) {
+ let node_id = -1;
+
+ let options = PlacesUtils.history.getNewQueryOptions();
+ let query = PlacesUtils.history.getNewQuery();
+ query.setFolders([folder], 1);
+ let result = PlacesUtils.history.executeQuery(query, options);
+ let rootNode = result.root;
+ rootNode.containerOpen = true;
+
+ for (let j = 0; j < rootNode.childCount; j ++) {
+ let node = rootNode.getChild(j);
+ if (node.title == title) {
+ if (type == null || type == undefined || node.type == type)
+ if (uri == undefined || uri == null || node.uri.spec == uri.spec)
+ node_id = node.itemId;
+ }
+ }
+ rootNode.containerOpen = false;
+
+ return node_id;
+ },
+
+ /**
+ * IsAdjacentTo
+ *
+ * Determines if this object is immediately adjacent to another.
+ *
+ * @param itemName The name of the other object; this may be any kind of
+ * places item
+ * @param relativePos The relative position of the other object. If -1,
+ * it means the other object should precede this one, if +1,
+ * the other object should come after this one
+ * @return true if this object is immediately adjacent to the other object,
+ * otherwise false
+ */
+ IsAdjacentTo: function(itemName, relativePos) {
+ Logger.AssertTrue(this.props.folder_id != -1 && this.props.item_id != -1,
+ "Either folder_id or item_id was invalid");
+ let other_id = this.GetPlacesNodeId(this.props.folder_id, null, itemName);
+ Logger.AssertTrue(other_id != -1, "item " + itemName + " not found");
+ let other_pos = PlacesUtils.bookmarks.getItemIndex(other_id);
+ let this_pos = PlacesUtils.bookmarks.getItemIndex(this.props.item_id);
+ if (other_pos + relativePos != this_pos) {
+ Logger.logPotentialError("Invalid position - " +
+ (this.props.title ? this.props.title : this.props.folder) +
+ " not " + (relativePos == 1 ? "after " : "before ") + itemName +
+ " for " + this.toString());
+ return false;
+ }
+ return true;
+ },
+
+ /**
+ * GetItemIndex
+ *
+ * Gets the item index for this places item.
+ *
+ * @return the item index, or -1 if there's an error
+ */
+ GetItemIndex: function() {
+ if (this.props.item_id == -1)
+ return -1;
+ return PlacesUtils.bookmarks.getItemIndex(this.props.item_id);
+ },
+
+ /**
+ * GetFolder
+ *
+ * Gets the folder id for the specified bookmark folder
+ *
+ * @param location The full path of the folder, which must begin
+ * with one of the bookmark root folders
+ * @return the folder id if the folder is found, otherwise -1
+ */
+ GetFolder: function(location) {
+ let folder_parts = location.split("/");
+ if (!(folder_parts[0] in this._bookmarkFolders)) {
+ return -1;
+ }
+ let folder_id = PlacesUtils.bookmarks[this._bookmarkFolders[folder_parts[0]]];
+ for (let i = 1; i < folder_parts.length; i++) {
+ let subfolder_id = this.GetPlacesNodeId(
+ folder_id,
+ Ci.nsINavHistoryResultNode.RESULT_TYPE_FOLDER,
+ folder_parts[i]);
+ if (subfolder_id == -1) {
+ return -1;
+ }
+ else {
+ folder_id = subfolder_id;
+ }
+ }
+ return folder_id;
+ },
+
+ /**
+ * CreateFolder
+ *
+ * Creates a bookmark folder.
+ *
+ * @param location The full path of the folder, which must begin
+ * with one of the bookmark root folders
+ * @return the folder id if the folder was created, otherwise -1
+ */
+ CreateFolder: function(location) {
+ let folder_parts = location.split("/");
+ if (!(folder_parts[0] in this._bookmarkFolders)) {
+ return -1;
+ }
+ let folder_id = PlacesUtils.bookmarks[this._bookmarkFolders[folder_parts[0]]];
+ for (let i = 1; i < folder_parts.length; i++) {
+ let subfolder_id = this.GetPlacesNodeId(
+ folder_id,
+ Ci.nsINavHistoryResultNode.RESULT_TYPE_FOLDER,
+ folder_parts[i]);
+ if (subfolder_id == -1) {
+ folder_id = PlacesUtils.bookmarks.createFolder(folder_id,
+ folder_parts[i], -1);
+ }
+ else {
+ folder_id = subfolder_id;
+ }
+ }
+ return folder_id;
+ },
+
+ /**
+ * GetOrCreateFolder
+ *
+ * Locates the specified folder; if not found it is created.
+ *
+ * @param location The full path of the folder, which must begin
+ * with one of the bookmark root folders
+ * @return the folder id if the folder was found or created, otherwise -1
+ */
+ GetOrCreateFolder: function(location) {
+ folder_id = this.GetFolder(location);
+ if (folder_id == -1)
+ folder_id = this.CreateFolder(location);
+ return folder_id;
+ },
+
+ /**
+ * CheckDescription
+ *
+ * Compares the description of this places item with an expected
+ * description.
+ *
+ * @param expectedDescription The description this places item is
+ * expected to have
+ * @return true if the actual and expected descriptions match, or if
+ * there is no expected description; otherwise false
+ */
+ CheckDescription: function(expectedDescription) {
+ if (expectedDescription != null) {
+ let description = "";
+ if (PlacesUtils.annotations.itemHasAnnotation(this.props.item_id,
+ "bookmarkProperties/description")) {
+ description = PlacesUtils.annotations.getItemAnnotation(
+ this.props.item_id, "bookmarkProperties/description");
+ }
+ if (description != expectedDescription) {
+ Logger.logPotentialError("Invalid description, expected: " +
+ expectedDescription + ", actual: " + description + " for " +
+ this.toString());
+ return false;
+ }
+ }
+ return true;
+ },
+
+ /**
+ * CheckPosition
+ *
+ * Verifies the position of this places item.
+ *
+ * @param before The name of the places item that this item should be
+ before, or null if this check should be skipped
+ * @param after The name of the places item that this item should be
+ after, or null if this check should be skipped
+ * @param last_item_pos The index of the places item above this one,
+ * or null if this check should be skipped
+ * @return true if this item is in the correct position, otherwise false
+ */
+ CheckPosition: function(before, after, last_item_pos) {
+ if (after)
+ if (!this.IsAdjacentTo(after, 1)) return false;
+ if (before)
+ if (!this.IsAdjacentTo(before, -1)) return false;
+ if (last_item_pos != null && last_item_pos > -1) {
+ if (this.GetItemIndex() != last_item_pos + 1) {
+ Logger.logPotentialError("Item not found at the expected index, got " +
+ this.GetItemIndex() + ", expected " + (last_item_pos + 1) + " for " +
+ this.toString());
+ return false;
+ }
+ }
+ return true;
+ },
+
+ /**
+ * SetLocation
+ *
+ * Moves this places item to a different folder.
+ *
+ * @param location The full path of the folder to which to move this
+ * places item, which must begin with one of the bookmark root
+ * folders; if null, no changes are made
+ * @return nothing if successful, otherwise an exception is thrown
+ */
+ SetLocation: function(location) {
+ if (location != null) {
+ let newfolder_id = this.GetOrCreateFolder(location);
+ Logger.AssertTrue(newfolder_id != -1, "Location " + location +
+ " doesn't exist; can't change item's location");
+ PlacesUtils.bookmarks.moveItem(this.props.item_id, newfolder_id, -1);
+ this.props.folder_id = newfolder_id;
+ }
+ },
+
+ /**
+ * SetDescription
+ *
+ * Updates the description for this places item.
+ *
+ * @param description The new description to set; if null, no changes are
+ * made
+ * @return nothing
+ */
+ SetDescription: function(description) {
+ if (description != null) {
+ if (description != "")
+ PlacesUtils.annotations.setItemAnnotation(this.props.item_id,
+ "bookmarkProperties/description",
+ description,
+ 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ else
+ PlacesUtils.annotations.removeItemAnnotation(this.props.item_id,
+ "bookmarkProperties/description");
+ }
+ },
+
+ /**
+ * SetPosition
+ *
+ * Updates the position of this places item within this item's current
+ * folder. Use SetLocation to change folders.
+ *
+ * @param position The new index this item should be moved to; if null,
+ * no changes are made; if -1, this item is moved to the bottom of
+ * the current folder
+ * @return nothing if successful, otherwise an exception is thrown
+ */
+ SetPosition: function(position) {
+ if (position != null) {
+ let newposition = -1;
+ if (position != -1) {
+ newposition = this.GetPlacesNodeId(this.props.folder_id,
+ null, position);
+ Logger.AssertTrue(newposition != -1, "position " + position +
+ " is invalid; unable to change position");
+ newposition = PlacesUtils.bookmarks.getItemIndex(newposition);
+ }
+ PlacesUtils.bookmarks.moveItem(this.props.item_id,
+ this.props.folder_id, newposition);
+ }
+ },
+
+ /**
+ * Update the title of this places item
+ *
+ * @param title The new title to set for this item; if null, no changes
+ * are made
+ * @return nothing
+ */
+ SetTitle: function(title) {
+ if (title != null) {
+ PlacesUtils.bookmarks.setItemTitle(this.props.item_id, title);
+ }
+ },
+};
+
+/**
+ * Bookmark class constructor. Initializes instance properties.
+ */
+function Bookmark(props) {
+ PlacesItem.call(this, props);
+ if (this.props.title == null)
+ this.props.title = this.props.uri;
+ this.props.type = "bookmark";
+}
+
+/**
+ * Bookmark instance methods.
+ */
+Bookmark.prototype = {
+ /**
+ * SetKeyword
+ *
+ * Update this bookmark's keyword.
+ *
+ * @param keyword The keyword to set for this bookmark; if null, no
+ * changes are made
+ * @return nothing
+ */
+ SetKeyword: function(keyword) {
+ if (keyword != null) {
+ // Mirror logic from PlacesSyncUtils's updateBookmarkMetadata
+ let entry = Async.promiseSpinningly(PlacesUtils.keywords.fetch({
+ url: this.props.uri,
+ }));
+ if (entry) {
+ Async.promiseSpinningly(PlacesUtils.keywords.remove(entry));
+ }
+ Async.promiseSpinningly(PlacesUtils.keywords.insert({
+ keyword: keyword,
+ url: this.props.uri
+ }));
+ }
+ },
+
+ /**
+ * SetLoadInSidebar
+ *
+ * Updates this bookmark's loadInSidebar property.
+ *
+ * @param loadInSidebar if true, the loadInSidebar property will be set,
+ * if false, it will be cleared, and any other value will result
+ * in no change
+ * @return nothing
+ */
+ SetLoadInSidebar: function(loadInSidebar) {
+ if (loadInSidebar == true)
+ PlacesUtils.annotations.setItemAnnotation(this.props.item_id,
+ "bookmarkProperties/loadInSidebar",
+ true,
+ 0,
+ PlacesUtils.annotations.EXPIRE_NEVER);
+ else if (loadInSidebar == false)
+ PlacesUtils.annotations.removeItemAnnotation(this.props.item_id,
+ "bookmarkProperties/loadInSidebar");
+ },
+
+ /**
+ * SetTitle
+ *
+ * Updates this bookmark's title.
+ *
+ * @param title The new title to set for this boomark; if null, no changes
+ * are made
+ * @return nothing
+ */
+ SetTitle: function(title) {
+ if (title)
+ PlacesUtils.bookmarks.setItemTitle(this.props.item_id, title);
+ },
+
+ /**
+ * SetUri
+ *
+ * Updates this bookmark's URI.
+ *
+ * @param uri The new URI to set for this boomark; if null, no changes
+ * are made
+ * @return nothing
+ */
+ SetUri: function(uri) {
+ if (uri) {
+ let newURI = Services.io.newURI(uri, null, null);
+ PlacesUtils.bookmarks.changeBookmarkURI(this.props.item_id, newURI);
+ }
+ },
+
+ /**
+ * SetTags
+ *
+ * Updates this bookmark's tags.
+ *
+ * @param tags An array of tags which should be associated with this
+ * bookmark; any previous tags are removed; if this param is null,
+ * no changes are made. If this param is an empty array, all
+ * tags are removed from this bookmark.
+ * @return nothing
+ */
+ SetTags: function(tags) {
+ if (tags != null) {
+ let URI = Services.io.newURI(this.props.uri, null, null);
+ PlacesUtils.tagging.untagURI(URI, null);
+ if (tags.length > 0)
+ PlacesUtils.tagging.tagURI(URI, tags);
+ }
+ },
+
+ /**
+ * Create
+ *
+ * Creates the bookmark described by this object's properties.
+ *
+ * @return the id of the created bookmark
+ */
+ Create: function() {
+ this.props.folder_id = this.GetOrCreateFolder(this.props.location);
+ Logger.AssertTrue(this.props.folder_id != -1, "Unable to create " +
+ "bookmark, error creating folder " + this.props.location);
+ let bookmarkURI = Services.io.newURI(this.props.uri, null, null);
+ this.props.item_id = PlacesUtils.bookmarks.insertBookmark(this.props.folder_id,
+ bookmarkURI,
+ -1,
+ this.props.title);
+ this.SetKeyword(this.props.keyword);
+ this.SetDescription(this.props.description);
+ this.SetLoadInSidebar(this.props.loadInSidebar);
+ this.SetTags(this.props.tags);
+ return this.props.item_id;
+ },
+
+ /**
+ * Update
+ *
+ * Updates this bookmark's properties according the properties on this
+ * object's 'updateProps' property.
+ *
+ * @return nothing
+ */
+ Update: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Remove");
+ this.SetDescription(this.updateProps.description);
+ this.SetLoadInSidebar(this.updateProps.loadInSidebar);
+ this.SetTitle(this.updateProps.title);
+ this.SetUri(this.updateProps.uri);
+ this.SetKeyword(this.updateProps.keyword);
+ this.SetTags(this.updateProps.tags);
+ this.SetLocation(this.updateProps.location);
+ this.SetPosition(this.updateProps.position);
+ },
+
+ /**
+ * Find
+ *
+ * Locates the bookmark which corresponds to this object's properties.
+ *
+ * @return the bookmark id if the bookmark was found, otherwise -1
+ */
+ Find: function() {
+ this.props.folder_id = this.GetFolder(this.props.location);
+ if (this.props.folder_id == -1) {
+ Logger.logError("Unable to find folder " + this.props.location);
+ return -1;
+ }
+ let bookmarkTitle = this.props.title;
+ this.props.item_id = this.GetPlacesNodeId(this.props.folder_id,
+ null,
+ bookmarkTitle,
+ this.props.uri);
+
+ if (this.props.item_id == -1) {
+ Logger.logPotentialError(this.toString() + " not found");
+ return -1;
+ }
+ if (!this.CheckDescription(this.props.description))
+ return -1;
+ if (this.props.keyword != null) {
+ let { keyword } = Async.promiseSpinningly(
+ PlacesSyncUtils.bookmarks.fetch(this.GetSyncId()));
+ if (keyword != this.props.keyword) {
+ Logger.logPotentialError("Incorrect keyword - expected: " +
+ this.props.keyword + ", actual: " + keyword +
+ " for " + this.toString());
+ return -1;
+ }
+ }
+ let loadInSidebar = PlacesUtils.annotations.itemHasAnnotation(
+ this.props.item_id,
+ "bookmarkProperties/loadInSidebar");
+ if (loadInSidebar)
+ loadInSidebar = PlacesUtils.annotations.getItemAnnotation(
+ this.props.item_id,
+ "bookmarkProperties/loadInSidebar");
+ if (this.props.loadInSidebar != null &&
+ loadInSidebar != this.props.loadInSidebar) {
+ Logger.logPotentialError("Incorrect loadInSidebar setting - expected: " +
+ this.props.loadInSidebar + ", actual: " + loadInSidebar +
+ " for " + this.toString());
+ return -1;
+ }
+ if (this.props.tags != null) {
+ try {
+ let URI = Services.io.newURI(this.props.uri, null, null);
+ let tags = PlacesUtils.tagging.getTagsForURI(URI, {});
+ tags.sort();
+ this.props.tags.sort();
+ if (JSON.stringify(tags) != JSON.stringify(this.props.tags)) {
+ Logger.logPotentialError("Wrong tags - expected: " +
+ JSON.stringify(this.props.tags) + ", actual: " +
+ JSON.stringify(tags) + " for " + this.toString());
+ return -1;
+ }
+ }
+ catch (e) {
+ Logger.logPotentialError("error processing tags " + e);
+ return -1;
+ }
+ }
+ if (!this.CheckPosition(this.props.before,
+ this.props.after,
+ this.props.last_item_pos))
+ return -1;
+ return this.props.item_id;
+ },
+
+ /**
+ * Remove
+ *
+ * Removes this bookmark. The bookmark should have been located previously
+ * by a call to Find.
+ *
+ * @return nothing
+ */
+ Remove: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Remove");
+ PlacesUtils.bookmarks.removeItem(this.props.item_id);
+ },
+};
+
+extend(Bookmark, PlacesItem);
+
+/**
+ * BookmarkFolder class constructor. Initializes instance properties.
+ */
+function BookmarkFolder(props) {
+ PlacesItem.call(this, props);
+ this.props.type = "folder";
+}
+
+/**
+ * BookmarkFolder instance methods
+ */
+BookmarkFolder.prototype = {
+ /**
+ * Create
+ *
+ * Creates the bookmark folder described by this object's properties.
+ *
+ * @return the id of the created bookmark folder
+ */
+ Create: function() {
+ this.props.folder_id = this.GetOrCreateFolder(this.props.location);
+ Logger.AssertTrue(this.props.folder_id != -1, "Unable to create " +
+ "folder, error creating parent folder " + this.props.location);
+ this.props.item_id = PlacesUtils.bookmarks.createFolder(this.props.folder_id,
+ this.props.folder,
+ -1);
+ this.SetDescription(this.props.description);
+ return this.props.folder_id;
+ },
+
+ /**
+ * Find
+ *
+ * Locates the bookmark folder which corresponds to this object's
+ * properties.
+ *
+ * @return the folder id if the folder was found, otherwise -1
+ */
+ Find: function() {
+ this.props.folder_id = this.GetFolder(this.props.location);
+ if (this.props.folder_id == -1) {
+ Logger.logError("Unable to find folder " + this.props.location);
+ return -1;
+ }
+ this.props.item_id = this.GetPlacesNodeId(
+ this.props.folder_id,
+ Ci.nsINavHistoryResultNode.RESULT_TYPE_FOLDER,
+ this.props.folder);
+ if (!this.CheckDescription(this.props.description))
+ return -1;
+ if (!this.CheckPosition(this.props.before,
+ this.props.after,
+ this.props.last_item_pos))
+ return -1;
+ return this.props.item_id;
+ },
+
+ /**
+ * Remove
+ *
+ * Removes this folder. The folder should have been located previously
+ * by a call to Find.
+ *
+ * @return nothing
+ */
+ Remove: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Remove");
+ PlacesUtils.bookmarks.removeFolderChildren(this.props.item_id);
+ PlacesUtils.bookmarks.removeItem(this.props.item_id);
+ },
+
+ /**
+ * Update
+ *
+ * Updates this bookmark's properties according the properties on this
+ * object's 'updateProps' property.
+ *
+ * @return nothing
+ */
+ Update: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Update");
+ this.SetLocation(this.updateProps.location);
+ this.SetPosition(this.updateProps.position);
+ this.SetTitle(this.updateProps.folder);
+ this.SetDescription(this.updateProps.description);
+ },
+};
+
+extend(BookmarkFolder, PlacesItem);
+
+/**
+ * Livemark class constructor. Initialzes instance properties.
+ */
+function Livemark(props) {
+ PlacesItem.call(this, props);
+ this.props.type = "livemark";
+}
+
+/**
+ * Livemark instance methods
+ */
+Livemark.prototype = {
+ /**
+ * Create
+ *
+ * Creates the livemark described by this object's properties.
+ *
+ * @return the id of the created livemark
+ */
+ Create: function() {
+ this.props.folder_id = this.GetOrCreateFolder(this.props.location);
+ Logger.AssertTrue(this.props.folder_id != -1, "Unable to create " +
+ "folder, error creating parent folder " + this.props.location);
+ let siteURI = null;
+ if (this.props.siteUri != null)
+ siteURI = Services.io.newURI(this.props.siteUri, null, null);
+ let livemarkObj = {parentId: this.props.folder_id,
+ title: this.props.livemark,
+ siteURI: siteURI,
+ feedURI: Services.io.newURI(this.props.feedUri, null, null),
+ index: PlacesUtils.bookmarks.DEFAULT_INDEX};
+
+ // Until this can handle asynchronous creation, we need to spin.
+ let spinningCb = Async.makeSpinningCallback();
+
+ PlacesUtils.livemarks.addLivemark(livemarkObj).then(
+ aLivemark => { spinningCb(null, [Components.results.NS_OK, aLivemark]) },
+ () => { spinningCb(null, [Components.results.NS_ERROR_UNEXPECTED, aLivemark]) }
+ );
+
+ let [status, livemark] = spinningCb.wait();
+ if (!Components.isSuccessCode(status)) {
+ throw new Error(status);
+ }
+
+ this.props.item_id = livemark.id;
+ return this.props.item_id;
+ },
+
+ /**
+ * Find
+ *
+ * Locates the livemark which corresponds to this object's
+ * properties.
+ *
+ * @return the item id if the livemark was found, otherwise -1
+ */
+ Find: function() {
+ this.props.folder_id = this.GetFolder(this.props.location);
+ if (this.props.folder_id == -1) {
+ Logger.logError("Unable to find folder " + this.props.location);
+ return -1;
+ }
+ this.props.item_id = this.GetPlacesNodeId(
+ this.props.folder_id,
+ Ci.nsINavHistoryResultNode.RESULT_TYPE_FOLDER,
+ this.props.livemark);
+ if (!PlacesUtils.annotations
+ .itemHasAnnotation(this.props.item_id, PlacesUtils.LMANNO_FEEDURI)) {
+ Logger.logPotentialError("livemark folder found, but it's just a regular folder, for " +
+ this.toString());
+ this.props.item_id = -1;
+ return -1;
+ }
+ let feedURI = Services.io.newURI(this.props.feedUri, null, null);
+ let lmFeedURISpec =
+ PlacesUtils.annotations.getItemAnnotation(this.props.item_id,
+ PlacesUtils.LMANNO_FEEDURI);
+ if (feedURI.spec != lmFeedURISpec) {
+ Logger.logPotentialError("livemark feed uri not correct, expected: " +
+ this.props.feedUri + ", actual: " + lmFeedURISpec +
+ " for " + this.toString());
+ return -1;
+ }
+ if (this.props.siteUri != null) {
+ let siteURI = Services.io.newURI(this.props.siteUri, null, null);
+ let lmSiteURISpec =
+ PlacesUtils.annotations.getItemAnnotation(this.props.item_id,
+ PlacesUtils.LMANNO_SITEURI);
+ if (siteURI.spec != lmSiteURISpec) {
+ Logger.logPotentialError("livemark site uri not correct, expected: " +
+ this.props.siteUri + ", actual: " + lmSiteURISpec + " for " +
+ this.toString());
+ return -1;
+ }
+ }
+ if (!this.CheckPosition(this.props.before,
+ this.props.after,
+ this.props.last_item_pos))
+ return -1;
+ return this.props.item_id;
+ },
+
+ /**
+ * Update
+ *
+ * Updates this livemark's properties according the properties on this
+ * object's 'updateProps' property.
+ *
+ * @return nothing
+ */
+ Update: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Update");
+ this.SetLocation(this.updateProps.location);
+ this.SetPosition(this.updateProps.position);
+ this.SetTitle(this.updateProps.livemark);
+ return true;
+ },
+
+ /**
+ * Remove
+ *
+ * Removes this livemark. The livemark should have been located previously
+ * by a call to Find.
+ *
+ * @return nothing
+ */
+ Remove: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Remove");
+ PlacesUtils.bookmarks.removeItem(this.props.item_id);
+ },
+};
+
+extend(Livemark, PlacesItem);
+
+/**
+ * Separator class constructor. Initializes instance properties.
+ */
+function Separator(props) {
+ PlacesItem.call(this, props);
+ this.props.type = "separator";
+}
+
+/**
+ * Separator instance methods.
+ */
+Separator.prototype = {
+ /**
+ * Create
+ *
+ * Creates the bookmark separator described by this object's properties.
+ *
+ * @return the id of the created separator
+ */
+ Create: function () {
+ this.props.folder_id = this.GetOrCreateFolder(this.props.location);
+ Logger.AssertTrue(this.props.folder_id != -1, "Unable to create " +
+ "folder, error creating parent folder " + this.props.location);
+ this.props.item_id = PlacesUtils.bookmarks.insertSeparator(this.props.folder_id,
+ -1);
+ return this.props.item_id;
+ },
+
+ /**
+ * Find
+ *
+ * Locates the bookmark separator which corresponds to this object's
+ * properties.
+ *
+ * @return the item id if the separator was found, otherwise -1
+ */
+ Find: function () {
+ this.props.folder_id = this.GetFolder(this.props.location);
+ if (this.props.folder_id == -1) {
+ Logger.logError("Unable to find folder " + this.props.location);
+ return -1;
+ }
+ if (this.props.before == null && this.props.last_item_pos == null) {
+ Logger.logPotentialError("Separator requires 'before' attribute if it's the" +
+ "first item in the list");
+ return -1;
+ }
+ let expected_pos = -1;
+ if (this.props.before) {
+ other_id = this.GetPlacesNodeId(this.props.folder_id,
+ null,
+ this.props.before);
+ if (other_id == -1) {
+ Logger.logPotentialError("Can't find places item " + this.props.before +
+ " for locating separator");
+ return -1;
+ }
+ expected_pos = PlacesUtils.bookmarks.getItemIndex(other_id) - 1;
+ }
+ else {
+ expected_pos = this.props.last_item_pos + 1;
+ }
+ this.props.item_id = PlacesUtils.bookmarks.getIdForItemAt(this.props.folder_id,
+ expected_pos);
+ if (this.props.item_id == -1) {
+ Logger.logPotentialError("No separator found at position " + expected_pos);
+ }
+ else {
+ if (PlacesUtils.bookmarks.getItemType(this.props.item_id) !=
+ PlacesUtils.bookmarks.TYPE_SEPARATOR) {
+ Logger.logPotentialError("Places item at position " + expected_pos +
+ " is not a separator");
+ return -1;
+ }
+ }
+ return this.props.item_id;
+ },
+
+ /**
+ * Update
+ *
+ * Updates this separator's properties according the properties on this
+ * object's 'updateProps' property.
+ *
+ * @return nothing
+ */
+ Update: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Update");
+ this.SetLocation(this.updateProps.location);
+ this.SetPosition(this.updateProps.position);
+ return true;
+ },
+
+ /**
+ * Remove
+ *
+ * Removes this separator. The separator should have been located
+ * previously by a call to Find.
+ *
+ * @return nothing
+ */
+ Remove: function() {
+ Logger.AssertTrue(this.props.item_id != -1 && this.props.item_id != null,
+ "Invalid item_id during Update");
+ PlacesUtils.bookmarks.removeItem(this.props.item_id);
+ },
+};
+
+extend(Separator, PlacesItem);
diff --git a/services/sync/tps/extensions/tps/resource/modules/forms.jsm b/services/sync/tps/extensions/tps/resource/modules/forms.jsm
new file mode 100644
index 000000000..deb1a28a5
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/forms.jsm
@@ -0,0 +1,219 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ Components.utils.import() and acts as a singleton. Only the following
+ listed symbols will exposed on import, and only when and where imported.
+ */
+
+var EXPORTED_SYMBOLS = ["FormData"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://tps/logger.jsm");
+
+Cu.import("resource://gre/modules/FormHistory.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+
+/**
+ * FormDB
+ *
+ * Helper object containing methods to interact with the FormHistory module.
+ */
+var FormDB = {
+ _update(data) {
+ return new Promise((resolve, reject) => {
+ let handlers = {
+ handleError(error) {
+ Logger.logError("Error occurred updating form history: " + Log.exceptionStr(error));
+ reject(error);
+ },
+ handleCompletion(reason) {
+ resolve();
+ }
+ }
+ FormHistory.update(data, handlers);
+ });
+ },
+
+ /**
+ * insertValue
+ *
+ * Adds the specified value for the specified fieldname into form history.
+ *
+ * @param fieldname The form fieldname to insert
+ * @param value The form value to insert
+ * @param us The time, in microseconds, to use for the lastUsed
+ * and firstUsed columns
+ * @return Promise<undefined>
+ */
+ insertValue(fieldname, value, us) {
+ let data = { op: "add", fieldname, value, timesUsed: 1,
+ firstUsed: us, lastUsed: us }
+ return this._update(data);
+ },
+
+ /**
+ * updateValue
+ *
+ * Updates a row in the moz_formhistory table with a new value.
+ *
+ * @param id The id of the row to update
+ * @param newvalue The new value to set
+ * @return Promise<undefined>
+ */
+ updateValue(id, newvalue) {
+ return this._update({ op: "update", guid: id, value: newvalue });
+ },
+
+ /**
+ * getDataForValue
+ *
+ * Retrieves a set of values for a row in the database that
+ * corresponds to the given fieldname and value.
+ *
+ * @param fieldname The fieldname of the row to query
+ * @param value The value of the row to query
+ * @return Promise<null if no row is found with the specified fieldname and value,
+ * or an object containing the row's guid, lastUsed, and firstUsed
+ * values>
+ */
+ getDataForValue(fieldname, value) {
+ return new Promise((resolve, reject) => {
+ let result = null;
+ let handlers = {
+ handleResult(oneResult) {
+ if (result != null) {
+ reject("more than 1 result for this query");
+ return;
+ }
+ result = oneResult;
+ },
+ handleError(error) {
+ Logger.logError("Error occurred updating form history: " + Log.exceptionStr(error));
+ reject(error);
+ },
+ handleCompletion(reason) {
+ resolve(result);
+ }
+ }
+ FormHistory.search(["guid", "lastUsed", "firstUsed"], { fieldname }, handlers);
+ });
+ },
+
+ /**
+ * remove
+ *
+ * Removes the specified GUID from the database.
+ *
+ * @param guid The guid of the item to delete
+ * @return Promise<>
+ */
+ remove(guid) {
+ return this._update({ op: "remove", guid });
+ },
+};
+
+/**
+ * FormData class constructor
+ *
+ * Initializes instance properties.
+ */
+function FormData(props, usSinceEpoch) {
+ this.fieldname = null;
+ this.value = null;
+ this.date = 0;
+ this.newvalue = null;
+ this.usSinceEpoch = usSinceEpoch;
+
+ for (var prop in props) {
+ if (prop in this)
+ this[prop] = props[prop];
+ }
+}
+
+/**
+ * FormData instance methods
+ */
+FormData.prototype = {
+ /**
+ * hours_to_us
+ *
+ * Converts hours since present to microseconds since epoch.
+ *
+ * @param hours The number of hours since the present time (e.g., 0 is
+ * 'now', and -1 is 1 hour ago)
+ * @return the corresponding number of microseconds since the epoch
+ */
+ hours_to_us: function(hours) {
+ return this.usSinceEpoch + (hours * 60 * 60 * 1000 * 1000);
+ },
+
+ /**
+ * Create
+ *
+ * If this FormData object doesn't exist in the moz_formhistory database,
+ * add it. Throws on error.
+ *
+ * @return nothing
+ */
+ Create: function() {
+ Logger.AssertTrue(this.fieldname != null && this.value != null,
+ "Must specify both fieldname and value");
+
+ return FormDB.getDataForValue(this.fieldname, this.value).then(formdata => {
+ if (!formdata) {
+ // this item doesn't exist yet in the db, so we need to insert it
+ return FormDB.insertValue(this.fieldname, this.value,
+ this.hours_to_us(this.date));
+ } else {
+ /* Right now, we ignore this case. If bug 552531 is ever fixed,
+ we might need to add code here to update the firstUsed or
+ lastUsed fields, as appropriate.
+ */
+ }
+ });
+ },
+
+ /**
+ * Find
+ *
+ * Attempts to locate an entry in the moz_formhistory database that
+ * matches the fieldname and value for this FormData object.
+ *
+ * @return true if this entry exists in the database, otherwise false
+ */
+ Find: function() {
+ return FormDB.getDataForValue(this.fieldname, this.value).then(formdata => {
+ let status = formdata != null;
+ if (status) {
+ /*
+ //form history dates currently not synced! bug 552531
+ let us = this.hours_to_us(this.date);
+ status = Logger.AssertTrue(
+ us >= formdata.firstUsed && us <= formdata.lastUsed,
+ "No match for with that date value");
+
+ if (status)
+ */
+ this.id = formdata.guid;
+ }
+ return status;
+ });
+ },
+
+ /**
+ * Remove
+ *
+ * Removes the row represented by this FormData instance from the
+ * moz_formhistory database.
+ *
+ * @return nothing
+ */
+ Remove: function() {
+ /* Right now Weave doesn't handle this correctly, see bug 568363.
+ */
+ return FormDB.remove(this.id);
+ },
+};
diff --git a/services/sync/tps/extensions/tps/resource/modules/history.jsm b/services/sync/tps/extensions/tps/resource/modules/history.jsm
new file mode 100644
index 000000000..78deb42ab
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/history.jsm
@@ -0,0 +1,207 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ * Components.utils.import() and acts as a singleton. Only the following
+ * listed symbols will exposed on import, and only when and where imported.
+ */
+
+var EXPORTED_SYMBOLS = ["HistoryEntry", "DumpHistory"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://tps/logger.jsm");
+Cu.import("resource://services-common/async.js");
+
+var DumpHistory = function TPS_History__DumpHistory() {
+ let writer = {
+ value: "",
+ write: function PlacesItem__dump__write(aStr, aLen) {
+ this.value += aStr;
+ }
+ };
+
+ let query = PlacesUtils.history.getNewQuery();
+ let options = PlacesUtils.history.getNewQueryOptions();
+ let root = PlacesUtils.history.executeQuery(query, options).root;
+ root.containerOpen = true;
+ Logger.logInfo("\n\ndumping history\n", true);
+ for (var i = 0; i < root.childCount; i++) {
+ let node = root.getChild(i);
+ let uri = node.uri;
+ let curvisits = HistoryEntry._getVisits(uri);
+ for (var visit of curvisits) {
+ Logger.logInfo("URI: " + uri + ", type=" + visit.type + ", date=" + visit.date, true);
+ }
+ }
+ root.containerOpen = false;
+ Logger.logInfo("\nend history dump\n", true);
+};
+
+/**
+ * HistoryEntry object
+ *
+ * Contains methods for manipulating browser history entries.
+ */
+var HistoryEntry = {
+ /**
+ * _db
+ *
+ * Returns the DBConnection object for the history service.
+ */
+ get _db() {
+ return PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase).DBConnection;
+ },
+
+ /**
+ * _visitStm
+ *
+ * Return the SQL statement for getting history visit information
+ * from the moz_historyvisits table. Borrowed from Weave's
+ * history.js.
+ */
+ get _visitStm() {
+ let stm = this._db.createStatement(
+ "SELECT visit_type type, visit_date date " +
+ "FROM moz_historyvisits " +
+ "WHERE place_id = (" +
+ "SELECT id " +
+ "FROM moz_places " +
+ "WHERE url_hash = hash(:url) AND url = :url) " +
+ "ORDER BY date DESC LIMIT 20");
+ this.__defineGetter__("_visitStm", () => stm);
+ return stm;
+ },
+
+ /**
+ * _getVisits
+ *
+ * Gets history information about visits to a given uri.
+ *
+ * @param uri The uri to get visits for
+ * @return an array of objects with 'date' and 'type' properties,
+ * corresponding to the visits in the history database for the
+ * given uri
+ */
+ _getVisits: function HistStore__getVisits(uri) {
+ this._visitStm.params.url = uri;
+ return Async.querySpinningly(this._visitStm, ["date", "type"]);
+ },
+
+ /**
+ * Add
+ *
+ * Adds visits for a uri to the history database. Throws on error.
+ *
+ * @param item An object representing one or more visits to a specific uri
+ * @param usSinceEpoch The number of microseconds from Epoch to
+ * the time the current Crossweave run was started
+ * @return nothing
+ */
+ Add: function(item, usSinceEpoch) {
+ Logger.AssertTrue("visits" in item && "uri" in item,
+ "History entry in test file must have both 'visits' " +
+ "and 'uri' properties");
+ let uri = Services.io.newURI(item.uri, null, null);
+ let place = {
+ uri: uri,
+ visits: []
+ };
+ for (let visit of item.visits) {
+ place.visits.push({
+ visitDate: usSinceEpoch + (visit.date * 60 * 60 * 1000 * 1000),
+ transitionType: visit.type
+ });
+ }
+ if ("title" in item) {
+ place.title = item.title;
+ }
+ let cb = Async.makeSpinningCallback();
+ PlacesUtils.asyncHistory.updatePlaces(place, {
+ handleError: function Add_handleError() {
+ cb(new Error("Error adding history entry"));
+ },
+ handleResult: function Add_handleResult() {
+ cb();
+ },
+ handleCompletion: function Add_handleCompletion() {
+ // Nothing to do
+ }
+ });
+ // Spin the event loop to embed this async call in a sync API
+ cb.wait();
+ },
+
+ /**
+ * Find
+ *
+ * Finds visits for a uri to the history database. Throws on error.
+ *
+ * @param item An object representing one or more visits to a specific uri
+ * @param usSinceEpoch The number of microseconds from Epoch to
+ * the time the current Crossweave run was started
+ * @return true if all the visits for the uri are found, otherwise false
+ */
+ Find: function(item, usSinceEpoch) {
+ Logger.AssertTrue("visits" in item && "uri" in item,
+ "History entry in test file must have both 'visits' " +
+ "and 'uri' properties");
+ let curvisits = this._getVisits(item.uri);
+ for (let visit of curvisits) {
+ for (let itemvisit of item.visits) {
+ let expectedDate = itemvisit.date * 60 * 60 * 1000 * 1000
+ + usSinceEpoch;
+ if (visit.type == itemvisit.type && visit.date == expectedDate) {
+ itemvisit.found = true;
+ }
+ }
+ }
+
+ let all_items_found = true;
+ for (let itemvisit of item.visits) {
+ all_items_found = all_items_found && "found" in itemvisit;
+ Logger.logInfo("History entry for " + item.uri + ", type:" +
+ itemvisit.type + ", date:" + itemvisit.date +
+ ("found" in itemvisit ? " is present" : " is not present"));
+ }
+ return all_items_found;
+ },
+
+ /**
+ * Delete
+ *
+ * Removes visits from the history database. Throws on error.
+ *
+ * @param item An object representing items to delete
+ * @param usSinceEpoch The number of microseconds from Epoch to
+ * the time the current Crossweave run was started
+ * @return nothing
+ */
+ Delete: function(item, usSinceEpoch) {
+ if ("uri" in item) {
+ let uri = Services.io.newURI(item.uri, null, null);
+ PlacesUtils.history.removePage(uri);
+ }
+ else if ("host" in item) {
+ PlacesUtils.history.removePagesFromHost(item.host, false);
+ }
+ else if ("begin" in item && "end" in item) {
+ let cb = Async.makeSpinningCallback();
+ let msSinceEpoch = parseInt(usSinceEpoch / 1000);
+ let filter = {
+ beginDate: new Date(msSinceEpoch + (item.begin * 60 * 60 * 1000)),
+ endDate: new Date(msSinceEpoch + (item.end * 60 * 60 * 1000))
+ };
+ PlacesUtils.history.removeVisitsByFilter(filter)
+ .catch(ex => Logger.AssertTrue(false, "An error occurred while deleting history: " + ex))
+ .then(result => {cb(null, result)}, err => {cb(err)});
+ Async.waitForSyncCallback(cb);
+ }
+ else {
+ Logger.AssertTrue(false, "invalid entry in delete history");
+ }
+ },
+};
diff --git a/services/sync/tps/extensions/tps/resource/modules/passwords.jsm b/services/sync/tps/extensions/tps/resource/modules/passwords.jsm
new file mode 100644
index 000000000..a84800bab
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/passwords.jsm
@@ -0,0 +1,163 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ * Components.utils.import() and acts as a singleton. Only the following
+ * listed symbols will exposed on import, and only when and where imported.
+ */
+
+var EXPORTED_SYMBOLS = ["Password", "DumpPasswords"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://tps/logger.jsm");
+
+var nsLoginInfo = new Components.Constructor(
+ "@mozilla.org/login-manager/loginInfo;1",
+ Ci.nsILoginInfo,
+ "init");
+
+var DumpPasswords = function TPS__Passwords__DumpPasswords() {
+ let logins = Services.logins.getAllLogins();
+ Logger.logInfo("\ndumping password list\n", true);
+ for (var i = 0; i < logins.length; i++) {
+ Logger.logInfo("* host=" + logins[i].hostname + ", submitURL=" + logins[i].formSubmitURL +
+ ", realm=" + logins[i].httpRealm + ", password=" + logins[i].password +
+ ", passwordField=" + logins[i].passwordField + ", username=" +
+ logins[i].username + ", usernameField=" + logins[i].usernameField, true);
+ }
+ Logger.logInfo("\n\nend password list\n", true);
+};
+
+/**
+ * PasswordProps object; holds password properties.
+ */
+function PasswordProps(props) {
+ this.hostname = null;
+ this.submitURL = null;
+ this.realm = null;
+ this.username = "";
+ this.password = "";
+ this.usernameField = "";
+ this.passwordField = "";
+ this.delete = false;
+
+ for (var prop in props) {
+ if (prop in this)
+ this[prop] = props[prop];
+ }
+}
+
+/**
+ * Password class constructor. Initializes instance properties.
+ */
+function Password(props) {
+ this.props = new PasswordProps(props);
+ if ("changes" in props) {
+ this.updateProps = new PasswordProps(props);
+ for (var prop in props.changes)
+ if (prop in this.updateProps)
+ this.updateProps[prop] = props.changes[prop];
+ }
+ else {
+ this.updateProps = null;
+ }
+}
+
+/**
+ * Password instance methods.
+ */
+Password.prototype = {
+ /**
+ * Create
+ *
+ * Adds a password entry to the login manager for the password
+ * represented by this object's properties. Throws on error.
+ *
+ * @return the new login guid
+ */
+ Create: function() {
+ let login = new nsLoginInfo(this.props.hostname, this.props.submitURL,
+ this.props.realm, this.props.username,
+ this.props.password,
+ this.props.usernameField,
+ this.props.passwordField);
+ Services.logins.addLogin(login);
+ login.QueryInterface(Ci.nsILoginMetaInfo);
+ return login.guid;
+ },
+
+ /**
+ * Find
+ *
+ * Finds a password entry in the login manager, for the password
+ * represented by this object's properties.
+ *
+ * @return the guid of the password if found, otherwise -1
+ */
+ Find: function() {
+ let logins = Services.logins.findLogins({},
+ this.props.hostname,
+ this.props.submitURL,
+ this.props.realm);
+ for (var i = 0; i < logins.length; i++) {
+ if (logins[i].username == this.props.username &&
+ logins[i].password == this.props.password &&
+ logins[i].usernameField == this.props.usernameField &&
+ logins[i].passwordField == this.props.passwordField) {
+ logins[i].QueryInterface(Ci.nsILoginMetaInfo);
+ return logins[i].guid;
+ }
+ }
+ return -1;
+ },
+
+ /**
+ * Update
+ *
+ * Updates an existing password entry in the login manager with
+ * new properties. Throws on error. The 'old' properties are this
+ * object's properties, the 'new' properties are the properties in
+ * this object's 'updateProps' object.
+ *
+ * @return nothing
+ */
+ Update: function() {
+ let oldlogin = new nsLoginInfo(this.props.hostname,
+ this.props.submitURL,
+ this.props.realm,
+ this.props.username,
+ this.props.password,
+ this.props.usernameField,
+ this.props.passwordField);
+ let newlogin = new nsLoginInfo(this.updateProps.hostname,
+ this.updateProps.submitURL,
+ this.updateProps.realm,
+ this.updateProps.username,
+ this.updateProps.password,
+ this.updateProps.usernameField,
+ this.updateProps.passwordField);
+ Services.logins.modifyLogin(oldlogin, newlogin);
+ },
+
+ /**
+ * Remove
+ *
+ * Removes an entry from the login manager for a password which
+ * matches this object's properties. Throws on error.
+ *
+ * @return nothing
+ */
+ Remove: function() {
+ let login = new nsLoginInfo(this.props.hostname,
+ this.props.submitURL,
+ this.props.realm,
+ this.props.username,
+ this.props.password,
+ this.props.usernameField,
+ this.props.passwordField);
+ Services.logins.removeLogin(login);
+ },
+};
diff --git a/services/sync/tps/extensions/tps/resource/modules/prefs.jsm b/services/sync/tps/extensions/tps/resource/modules/prefs.jsm
new file mode 100644
index 000000000..286c5a6b5
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/prefs.jsm
@@ -0,0 +1,117 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ Components.utils.import() and acts as a singleton.
+ Only the following listed symbols will exposed on import, and only when
+ and where imported. */
+
+var EXPORTED_SYMBOLS = ["Preference"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+const WEAVE_PREF_PREFIX = "services.sync.prefs.sync.";
+
+var prefs = Cc["@mozilla.org/preferences-service;1"]
+ .getService(Ci.nsIPrefBranch);
+
+Cu.import("resource://tps/logger.jsm");
+
+/**
+ * Preference class constructor
+ *
+ * Initializes instance properties.
+ */
+function Preference (props) {
+ Logger.AssertTrue("name" in props && "value" in props,
+ "Preference must have both name and value");
+
+ this.name = props.name;
+ this.value = props.value;
+}
+
+/**
+ * Preference instance methods
+ */
+Preference.prototype = {
+ /**
+ * Modify
+ *
+ * Sets the value of the preference this.name to this.value.
+ * Throws on error.
+ *
+ * @return nothing
+ */
+ Modify: function() {
+ // Determine if this pref is actually something Weave even looks at.
+ let weavepref = WEAVE_PREF_PREFIX + this.name;
+ try {
+ let syncPref = prefs.getBoolPref(weavepref);
+ if (!syncPref)
+ prefs.setBoolPref(weavepref, true);
+ }
+ catch(e) {
+ Logger.AssertTrue(false, "Weave doesn't sync pref " + this.name);
+ }
+
+ // Modify the pref; throw an exception if the pref type is different
+ // than the value type specified in the test.
+ let prefType = prefs.getPrefType(this.name);
+ switch (prefType) {
+ case Ci.nsIPrefBranch.PREF_INT:
+ Logger.AssertEqual(typeof(this.value), "number",
+ "Wrong type used for preference value");
+ prefs.setIntPref(this.name, this.value);
+ break;
+ case Ci.nsIPrefBranch.PREF_STRING:
+ Logger.AssertEqual(typeof(this.value), "string",
+ "Wrong type used for preference value");
+ prefs.setCharPref(this.name, this.value);
+ break;
+ case Ci.nsIPrefBranch.PREF_BOOL:
+ Logger.AssertEqual(typeof(this.value), "boolean",
+ "Wrong type used for preference value");
+ prefs.setBoolPref(this.name, this.value);
+ break;
+ }
+ },
+
+ /**
+ * Find
+ *
+ * Verifies that the preference this.name has the value
+ * this.value. Throws on error, or if the pref's type or value
+ * doesn't match.
+ *
+ * @return nothing
+ */
+ Find: function() {
+ // Read the pref value.
+ let value;
+ try {
+ let prefType = prefs.getPrefType(this.name);
+ switch(prefType) {
+ case Ci.nsIPrefBranch.PREF_INT:
+ value = prefs.getIntPref(this.name);
+ break;
+ case Ci.nsIPrefBranch.PREF_STRING:
+ value = prefs.getCharPref(this.name);
+ break;
+ case Ci.nsIPrefBranch.PREF_BOOL:
+ value = prefs.getBoolPref(this.name);
+ break;
+ }
+ }
+ catch (e) {
+ Logger.AssertTrue(false, "Error accessing pref " + this.name);
+ }
+
+ // Throw an exception if the current and expected values aren't of
+ // the same type, or don't have the same values.
+ Logger.AssertEqual(typeof(value), typeof(this.value),
+ "Value types don't match");
+ Logger.AssertEqual(value, this.value, "Preference values don't match");
+ },
+};
+
diff --git a/services/sync/tps/extensions/tps/resource/modules/tabs.jsm b/services/sync/tps/extensions/tps/resource/modules/tabs.jsm
new file mode 100644
index 000000000..af983573f
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/tabs.jsm
@@ -0,0 +1,67 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ Components.utils.import() and acts as a singleton.
+ Only the following listed symbols will exposed on import, and only when
+ and where imported. */
+
+const EXPORTED_SYMBOLS = ["BrowserTabs"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-sync/main.js");
+
+var BrowserTabs = {
+ /**
+ * Add
+ *
+ * Opens a new tab in the current browser window for the
+ * given uri. Throws on error.
+ *
+ * @param uri The uri to load in the new tab
+ * @return nothing
+ */
+ Add: function(uri, fn) {
+ // Open the uri in a new tab in the current browser window, and calls
+ // the callback fn from the tab's onload handler.
+ let wm = Cc["@mozilla.org/appshell/window-mediator;1"]
+ .getService(Ci.nsIWindowMediator);
+ let mainWindow = wm.getMostRecentWindow("navigator:browser");
+ let newtab = mainWindow.getBrowser().addTab(uri);
+ mainWindow.getBrowser().selectedTab = newtab;
+ let win = mainWindow.getBrowser().getBrowserForTab(newtab);
+ win.addEventListener("load", function() { fn.call(); }, true);
+ },
+
+ /**
+ * Find
+ *
+ * Finds the specified uri and title in Weave's list of remote tabs
+ * for the specified profile.
+ *
+ * @param uri The uri of the tab to find
+ * @param title The page title of the tab to find
+ * @param profile The profile to search for tabs
+ * @return true if the specified tab could be found, otherwise false
+ */
+ Find: function(uri, title, profile) {
+ // Find the uri in Weave's list of tabs for the given profile.
+ let engine = Weave.Service.engineManager.get("tabs");
+ for (let [guid, client] of Object.entries(engine.getAllClients())) {
+ if (!client.tabs) {
+ continue;
+ }
+ for (let key in client.tabs) {
+ let tab = client.tabs[key];
+ let weaveTabUrl = tab.urlHistory[0];
+ if (uri == weaveTabUrl && profile == client.clientName)
+ if (title == undefined || title == tab.title)
+ return true;
+ }
+ }
+ return false;
+ },
+};
+
diff --git a/services/sync/tps/extensions/tps/resource/modules/windows.jsm b/services/sync/tps/extensions/tps/resource/modules/windows.jsm
new file mode 100644
index 000000000..d892aea56
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/modules/windows.jsm
@@ -0,0 +1,36 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"use strict";
+
+ /* This is a JavaScript module (JSM) to be imported via
+ Components.utils.import() and acts as a singleton.
+ Only the following listed symbols will exposed on import, and only when
+ and where imported. */
+
+const EXPORTED_SYMBOLS = ["BrowserWindows"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-sync/main.js");
+
+var BrowserWindows = {
+ /**
+ * Add
+ *
+ * Opens a new window. Throws on error.
+ *
+ * @param aPrivate The private option.
+ * @return nothing
+ */
+ Add: function(aPrivate, fn) {
+ let wm = Cc["@mozilla.org/appshell/window-mediator;1"]
+ .getService(Ci.nsIWindowMediator);
+ let mainWindow = wm.getMostRecentWindow("navigator:browser");
+ let win = mainWindow.OpenBrowserWindow({private: aPrivate});
+ win.addEventListener("load", function onLoad() {
+ win.removeEventListener("load", onLoad, false);
+ fn.call(win);
+ }, false);
+ }
+};
diff --git a/services/sync/tps/extensions/tps/resource/quit.js b/services/sync/tps/extensions/tps/resource/quit.js
new file mode 100644
index 000000000..0ec5498b0
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/quit.js
@@ -0,0 +1,63 @@
+/* -*- indent-tabs-mode: nil -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*
+ From mozilla/toolkit/content
+ These files did not have a license
+*/
+var EXPORTED_SYMBOLS = ["goQuitApplication"];
+
+Components.utils.import("resource://gre/modules/Services.jsm");
+
+function canQuitApplication() {
+ try {
+ var cancelQuit = Components.classes["@mozilla.org/supports-PRBool;1"]
+ .createInstance(Components.interfaces.nsISupportsPRBool);
+ Services.obs.notifyObservers(cancelQuit, "quit-application-requested", null);
+
+ // Something aborted the quit process.
+ if (cancelQuit.data) {
+ return false;
+ }
+ }
+ catch (ex) {}
+
+ return true;
+}
+
+function goQuitApplication() {
+ if (!canQuitApplication()) {
+ return false;
+ }
+
+ const kAppStartup = '@mozilla.org/toolkit/app-startup;1';
+ const kAppShell = '@mozilla.org/appshell/appShellService;1';
+ var appService;
+ var forceQuit;
+
+ if (kAppStartup in Components.classes) {
+ appService = Components.classes[kAppStartup]
+ .getService(Components.interfaces.nsIAppStartup);
+ forceQuit = Components.interfaces.nsIAppStartup.eForceQuit;
+ }
+ else if (kAppShell in Components.classes) {
+ appService = Components.classes[kAppShell].
+ getService(Components.interfaces.nsIAppShellService);
+ forceQuit = Components.interfaces.nsIAppShellService.eForceQuit;
+ }
+ else {
+ throw new Error('goQuitApplication: no AppStartup/appShell');
+ }
+
+ try {
+ appService.quit(forceQuit);
+ }
+ catch(ex) {
+ throw new Error('goQuitApplication: ' + ex);
+ }
+
+ return true;
+}
+
diff --git a/services/sync/tps/extensions/tps/resource/tps.jsm b/services/sync/tps/extensions/tps/resource/tps.jsm
new file mode 100644
index 000000000..f4cc0214a
--- /dev/null
+++ b/services/sync/tps/extensions/tps/resource/tps.jsm
@@ -0,0 +1,1340 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+ /* This is a JavaScript module (JSM) to be imported via
+ * Components.utils.import() and acts as a singleton. Only the following
+ * listed symbols will exposed on import, and only when and where imported.
+ */
+
+var EXPORTED_SYMBOLS = ["ACTIONS", "TPS"];
+
+const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+var module = this;
+
+// Global modules
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/AppConstants.jsm");
+Cu.import("resource://gre/modules/PlacesUtils.jsm");
+Cu.import("resource://gre/modules/FileUtils.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-sync/constants.js");
+Cu.import("resource://services-sync/main.js");
+Cu.import("resource://services-sync/util.js");
+Cu.import("resource://services-sync/telemetry.js");
+Cu.import("resource://services-sync/bookmark_validator.js");
+Cu.import("resource://services-sync/engines/passwords.js");
+Cu.import("resource://services-sync/engines/forms.js");
+Cu.import("resource://services-sync/engines/addons.js");
+// TPS modules
+Cu.import("resource://tps/logger.jsm");
+
+// Module wrappers for tests
+Cu.import("resource://tps/modules/addons.jsm");
+Cu.import("resource://tps/modules/bookmarks.jsm");
+Cu.import("resource://tps/modules/forms.jsm");
+Cu.import("resource://tps/modules/history.jsm");
+Cu.import("resource://tps/modules/passwords.jsm");
+Cu.import("resource://tps/modules/prefs.jsm");
+Cu.import("resource://tps/modules/tabs.jsm");
+Cu.import("resource://tps/modules/windows.jsm");
+
+var hh = Cc["@mozilla.org/network/protocol;1?name=http"]
+ .getService(Ci.nsIHttpProtocolHandler);
+var prefs = Cc["@mozilla.org/preferences-service;1"]
+ .getService(Ci.nsIPrefBranch);
+
+var mozmillInit = {};
+Cu.import('resource://mozmill/driver/mozmill.js', mozmillInit);
+
+XPCOMUtils.defineLazyGetter(this, "fileProtocolHandler", () => {
+ let fileHandler = Services.io.getProtocolHandler("file");
+ return fileHandler.QueryInterface(Ci.nsIFileProtocolHandler);
+});
+
+// Options for wiping data during a sync
+const SYNC_RESET_CLIENT = "resetClient";
+const SYNC_WIPE_CLIENT = "wipeClient";
+const SYNC_WIPE_REMOTE = "wipeRemote";
+
+// Actions a test can perform
+const ACTION_ADD = "add";
+const ACTION_DELETE = "delete";
+const ACTION_MODIFY = "modify";
+const ACTION_PRIVATE_BROWSING = "private-browsing";
+const ACTION_SET_ENABLED = "set-enabled";
+const ACTION_SYNC = "sync";
+const ACTION_SYNC_RESET_CLIENT = SYNC_RESET_CLIENT;
+const ACTION_SYNC_WIPE_CLIENT = SYNC_WIPE_CLIENT;
+const ACTION_SYNC_WIPE_REMOTE = SYNC_WIPE_REMOTE;
+const ACTION_VERIFY = "verify";
+const ACTION_VERIFY_NOT = "verify-not";
+
+const ACTIONS = [
+ ACTION_ADD,
+ ACTION_DELETE,
+ ACTION_MODIFY,
+ ACTION_PRIVATE_BROWSING,
+ ACTION_SET_ENABLED,
+ ACTION_SYNC,
+ ACTION_SYNC_RESET_CLIENT,
+ ACTION_SYNC_WIPE_CLIENT,
+ ACTION_SYNC_WIPE_REMOTE,
+ ACTION_VERIFY,
+ ACTION_VERIFY_NOT,
+];
+
+const OBSERVER_TOPICS = ["fxaccounts:onlogin",
+ "fxaccounts:onlogout",
+ "private-browsing",
+ "profile-before-change",
+ "sessionstore-windows-restored",
+ "weave:engine:start-tracking",
+ "weave:engine:stop-tracking",
+ "weave:service:login:error",
+ "weave:service:setup-complete",
+ "weave:service:sync:finish",
+ "weave:service:sync:delayed",
+ "weave:service:sync:error",
+ "weave:service:sync:start"
+ ];
+
+var TPS = {
+ _currentAction: -1,
+ _currentPhase: -1,
+ _enabledEngines: null,
+ _errors: 0,
+ _isTracking: false,
+ _operations_pending: 0,
+ _phaseFinished: false,
+ _phaselist: {},
+ _setupComplete: false,
+ _syncActive: false,
+ _syncCount: 0,
+ _syncsReportedViaTelemetry: 0,
+ _syncErrors: 0,
+ _syncWipeAction: null,
+ _tabsAdded: 0,
+ _tabsFinished: 0,
+ _test: null,
+ _triggeredSync: false,
+ _usSinceEpoch: 0,
+ _requestedQuit: false,
+ shouldValidateAddons: false,
+ shouldValidateBookmarks: false,
+ shouldValidatePasswords: false,
+ shouldValidateForms: false,
+
+ _init: function TPS__init() {
+ // Check if Firefox Accounts is enabled
+ let service = Cc["@mozilla.org/weave/service;1"]
+ .getService(Components.interfaces.nsISupports)
+ .wrappedJSObject;
+ this.fxaccounts_enabled = service.fxAccountsEnabled;
+
+ this.delayAutoSync();
+
+ OBSERVER_TOPICS.forEach(function (aTopic) {
+ Services.obs.addObserver(this, aTopic, true);
+ }, this);
+
+ // Configure some logging prefs for Sync itself.
+ Weave.Svc.Prefs.set("log.appender.dump", "Debug");
+ // Import the appropriate authentication module
+ if (this.fxaccounts_enabled) {
+ Cu.import("resource://tps/auth/fxaccounts.jsm", module);
+ }
+ else {
+ Cu.import("resource://tps/auth/sync.jsm", module);
+ }
+ },
+
+ DumpError(msg, exc = null) {
+ this._errors++;
+ let errInfo;
+ if (exc) {
+ errInfo = Log.exceptionStr(exc); // includes details and stack-trace.
+ } else {
+ // always write a stack even if no error passed.
+ errInfo = Log.stackTrace(new Error());
+ }
+ Logger.logError(`[phase ${this._currentPhase}] ${msg} - ${errInfo}`);
+ this.quit();
+ },
+
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsIObserver,
+ Ci.nsISupportsWeakReference]),
+
+ observe: function TPS__observe(subject, topic, data) {
+ try {
+ Logger.logInfo("----------event observed: " + topic);
+
+ switch(topic) {
+ case "private-browsing":
+ Logger.logInfo("private browsing " + data);
+ break;
+
+ case "profile-before-change":
+ OBSERVER_TOPICS.forEach(function(topic) {
+ Services.obs.removeObserver(this, topic);
+ }, this);
+
+ Logger.close();
+
+ break;
+
+ case "sessionstore-windows-restored":
+ Utils.nextTick(this.RunNextTestAction, this);
+ break;
+
+ case "weave:service:setup-complete":
+ this._setupComplete = true;
+
+ if (this._syncWipeAction) {
+ Weave.Svc.Prefs.set("firstSync", this._syncWipeAction);
+ this._syncWipeAction = null;
+ }
+
+ break;
+
+ case "weave:service:sync:error":
+ this._syncActive = false;
+
+ this.delayAutoSync();
+
+ // If this is the first sync error, retry...
+ if (this._syncErrors === 0) {
+ Logger.logInfo("Sync error; retrying...");
+ this._syncErrors++;
+ Utils.nextTick(this.RunNextTestAction, this);
+ }
+ else {
+ this._triggeredSync = false;
+ this.DumpError("Sync error; aborting test");
+ return;
+ }
+
+ break;
+
+ case "weave:service:sync:finish":
+ this._syncActive = false;
+ this._syncErrors = 0;
+ this._triggeredSync = false;
+
+ this.delayAutoSync();
+
+ // Wait a second before continuing, otherwise we can get
+ // 'sync not complete' errors.
+ Utils.namedTimer(function () {
+ this.FinishAsyncOperation();
+ }, 1000, this, "postsync");
+
+ break;
+
+ case "weave:service:sync:start":
+ // Ensure that the sync operation has been started by TPS
+ if (!this._triggeredSync) {
+ this.DumpError("Automatic sync got triggered, which is not allowed.")
+ }
+
+ this._syncActive = true;
+ break;
+
+ case "weave:engine:start-tracking":
+ this._isTracking = true;
+ break;
+
+ case "weave:engine:stop-tracking":
+ this._isTracking = false;
+ break;
+ }
+ }
+ catch (e) {
+ this.DumpError("Observer failed", e);
+ return;
+ }
+ },
+
+ /**
+ * Given that we cannot complely disable the automatic sync operations, we
+ * massively delay the next sync. Sync operations have to only happen when
+ * directly called via TPS.Sync()!
+ */
+ delayAutoSync: function TPS_delayAutoSync() {
+ Weave.Svc.Prefs.set("scheduler.eolInterval", 7200);
+ Weave.Svc.Prefs.set("scheduler.immediateInterval", 7200);
+ Weave.Svc.Prefs.set("scheduler.idleInterval", 7200);
+ Weave.Svc.Prefs.set("scheduler.activeInterval", 7200);
+ Weave.Svc.Prefs.set("syncThreshold", 10000000);
+ },
+
+ StartAsyncOperation: function TPS__StartAsyncOperation() {
+ this._operations_pending++;
+ },
+
+ FinishAsyncOperation: function TPS__FinishAsyncOperation() {
+ this._operations_pending--;
+ if (!this.operations_pending) {
+ this._currentAction++;
+ Utils.nextTick(function() {
+ this.RunNextTestAction();
+ }, this);
+ }
+ },
+
+ quit: function TPS__quit() {
+ this._requestedQuit = true;
+ this.goQuitApplication();
+ },
+
+ HandleWindows: function (aWindow, action) {
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on window " + JSON.stringify(aWindow));
+ switch(action) {
+ case ACTION_ADD:
+ BrowserWindows.Add(aWindow.private, function(win) {
+ Logger.logInfo("window finished loading");
+ this.FinishAsyncOperation();
+ }.bind(this));
+ break;
+ }
+ Logger.logPass("executing action " + action.toUpperCase() + " on windows");
+ },
+
+ HandleTabs: function (tabs, action) {
+ this._tabsAdded = tabs.length;
+ this._tabsFinished = 0;
+ for (let tab of tabs) {
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on tab " + JSON.stringify(tab));
+ switch(action) {
+ case ACTION_ADD:
+ // When adding tabs, we keep track of how many tabs we're adding,
+ // and wait until we've received that many onload events from our
+ // new tabs before continuing
+ let that = this;
+ let taburi = tab.uri;
+ BrowserTabs.Add(tab.uri, function() {
+ that._tabsFinished++;
+ Logger.logInfo("tab for " + taburi + " finished loading");
+ if (that._tabsFinished == that._tabsAdded) {
+ Logger.logInfo("all tabs loaded, continuing...");
+
+ // Wait a second before continuing to be sure tabs can be synced,
+ // otherwise we can get 'error locating tab'
+ Utils.namedTimer(function () {
+ that.FinishAsyncOperation();
+ }, 1000, this, "postTabsOpening");
+ }
+ });
+ break;
+ case ACTION_VERIFY:
+ Logger.AssertTrue(typeof(tab.profile) != "undefined",
+ "profile must be defined when verifying tabs");
+ Logger.AssertTrue(
+ BrowserTabs.Find(tab.uri, tab.title, tab.profile), "error locating tab");
+ break;
+ case ACTION_VERIFY_NOT:
+ Logger.AssertTrue(typeof(tab.profile) != "undefined",
+ "profile must be defined when verifying tabs");
+ Logger.AssertTrue(
+ !BrowserTabs.Find(tab.uri, tab.title, tab.profile),
+ "tab found which was expected to be absent");
+ break;
+ default:
+ Logger.AssertTrue(false, "invalid action: " + action);
+ }
+ }
+ Logger.logPass("executing action " + action.toUpperCase() + " on tabs");
+ },
+
+ HandlePrefs: function (prefs, action) {
+ for (let pref of prefs) {
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on pref " + JSON.stringify(pref));
+ let preference = new Preference(pref);
+ switch(action) {
+ case ACTION_MODIFY:
+ preference.Modify();
+ break;
+ case ACTION_VERIFY:
+ preference.Find();
+ break;
+ default:
+ Logger.AssertTrue(false, "invalid action: " + action);
+ }
+ }
+ Logger.logPass("executing action " + action.toUpperCase() + " on pref");
+ },
+
+ HandleForms: function (data, action) {
+ this.shouldValidateForms = true;
+ for (let datum of data) {
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on form entry " + JSON.stringify(datum));
+ let formdata = new FormData(datum, this._usSinceEpoch);
+ switch(action) {
+ case ACTION_ADD:
+ Async.promiseSpinningly(formdata.Create());
+ break;
+ case ACTION_DELETE:
+ Async.promiseSpinningly(formdata.Remove());
+ break;
+ case ACTION_VERIFY:
+ Logger.AssertTrue(Async.promiseSpinningly(formdata.Find()),
+ "form data not found");
+ break;
+ case ACTION_VERIFY_NOT:
+ Logger.AssertTrue(!Async.promiseSpinningly(formdata.Find()),
+ "form data found, but it shouldn't be present");
+ break;
+ default:
+ Logger.AssertTrue(false, "invalid action: " + action);
+ }
+ }
+ Logger.logPass("executing action " + action.toUpperCase() +
+ " on formdata");
+ },
+
+ HandleHistory: function (entries, action) {
+ try {
+ for (let entry of entries) {
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on history entry " + JSON.stringify(entry));
+ switch(action) {
+ case ACTION_ADD:
+ HistoryEntry.Add(entry, this._usSinceEpoch);
+ break;
+ case ACTION_DELETE:
+ HistoryEntry.Delete(entry, this._usSinceEpoch);
+ break;
+ case ACTION_VERIFY:
+ Logger.AssertTrue(HistoryEntry.Find(entry, this._usSinceEpoch),
+ "Uri visits not found in history database");
+ break;
+ case ACTION_VERIFY_NOT:
+ Logger.AssertTrue(!HistoryEntry.Find(entry, this._usSinceEpoch),
+ "Uri visits found in history database, but they shouldn't be");
+ break;
+ default:
+ Logger.AssertTrue(false, "invalid action: " + action);
+ }
+ }
+ Logger.logPass("executing action " + action.toUpperCase() +
+ " on history");
+ }
+ catch(e) {
+ DumpHistory();
+ throw(e);
+ }
+ },
+
+ HandlePasswords: function (passwords, action) {
+ this.shouldValidatePasswords = true;
+ try {
+ for (let password of passwords) {
+ let password_id = -1;
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on password " + JSON.stringify(password));
+ let passwordOb = new Password(password);
+ switch (action) {
+ case ACTION_ADD:
+ Logger.AssertTrue(passwordOb.Create() > -1, "error adding password");
+ break;
+ case ACTION_VERIFY:
+ Logger.AssertTrue(passwordOb.Find() != -1, "password not found");
+ break;
+ case ACTION_VERIFY_NOT:
+ Logger.AssertTrue(passwordOb.Find() == -1,
+ "password found, but it shouldn't exist");
+ break;
+ case ACTION_DELETE:
+ Logger.AssertTrue(passwordOb.Find() != -1, "password not found");
+ passwordOb.Remove();
+ break;
+ case ACTION_MODIFY:
+ if (passwordOb.updateProps != null) {
+ Logger.AssertTrue(passwordOb.Find() != -1, "password not found");
+ passwordOb.Update();
+ }
+ break;
+ default:
+ Logger.AssertTrue(false, "invalid action: " + action);
+ }
+ }
+ Logger.logPass("executing action " + action.toUpperCase() +
+ " on passwords");
+ }
+ catch(e) {
+ DumpPasswords();
+ throw(e);
+ }
+ },
+
+ HandleAddons: function (addons, action, state) {
+ this.shouldValidateAddons = true;
+ for (let entry of addons) {
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on addon " + JSON.stringify(entry));
+ let addon = new Addon(this, entry);
+ switch(action) {
+ case ACTION_ADD:
+ addon.install();
+ break;
+ case ACTION_DELETE:
+ addon.uninstall();
+ break;
+ case ACTION_VERIFY:
+ Logger.AssertTrue(addon.find(state), 'addon ' + addon.id + ' not found');
+ break;
+ case ACTION_VERIFY_NOT:
+ Logger.AssertFalse(addon.find(state), 'addon ' + addon.id + " is present, but it shouldn't be");
+ break;
+ case ACTION_SET_ENABLED:
+ Logger.AssertTrue(addon.setEnabled(state), 'addon ' + addon.id + ' not found');
+ break;
+ default:
+ throw new Error("Unknown action for add-on: " + action);
+ }
+ }
+ Logger.logPass("executing action " + action.toUpperCase() +
+ " on addons");
+ },
+
+ HandleBookmarks: function (bookmarks, action) {
+ this.shouldValidateBookmarks = true;
+ try {
+ let items = [];
+ for (let folder in bookmarks) {
+ let last_item_pos = -1;
+ for (let bookmark of bookmarks[folder]) {
+ Logger.clearPotentialError();
+ let placesItem;
+ bookmark['location'] = folder;
+
+ if (last_item_pos != -1)
+ bookmark['last_item_pos'] = last_item_pos;
+ let item_id = -1;
+
+ if (action != ACTION_MODIFY && action != ACTION_DELETE)
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on bookmark " + JSON.stringify(bookmark));
+
+ if ("uri" in bookmark)
+ placesItem = new Bookmark(bookmark);
+ else if ("folder" in bookmark)
+ placesItem = new BookmarkFolder(bookmark);
+ else if ("livemark" in bookmark)
+ placesItem = new Livemark(bookmark);
+ else if ("separator" in bookmark)
+ placesItem = new Separator(bookmark);
+
+ if (action == ACTION_ADD) {
+ item_id = placesItem.Create();
+ }
+ else {
+ item_id = placesItem.Find();
+ if (action == ACTION_VERIFY_NOT) {
+ Logger.AssertTrue(item_id == -1,
+ "places item exists but it shouldn't: " +
+ JSON.stringify(bookmark));
+ }
+ else
+ Logger.AssertTrue(item_id != -1, "places item not found", true);
+ }
+
+ last_item_pos = placesItem.GetItemIndex();
+ items.push(placesItem);
+ }
+ }
+
+ if (action == ACTION_DELETE || action == ACTION_MODIFY) {
+ for (let item of items) {
+ Logger.logInfo("executing action " + action.toUpperCase() +
+ " on bookmark " + JSON.stringify(item));
+ switch(action) {
+ case ACTION_DELETE:
+ item.Remove();
+ break;
+ case ACTION_MODIFY:
+ if (item.updateProps != null)
+ item.Update();
+ break;
+ }
+ }
+ }
+
+ Logger.logPass("executing action " + action.toUpperCase() +
+ " on bookmarks");
+ }
+ catch (e) {
+ DumpBookmarks();
+ throw(e);
+ }
+ },
+
+ MozmillEndTestListener: function TPS__MozmillEndTestListener(obj) {
+ Logger.logInfo("mozmill endTest: " + JSON.stringify(obj));
+ if (obj.failed > 0) {
+ this.DumpError('mozmill test failed, name: ' + obj.name + ', reason: ' + JSON.stringify(obj.fails));
+ return;
+ }
+ else if ('skipped' in obj && obj.skipped) {
+ this.DumpError('mozmill test failed, name: ' + obj.name + ', reason: ' + obj.skipped_reason);
+ return;
+ }
+ else {
+ Utils.namedTimer(function() {
+ this.FinishAsyncOperation();
+ }, 2000, this, "postmozmilltest");
+ }
+ },
+
+ MozmillSetTestListener: function TPS__MozmillSetTestListener(obj) {
+ Logger.logInfo("mozmill setTest: " + obj.name);
+ },
+
+ Cleanup() {
+ try {
+ this.WipeServer();
+ } catch (ex) {
+ Logger.logError("Failed to wipe server: " + Log.exceptionStr(ex));
+ }
+ try {
+ if (Authentication.isLoggedIn) {
+ // signout and wait for Sync to completely reset itself.
+ Logger.logInfo("signing out");
+ let waiter = this.createEventWaiter("weave:service:start-over:finish");
+ Authentication.signOut();
+ waiter();
+ Logger.logInfo("signout complete");
+ }
+ } catch (e) {
+ Logger.logError("Failed to sign out: " + Log.exceptionStr(e));
+ }
+ },
+
+ /**
+ * Use Sync's bookmark validation code to see if we've corrupted the tree.
+ */
+ ValidateBookmarks() {
+
+ let getServerBookmarkState = () => {
+ let bookmarkEngine = Weave.Service.engineManager.get('bookmarks');
+ let collection = bookmarkEngine.itemSource();
+ let collectionKey = bookmarkEngine.service.collectionKeys.keyForCollection(bookmarkEngine.name);
+ collection.full = true;
+ let items = [];
+ collection.recordHandler = function(item) {
+ item.decrypt(collectionKey);
+ items.push(item.cleartext);
+ };
+ collection.get();
+ return items;
+ };
+ let serverRecordDumpStr;
+ try {
+ Logger.logInfo("About to perform bookmark validation");
+ let clientTree = Async.promiseSpinningly(PlacesUtils.promiseBookmarksTree("", {
+ includeItemIds: true
+ }));
+ let serverRecords = getServerBookmarkState();
+ // We can't wait until catch to stringify this, since at that point it will have cycles.
+ serverRecordDumpStr = JSON.stringify(serverRecords);
+
+ let validator = new BookmarkValidator();
+ let {problemData} = validator.compareServerWithClient(serverRecords, clientTree);
+
+ for (let {name, count} of problemData.getSummary()) {
+ // Exclude mobile showing up on the server hackily so that we don't
+ // report it every time, see bug 1273234 and 1274394 for more information.
+ if (name === "serverUnexpected" && problemData.serverUnexpected.indexOf("mobile") >= 0) {
+ --count;
+ }
+ if (count) {
+ // Log this out before we assert. This is useful in the context of TPS logs, since we
+ // can see the IDs in the test files.
+ Logger.logInfo(`Validation problem: "${name}": ${JSON.stringify(problemData[name])}`);
+ }
+ Logger.AssertEqual(count, 0, `Bookmark validation error of type ${name}`);
+ }
+ } catch (e) {
+ // Dump the client records (should always be doable)
+ DumpBookmarks();
+ // Dump the server records if gotten them already.
+ if (serverRecordDumpStr) {
+ Logger.logInfo("Server bookmark records:\n" + serverRecordDumpStr + "\n");
+ }
+ this.DumpError("Bookmark validation failed", e);
+ }
+ Logger.logInfo("Bookmark validation finished");
+ },
+
+ ValidateCollection(engineName, ValidatorType) {
+ let serverRecordDumpStr;
+ let clientRecordDumpStr;
+ try {
+ Logger.logInfo(`About to perform validation for "${engineName}"`);
+ let engine = Weave.Service.engineManager.get(engineName);
+ let validator = new ValidatorType(engine);
+ let serverRecords = validator.getServerItems(engine);
+ let clientRecords = Async.promiseSpinningly(validator.getClientItems());
+ try {
+ // This substantially improves the logs for addons while not making a
+ // substantial difference for the other two
+ clientRecordDumpStr = JSON.stringify(clientRecords.map(r => {
+ let res = validator.normalizeClientItem(r);
+ delete res.original; // Try and prevent cyclic references
+ return res;
+ }));
+ } catch (e) {
+ // ignore the error, the dump string is just here to make debugging easier.
+ clientRecordDumpStr = "<Cyclic value>";
+ }
+ try {
+ serverRecordDumpStr = JSON.stringify(serverRecords);
+ } catch (e) {
+ // as above
+ serverRecordDumpStr = "<Cyclic value>";
+ }
+ let { problemData } = validator.compareClientWithServer(clientRecords, serverRecords);
+ for (let { name, count } of problemData.getSummary()) {
+ if (count) {
+ Logger.logInfo(`Validation problem: "${name}": ${JSON.stringify(problemData[name])}`);
+ }
+ Logger.AssertEqual(count, 0, `Validation error for "${engineName}" of type "${name}"`);
+ }
+ } catch (e) {
+ // Dump the client records if possible
+ if (clientRecordDumpStr) {
+ Logger.logInfo(`Client state for ${engineName}:\n${clientRecordDumpStr}\n`);
+ }
+ // Dump the server records if gotten them already.
+ if (serverRecordDumpStr) {
+ Logger.logInfo(`Server state for ${engineName}:\n${serverRecordDumpStr}\n`);
+ }
+ this.DumpError(`Validation failed for ${engineName}`, e);
+ }
+ Logger.logInfo(`Validation finished for ${engineName}`);
+ },
+
+ ValidatePasswords() {
+ return this.ValidateCollection("passwords", PasswordValidator);
+ },
+
+ ValidateForms() {
+ return this.ValidateCollection("forms", FormValidator);
+ },
+
+ ValidateAddons() {
+ return this.ValidateCollection("addons", AddonValidator);
+ },
+
+ RunNextTestAction: function() {
+ try {
+ if (this._currentAction >=
+ this._phaselist[this._currentPhase].length) {
+ // Run necessary validations and then finish up
+ if (this.shouldValidateBookmarks) {
+ this.ValidateBookmarks();
+ }
+ if (this.shouldValidatePasswords) {
+ this.ValidatePasswords();
+ }
+ if (this.shouldValidateForms) {
+ this.ValidateForms();
+ }
+ if (this.shouldValidateAddons) {
+ this.ValidateAddons();
+ }
+ // Force this early so that we run the validation and detect missing pings
+ // *before* we start shutting down, since if we do it after, the python
+ // code won't notice the failure.
+ SyncTelemetry.shutdown();
+ // we're all done
+ Logger.logInfo("test phase " + this._currentPhase + ": " +
+ (this._errors ? "FAIL" : "PASS"));
+ this._phaseFinished = true;
+ this.quit();
+ return;
+ }
+ this.seconds_since_epoch = prefs.getIntPref("tps.seconds_since_epoch", 0);
+ if (this.seconds_since_epoch)
+ this._usSinceEpoch = this.seconds_since_epoch * 1000 * 1000;
+ else {
+ this.DumpError("seconds-since-epoch not set");
+ return;
+ }
+
+ let phase = this._phaselist[this._currentPhase];
+ let action = phase[this._currentAction];
+ Logger.logInfo("starting action: " + action[0].name);
+ action[0].apply(this, action.slice(1));
+
+ // if we're in an async operation, don't continue on to the next action
+ if (this._operations_pending)
+ return;
+
+ this._currentAction++;
+ }
+ catch(e) {
+ if (Async.isShutdownException(e)) {
+ if (this._requestedQuit) {
+ Logger.logInfo("Sync aborted due to requested shutdown");
+ } else {
+ this.DumpError("Sync aborted due to shutdown, but we didn't request it");
+ }
+ } else {
+ this.DumpError("RunNextTestAction failed", e);
+ }
+ return;
+ }
+ this.RunNextTestAction();
+ },
+
+ _getFileRelativeToSourceRoot(testFileURL, relativePath) {
+ let file = fileProtocolHandler.getFileFromURLSpec(testFileURL);
+ let root = file // <root>/services/sync/tests/tps/test_foo.js
+ .parent // <root>/services/sync/tests/tps
+ .parent // <root>/services/sync/tests
+ .parent // <root>/services/sync
+ .parent // <root>/services
+ .parent // <root>
+ ;
+ root.appendRelativePath(relativePath);
+ return root;
+ },
+
+ // Attempt to load the sync_ping_schema.json and initialize `this.pingValidator`
+ // based on the source of the tps file. Assumes that it's at "../unit/sync_ping_schema.json"
+ // relative to the directory the tps test file (testFile) is contained in.
+ _tryLoadPingSchema(testFile) {
+ try {
+ let schemaFile = this._getFileRelativeToSourceRoot(testFile,
+ "services/sync/tests/unit/sync_ping_schema.json");
+
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"]
+ .createInstance(Ci.nsIFileInputStream);
+
+ let jsonReader = Cc["@mozilla.org/dom/json;1"]
+ .createInstance(Components.interfaces.nsIJSON);
+
+ stream.init(schemaFile, FileUtils.MODE_RDONLY, FileUtils.PERMS_FILE, 0);
+ let schema = jsonReader.decodeFromStream(stream, stream.available());
+ Logger.logInfo("Successfully loaded schema")
+
+ // Importing resource://testing-common/* isn't possible from within TPS,
+ // so we load Ajv manually.
+ let ajvFile = this._getFileRelativeToSourceRoot(testFile, "testing/modules/ajv-4.1.1.js");
+ let ajvURL = fileProtocolHandler.getURLSpecFromFile(ajvFile);
+ let ns = {};
+ Cu.import(ajvURL, ns);
+ let ajv = new ns.Ajv({ async: "co*" });
+ this.pingValidator = ajv.compile(schema);
+ } catch (e) {
+ this.DumpError(`Failed to load ping schema and AJV relative to "${testFile}".`, e);
+ }
+ },
+
+ /**
+ * Runs a single test phase.
+ *
+ * This is the main entry point for each phase of a test. The TPS command
+ * line driver loads this module and calls into the function with the
+ * arguments from the command line.
+ *
+ * When a phase is executed, the file is loaded as JavaScript into the
+ * current object.
+ *
+ * The following keys in the options argument have meaning:
+ *
+ * - ignoreUnusedEngines If true, unused engines will be unloaded from
+ * Sync. This makes output easier to parse and is
+ * useful for debugging test failures.
+ *
+ * @param file
+ * String URI of the file to open.
+ * @param phase
+ * String name of the phase to run.
+ * @param logpath
+ * String path of the log file to write to.
+ * @param options
+ * Object defining addition run-time options.
+ */
+ RunTestPhase: function (file, phase, logpath, options) {
+ try {
+ let settings = options || {};
+
+ Logger.init(logpath);
+ Logger.logInfo("Sync version: " + WEAVE_VERSION);
+ Logger.logInfo("Firefox buildid: " + Services.appinfo.appBuildID);
+ Logger.logInfo("Firefox version: " + Services.appinfo.version);
+ Logger.logInfo("Firefox source revision: " + (AppConstants.SOURCE_REVISION_URL || "unknown"));
+ Logger.logInfo("Firefox platform: " + AppConstants.platform);
+ Logger.logInfo('Firefox Accounts enabled: ' + this.fxaccounts_enabled);
+
+ // do some sync housekeeping
+ if (Weave.Service.isLoggedIn) {
+ this.DumpError("Sync logged in on startup...profile may be dirty");
+ return;
+ }
+
+ // Wait for Sync service to become ready.
+ if (!Weave.Status.ready) {
+ this.waitForEvent("weave:service:ready");
+ }
+
+ // We only want to do this if we modified the bookmarks this phase.
+ this.shouldValidateBookmarks = false;
+
+ // Always give Sync an extra tick to initialize. If we waited for the
+ // service:ready event, this is required to ensure all handlers have
+ // executed.
+ Utils.nextTick(this._executeTestPhase.bind(this, file, phase, settings));
+ } catch(e) {
+ this.DumpError("RunTestPhase failed", e);
+ return;
+ }
+ },
+
+ /**
+ * Executes a single test phase.
+ *
+ * This is called by RunTestPhase() after the environment is validated.
+ */
+ _executeTestPhase: function _executeTestPhase(file, phase, settings) {
+ try {
+ this.config = JSON.parse(prefs.getCharPref('tps.config'));
+ // parse the test file
+ Services.scriptloader.loadSubScript(file, this);
+ this._currentPhase = phase;
+ if (this._currentPhase.startsWith("cleanup-")) {
+ let profileToClean = Cc["@mozilla.org/toolkit/profile-service;1"]
+ .getService(Ci.nsIToolkitProfileService)
+ .selectedProfile.name;
+ this.phases[this._currentPhase] = profileToClean;
+ this.Phase(this._currentPhase, [[this.Cleanup]]);
+ } else {
+ // Don't bother doing this for cleanup phases.
+ this._tryLoadPingSchema(file);
+ }
+ let this_phase = this._phaselist[this._currentPhase];
+
+ if (this_phase == undefined) {
+ this.DumpError("invalid phase " + this._currentPhase);
+ return;
+ }
+
+ if (this.phases[this._currentPhase] == undefined) {
+ this.DumpError("no profile defined for phase " + this._currentPhase);
+ return;
+ }
+
+ // If we have restricted the active engines, unregister engines we don't
+ // care about.
+ if (settings.ignoreUnusedEngines && Array.isArray(this._enabledEngines)) {
+ let names = {};
+ for (let name of this._enabledEngines) {
+ names[name] = true;
+ }
+
+ for (let engine of Weave.Service.engineManager.getEnabled()) {
+ if (!(engine.name in names)) {
+ Logger.logInfo("Unregistering unused engine: " + engine.name);
+ Weave.Service.engineManager.unregister(engine);
+ }
+ }
+ }
+ Logger.logInfo("Starting phase " + this._currentPhase);
+
+ Logger.logInfo("setting client.name to " + this.phases[this._currentPhase]);
+ Weave.Svc.Prefs.set("client.name", this.phases[this._currentPhase]);
+
+ this._interceptSyncTelemetry();
+
+ // start processing the test actions
+ this._currentAction = 0;
+ }
+ catch(e) {
+ this.DumpError("_executeTestPhase failed", e);
+ return;
+ }
+ },
+
+ /**
+ * Override sync telemetry functions so that we can detect errors generating
+ * the sync ping, and count how many pings we report.
+ */
+ _interceptSyncTelemetry() {
+ let originalObserve = SyncTelemetry.observe;
+ let self = this;
+ SyncTelemetry.observe = function() {
+ try {
+ originalObserve.apply(this, arguments);
+ } catch (e) {
+ self.DumpError("Error when generating sync telemetry", e);
+ }
+ };
+ SyncTelemetry.submit = record => {
+ Logger.logInfo("Intercepted sync telemetry submission: " + JSON.stringify(record));
+ this._syncsReportedViaTelemetry += record.syncs.length + (record.discarded || 0);
+ if (record.discarded) {
+ if (record.syncs.length != SyncTelemetry.maxPayloadCount) {
+ this.DumpError("Syncs discarded from ping before maximum payload count reached");
+ }
+ }
+ // If this is the shutdown ping, check and see that the telemetry saw all the syncs.
+ if (record.why === "shutdown") {
+ // If we happen to sync outside of tps manually causing it, its not an
+ // error in the telemetry, so we only complain if we didn't see all of them.
+ if (this._syncsReportedViaTelemetry < this._syncCount) {
+ this.DumpError(`Telemetry missed syncs: Saw ${this._syncsReportedViaTelemetry}, should have >= ${this._syncCount}.`);
+ }
+ }
+ if (!record.syncs.length) {
+ // Note: we're overwriting submit, so this is called even for pings that
+ // may have no data (which wouldn't be submitted to telemetry and would
+ // fail validation).
+ return;
+ }
+ if (!this.pingValidator(record)) {
+ // Note that we already logged the record.
+ this.DumpError("Sync ping validation failed with errors: " + JSON.stringify(this.pingValidator.errors));
+ }
+ };
+ },
+
+ /**
+ * Register a single phase with the test harness.
+ *
+ * This is called when loading individual test files.
+ *
+ * @param phasename
+ * String name of the phase being loaded.
+ * @param fnlist
+ * Array of functions/actions to perform.
+ */
+ Phase: function Test__Phase(phasename, fnlist) {
+ if (Object.keys(this._phaselist).length === 0) {
+ // This is the first phase, add that we need to login.
+ fnlist.unshift([this.Login]);
+ }
+ this._phaselist[phasename] = fnlist;
+ },
+
+ /**
+ * Restrict enabled Sync engines to a specified set.
+ *
+ * This can be called by a test to limit what engines are enabled. It is
+ * recommended to call it to reduce the overhead and log clutter for the
+ * test.
+ *
+ * The "clients" engine is special and is always enabled, so there is no
+ * need to specify it.
+ *
+ * @param names
+ * Array of Strings for engines to make active during the test.
+ */
+ EnableEngines: function EnableEngines(names) {
+ if (!Array.isArray(names)) {
+ throw new Error("Argument to RestrictEngines() is not an array: "
+ + typeof(names));
+ }
+
+ this._enabledEngines = names;
+ },
+
+ RunMozmillTest: function TPS__RunMozmillTest(testfile) {
+ var mozmillfile = Cc["@mozilla.org/file/local;1"]
+ .createInstance(Ci.nsILocalFile);
+ if (hh.oscpu.toLowerCase().indexOf('windows') > -1) {
+ let re = /\/(\w)\/(.*)/;
+ this.config.testdir = this.config.testdir.replace(re, "$1://$2").replace(/\//g, "\\");
+ }
+ mozmillfile.initWithPath(this.config.testdir);
+ mozmillfile.appendRelativePath(testfile);
+ Logger.logInfo("Running mozmill test " + mozmillfile.path);
+
+ var frame = {};
+ Cu.import('resource://mozmill/modules/frame.js', frame);
+ frame.events.addListener('setTest', this.MozmillSetTestListener.bind(this));
+ frame.events.addListener('endTest', this.MozmillEndTestListener.bind(this));
+ this.StartAsyncOperation();
+ frame.runTestFile(mozmillfile.path, null);
+ },
+
+ /**
+ * Return an object that when called, will block until the named event
+ * is observed. This is similar to waitForEvent, although is typically safer
+ * if you need to do some other work that may make the event fire.
+ *
+ * eg:
+ * doSomething(); // causes the event to be fired.
+ * waitForEvent("something");
+ * is risky as the call to doSomething may trigger the event before the
+ * waitForEvent call is made. Contrast with:
+ *
+ * let waiter = createEventWaiter("something"); // does *not* block.
+ * doSomething(); // causes the event to be fired.
+ * waiter(); // will return as soon as the event fires, even if it fires
+ * // before this function is called.
+ *
+ * @param aEventName
+ * String event to wait for.
+ */
+ createEventWaiter(aEventName) {
+ Logger.logInfo("Setting up wait for " + aEventName + "...");
+ let cb = Async.makeSpinningCallback();
+ Svc.Obs.add(aEventName, cb);
+ return function() {
+ try {
+ cb.wait();
+ } finally {
+ Svc.Obs.remove(aEventName, cb);
+ Logger.logInfo(aEventName + " observed!");
+ }
+ }
+ },
+
+
+ /**
+ * Synchronously wait for the named event to be observed.
+ *
+ * When the event is observed, the function will wait an extra tick before
+ * returning.
+ *
+ * Note that in general, you should probably use createEventWaiter unless you
+ * are 100% sure that the event being waited on can only be sent after this
+ * call adds the listener.
+ *
+ * @param aEventName
+ * String event to wait for.
+ */
+ waitForEvent: function waitForEvent(aEventName) {
+ this.createEventWaiter(aEventName)();
+ },
+
+ /**
+ * Waits for Sync to logged in before returning
+ */
+ waitForSetupComplete: function waitForSetup() {
+ if (!this._setupComplete) {
+ this.waitForEvent("weave:service:setup-complete");
+ }
+ },
+
+ /**
+ * Waits for Sync to be finished before returning
+ */
+ waitForSyncFinished: function TPS__waitForSyncFinished() {
+ if (this._syncActive) {
+ this.waitForEvent("weave:service:sync:finished");
+ }
+ },
+
+ /**
+ * Waits for Sync to start tracking before returning.
+ */
+ waitForTracking: function waitForTracking() {
+ if (!this._isTracking) {
+ this.waitForEvent("weave:engine:start-tracking");
+ }
+ },
+
+ /**
+ * Login on the server
+ */
+ Login: function Login(force) {
+ if (Authentication.isLoggedIn && !force) {
+ return;
+ }
+
+ Logger.logInfo("Setting client credentials and login.");
+ let account = this.fxaccounts_enabled ? this.config.fx_account
+ : this.config.sync_account;
+ Authentication.signIn(account);
+ this.waitForSetupComplete();
+ Logger.AssertEqual(Weave.Status.service, Weave.STATUS_OK, "Weave status OK");
+ this.waitForTracking();
+ // If fxaccounts is enabled we get an initial sync at login time - let
+ // that complete.
+ if (this.fxaccounts_enabled) {
+ this._triggeredSync = true;
+ this.waitForSyncFinished();
+ }
+ },
+
+ /**
+ * Triggers a sync operation
+ *
+ * @param {String} [wipeAction]
+ * Type of wipe to perform (resetClient, wipeClient, wipeRemote)
+ *
+ */
+ Sync: function TPS__Sync(wipeAction) {
+ Logger.logInfo("Executing Sync" + (wipeAction ? ": " + wipeAction : ""));
+
+ // Force a wipe action if requested. In case of an initial sync the pref
+ // will be overwritten by Sync itself (see bug 992198), so ensure that we
+ // also handle it via the "weave:service:setup-complete" notification.
+ if (wipeAction) {
+ this._syncWipeAction = wipeAction;
+ Weave.Svc.Prefs.set("firstSync", wipeAction);
+ }
+ else {
+ Weave.Svc.Prefs.reset("firstSync");
+ }
+
+ this.Login(false);
+ ++this._syncCount;
+
+ this._triggeredSync = true;
+ this.StartAsyncOperation();
+ Weave.Service.sync();
+ Logger.logInfo("Sync is complete");
+ },
+
+ WipeServer: function TPS__WipeServer() {
+ Logger.logInfo("Wiping data from server.");
+
+ this.Login(false);
+ Weave.Service.login();
+ Weave.Service.wipeServer();
+ },
+
+ /**
+ * Action which ensures changes are being tracked before returning.
+ */
+ EnsureTracking: function EnsureTracking() {
+ this.Login(false);
+ this.waitForTracking();
+ }
+};
+
+var Addons = {
+ install: function Addons__install(addons) {
+ TPS.HandleAddons(addons, ACTION_ADD);
+ },
+ setEnabled: function Addons__setEnabled(addons, state) {
+ TPS.HandleAddons(addons, ACTION_SET_ENABLED, state);
+ },
+ uninstall: function Addons__uninstall(addons) {
+ TPS.HandleAddons(addons, ACTION_DELETE);
+ },
+ verify: function Addons__verify(addons, state) {
+ TPS.HandleAddons(addons, ACTION_VERIFY, state);
+ },
+ verifyNot: function Addons__verifyNot(addons) {
+ TPS.HandleAddons(addons, ACTION_VERIFY_NOT);
+ },
+ skipValidation() {
+ TPS.shouldValidateAddons = false;
+ }
+};
+
+var Bookmarks = {
+ add: function Bookmarks__add(bookmarks) {
+ TPS.HandleBookmarks(bookmarks, ACTION_ADD);
+ },
+ modify: function Bookmarks__modify(bookmarks) {
+ TPS.HandleBookmarks(bookmarks, ACTION_MODIFY);
+ },
+ delete: function Bookmarks__delete(bookmarks) {
+ TPS.HandleBookmarks(bookmarks, ACTION_DELETE);
+ },
+ verify: function Bookmarks__verify(bookmarks) {
+ TPS.HandleBookmarks(bookmarks, ACTION_VERIFY);
+ },
+ verifyNot: function Bookmarks__verifyNot(bookmarks) {
+ TPS.HandleBookmarks(bookmarks, ACTION_VERIFY_NOT);
+ },
+ skipValidation() {
+ TPS.shouldValidateBookmarks = false;
+ }
+};
+
+var Formdata = {
+ add: function Formdata__add(formdata) {
+ this.HandleForms(formdata, ACTION_ADD);
+ },
+ delete: function Formdata__delete(formdata) {
+ this.HandleForms(formdata, ACTION_DELETE);
+ },
+ verify: function Formdata__verify(formdata) {
+ this.HandleForms(formdata, ACTION_VERIFY);
+ },
+ verifyNot: function Formdata__verifyNot(formdata) {
+ this.HandleForms(formdata, ACTION_VERIFY_NOT);
+ }
+};
+
+var History = {
+ add: function History__add(history) {
+ this.HandleHistory(history, ACTION_ADD);
+ },
+ delete: function History__delete(history) {
+ this.HandleHistory(history, ACTION_DELETE);
+ },
+ verify: function History__verify(history) {
+ this.HandleHistory(history, ACTION_VERIFY);
+ },
+ verifyNot: function History__verifyNot(history) {
+ this.HandleHistory(history, ACTION_VERIFY_NOT);
+ }
+};
+
+var Passwords = {
+ add: function Passwords__add(passwords) {
+ this.HandlePasswords(passwords, ACTION_ADD);
+ },
+ modify: function Passwords__modify(passwords) {
+ this.HandlePasswords(passwords, ACTION_MODIFY);
+ },
+ delete: function Passwords__delete(passwords) {
+ this.HandlePasswords(passwords, ACTION_DELETE);
+ },
+ verify: function Passwords__verify(passwords) {
+ this.HandlePasswords(passwords, ACTION_VERIFY);
+ },
+ verifyNot: function Passwords__verifyNot(passwords) {
+ this.HandlePasswords(passwords, ACTION_VERIFY_NOT);
+ },
+ skipValidation() {
+ TPS.shouldValidatePasswords = false;
+ }
+};
+
+var Prefs = {
+ modify: function Prefs__modify(prefs) {
+ TPS.HandlePrefs(prefs, ACTION_MODIFY);
+ },
+ verify: function Prefs__verify(prefs) {
+ TPS.HandlePrefs(prefs, ACTION_VERIFY);
+ }
+};
+
+var Tabs = {
+ add: function Tabs__add(tabs) {
+ TPS.StartAsyncOperation();
+ TPS.HandleTabs(tabs, ACTION_ADD);
+ },
+ verify: function Tabs__verify(tabs) {
+ TPS.HandleTabs(tabs, ACTION_VERIFY);
+ },
+ verifyNot: function Tabs__verifyNot(tabs) {
+ TPS.HandleTabs(tabs, ACTION_VERIFY_NOT);
+ }
+};
+
+var Windows = {
+ add: function Window__add(aWindow) {
+ TPS.StartAsyncOperation();
+ TPS.HandleWindows(aWindow, ACTION_ADD);
+ },
+};
+
+// Initialize TPS
+TPS._init();