author     Matt A. Tobin <mattatobin@localhost.localdomain>  2018-02-02 04:16:08 -0500
committer  Matt A. Tobin <mattatobin@localhost.localdomain>  2018-02-02 04:16:08 -0500
commit     5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree       10027f336435511475e392454359edea8e25895d /services/common
parent     49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
Add m-esr52 at 52.6.0
Diffstat (limited to 'services/common')
-rw-r--r--  services/common/async.js  220
-rw-r--r--  services/common/blocklist-clients.js  310
-rw-r--r--  services/common/blocklist-updater.js  117
-rw-r--r--  services/common/hawkclient.js  346
-rw-r--r--  services/common/hawkrequest.js  198
-rw-r--r--  services/common/kinto-http-client.js  1891
-rw-r--r--  services/common/kinto-offline-client.js  4286
-rw-r--r--  services/common/logmanager.js  331
-rw-r--r--  services/common/modules-testing/logging.js  54
-rw-r--r--  services/common/modules-testing/storageserver.js  1677
-rw-r--r--  services/common/modules-testing/utils.js  42
-rw-r--r--  services/common/moz.build  48
-rw-r--r--  services/common/observers.js  150
-rw-r--r--  services/common/rest.js  764
-rw-r--r--  services/common/services-common.js  11
-rw-r--r--  services/common/servicesComponents.manifest  2
-rw-r--r--  services/common/stringbundle.js  203
-rw-r--r--  services/common/tests/mach_commands.py  111
-rw-r--r--  services/common/tests/moz.build  11
-rw-r--r--  services/common/tests/run_storage_server.js  25
-rw-r--r--  services/common/tests/unit/head_global.js  29
-rw-r--r--  services/common/tests/unit/head_helpers.js  172
-rw-r--r--  services/common/tests/unit/head_http.js  29
-rw-r--r--  services/common/tests/unit/moz.build  9
-rw-r--r--  services/common/tests/unit/test_async_chain.js  30
-rw-r--r--  services/common/tests/unit/test_async_querySpinningly.js  103
-rw-r--r--  services/common/tests/unit/test_blocklist_certificates.js  224
-rw-r--r--  services/common/tests/unit/test_blocklist_clients.js  412
-rw-r--r--  services/common/tests/unit/test_blocklist_signatures.js  510
-rw-r--r--  services/common/tests/unit/test_blocklist_signatures/collection_signing_ee.pem.certspec  5
-rw-r--r--  services/common/tests/unit/test_blocklist_signatures/collection_signing_int.pem.certspec  4
-rw-r--r--  services/common/tests/unit/test_blocklist_signatures/collection_signing_root.pem.certspec  4
-rw-r--r--  services/common/tests/unit/test_blocklist_signatures/moz.build  14
-rw-r--r--  services/common/tests/unit/test_blocklist_updater.js  173
-rw-r--r--  services/common/tests/unit/test_hawkclient.js  520
-rw-r--r--  services/common/tests/unit/test_hawkrequest.js  235
-rw-r--r--  services/common/tests/unit/test_kinto.js  412
-rw-r--r--  services/common/tests/unit/test_load_modules.js  69
-rw-r--r--  services/common/tests/unit/test_logmanager.js  229
-rw-r--r--  services/common/tests/unit/test_observers.js  84
-rw-r--r--  services/common/tests/unit/test_restrequest.js  873
-rw-r--r--  services/common/tests/unit/test_storage_adapter.js  269
-rw-r--r--  services/common/tests/unit/test_storage_adapter/empty.sqlite (bin)  0 -> 2048 bytes
-rw-r--r--  services/common/tests/unit/test_storage_server.js  692
-rw-r--r--  services/common/tests/unit/test_tokenauthenticatedrequest.js  52
-rw-r--r--  services/common/tests/unit/test_tokenserverclient.js  466
-rw-r--r--  services/common/tests/unit/test_utils_atob.js  11
-rw-r--r--  services/common/tests/unit/test_utils_convert_string.js  132
-rw-r--r--  services/common/tests/unit/test_utils_dateprefs.js  85
-rw-r--r--  services/common/tests/unit/test_utils_deepCopy.js  18
-rw-r--r--  services/common/tests/unit/test_utils_encodeBase32.js  51
-rw-r--r--  services/common/tests/unit/test_utils_encodeBase64URL.js  27
-rw-r--r--  services/common/tests/unit/test_utils_ensureMillisecondsTimestamp.js  23
-rw-r--r--  services/common/tests/unit/test_utils_json.js  40
-rw-r--r--  services/common/tests/unit/test_utils_makeURI.js  66
-rw-r--r--  services/common/tests/unit/test_utils_namedTimer.js  69
-rw-r--r--  services/common/tests/unit/test_utils_sets.js  72
-rw-r--r--  services/common/tests/unit/test_utils_utf8.js  11
-rw-r--r--  services/common/tests/unit/test_utils_uuid.js  12
-rw-r--r--  services/common/tests/unit/xpcshell.ini  53
-rw-r--r--  services/common/tokenserverclient.js  462
-rw-r--r--  services/common/utils.js  645
62 files changed, 18193 insertions, 0 deletions
diff --git a/services/common/async.js b/services/common/async.js
new file mode 100644
index 000000000..aa977cbef
--- /dev/null
+++ b/services/common/async.js
@@ -0,0 +1,220 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["Async"];
+
+var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+
+// Constants for makeSyncCallback, waitForSyncCallback.
+const CB_READY = {};
+const CB_COMPLETE = {};
+const CB_FAIL = {};
+
+const REASON_ERROR = Ci.mozIStorageStatementCallback.REASON_ERROR;
+
+Cu.import("resource://gre/modules/Services.jsm");
+
+/*
+ * Helpers for various async operations.
+ */
+this.Async = {
+
+ /**
+ * Execute an arbitrary number of asynchronous functions one after the
+ * other, passing the callback arguments on to the next one. All functions
+ * must take a callback function as their last argument. The 'this' object
+ * will be whatever object chain() was invoked on.
+ *
+ * @usage this._chain = Async.chain;
+ * this._chain(this.foo, this.bar, this.baz)(args, for, foo)
+ *
+ * This is equivalent to:
+ *
+ * let self = this;
+ * self.foo(args, for, foo, function (bars, args) {
+ * self.bar(bars, args, function (baz, params) {
+ * self.baz(baz, params);
+ * });
+ * });
+ */
+ chain: function chain() {
+ let funcs = Array.slice(arguments);
+ let thisObj = this;
+ return function callback() {
+ if (funcs.length) {
+ let args = Array.slice(arguments).concat(callback);
+ let f = funcs.shift();
+ f.apply(thisObj, args);
+ }
+ };
+ },
+
+ /**
+ * Helpers for making asynchronous calls within a synchronous API possible.
+ *
+ * If you value your sanity, do not look closely at the following functions.
+ */
+
+ /**
+ * Create a sync callback that remembers state, in particular whether it has
+ * been called.
+ * The returned callback can be called directly passing an optional arg which
+ * will be returned by waitForSyncCallback(). The callback also has a
+ * .throw() method, which takes an error object and will cause
+ * waitForSyncCallback to fail with the error object thrown as an exception
+ * (but note that the .throw method *does not* itself throw - it just causes
+ * the wait function to throw).
+ */
+ makeSyncCallback: function makeSyncCallback() {
+ // The main callback remembers the value it was passed, and that it got data.
+ let onComplete = function onComplete(data) {
+ onComplete.state = CB_COMPLETE;
+ onComplete.value = data;
+ };
+
+ // Initialize private callback data in preparation for being called.
+ onComplete.state = CB_READY;
+ onComplete.value = null;
+
+ // Allow an alternate callback to trigger an exception to be thrown.
+ onComplete.throw = function onComplete_throw(data) {
+ onComplete.state = CB_FAIL;
+ onComplete.value = data;
+ };
+
+ return onComplete;
+ },
+
+ /**
+ * Wait for a sync callback to finish.
+ */
+ waitForSyncCallback: function waitForSyncCallback(callback) {
+ // Grab the current thread so we can make it give up priority.
+ let thread = Cc["@mozilla.org/thread-manager;1"].getService().currentThread;
+
+ // Keep waiting until our callback is triggered (unless the app is quitting).
+ while (Async.checkAppReady() && callback.state == CB_READY) {
+ thread.processNextEvent(true);
+ }
+
+ // Reset the state of the callback to prepare for another call.
+ let state = callback.state;
+ callback.state = CB_READY;
+
+ // Throw the value the callback decided to fail with.
+ if (state == CB_FAIL) {
+ throw callback.value;
+ }
+
+ // Return the value passed to the callback.
+ return callback.value;
+ },
+
+ /**
+ * Check if the app is still ready (not quitting).
+ */
+ checkAppReady: function checkAppReady() {
+ // Watch for app-quit notification to stop any sync calls
+ Services.obs.addObserver(function onQuitApplication() {
+ Services.obs.removeObserver(onQuitApplication, "quit-application");
+ Async.checkAppReady = function() {
+ let exception = Components.Exception("App. Quitting", Cr.NS_ERROR_ABORT);
+ exception.appIsShuttingDown = true;
+ throw exception;
+ };
+ }, "quit-application", false);
+ // In the common case, checkAppReady just returns true
+ return (Async.checkAppReady = function() { return true; })();
+ },
+
+ /**
+ * Check if the passed exception is one raised by checkAppReady. Typically
+ * this will be used in exception handlers to allow such exceptions to
+ * make their way to the top frame and allow the app to actually terminate.
+ */
+ isShutdownException(exception) {
+ return exception && exception.appIsShuttingDown === true;
+ },
+
+ /**
+ * Return the two things you need to make an asynchronous call synchronous
+ * by spinning the event loop.
+ */
+ makeSpinningCallback: function makeSpinningCallback() {
+ let cb = Async.makeSyncCallback();
+ function callback(error, ret) {
+ if (error)
+ cb.throw(error);
+ else
+ cb(ret);
+ }
+ callback.wait = () => Async.waitForSyncCallback(cb);
+ return callback;
+ },
+
+ // Prototype for mozIStorageCallback, used in querySpinningly.
+ // This allows us to define the handle* functions just once rather
+ // than on every querySpinningly invocation.
+ _storageCallbackPrototype: {
+ results: null,
+
+ // These are set by queryAsync.
+ names: null,
+ syncCb: null,
+
+ handleResult: function handleResult(results) {
+ if (!this.names) {
+ return;
+ }
+ if (!this.results) {
+ this.results = [];
+ }
+ let row;
+ while ((row = results.getNextRow()) != null) {
+ let item = {};
+ for (let name of this.names) {
+ item[name] = row.getResultByName(name);
+ }
+ this.results.push(item);
+ }
+ },
+ handleError: function handleError(error) {
+ this.syncCb.throw(error);
+ },
+ handleCompletion: function handleCompletion(reason) {
+
+ // If we got an error, handleError will also have been called, so don't
+ // call the callback! We never cancel statements, so we don't need to
+ // address that quandary.
+ if (reason == REASON_ERROR)
+ return;
+
+ // If we were called with column names but didn't find any results,
+ // the calling code probably still expects an array as a return value.
+ if (this.names && !this.results) {
+ this.results = [];
+ }
+ this.syncCb(this.results);
+ }
+ },
+
+ querySpinningly: function querySpinningly(query, names) {
+ // 'Synchronously' asyncExecute, fetching all results by name.
+ let storageCallback = Object.create(Async._storageCallbackPrototype);
+ storageCallback.names = names;
+ storageCallback.syncCb = Async.makeSyncCallback();
+ query.executeAsync(storageCallback);
+ return Async.waitForSyncCallback(storageCallback.syncCb);
+ },
+
+ promiseSpinningly(promise) {
+ let cb = Async.makeSpinningCallback();
+ promise.then(result => {
+ cb(null, result);
+ }, err => {
+ cb(err || new Error("Promise rejected without explicit error"));
+ });
+ return cb.wait();
+ },
+};
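
The spinning helpers above let an otherwise synchronous caller block on asynchronous work by processing events on the current thread. A minimal usage sketch (illustrative only; assumes a chrome-privileged scope such as an xpcshell test, and someAsyncOperation is a hypothetical callback-style async function):

    Components.utils.import("resource://services-common/async.js");

    // Wrap a callback-style async function behind a synchronous API.
    let cb = Async.makeSpinningCallback();
    someAsyncOperation(function (error, result) {  // hypothetical async function
      cb(error, result);                           // a truthy error is re-thrown by wait()
    });
    let result = cb.wait();                        // spins the event loop until the callback fires

    // Or block on a promise directly.
    let value = Async.promiseSpinningly(Promise.resolve(42));  // 42
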
diff --git a/services/common/blocklist-clients.js b/services/common/blocklist-clients.js
new file mode 100644
index 000000000..fc51aaca4
--- /dev/null
+++ b/services/common/blocklist-clients.js
@@ -0,0 +1,310 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = ["AddonBlocklistClient",
+ "GfxBlocklistClient",
+ "OneCRLBlocklistClient",
+ "PluginBlocklistClient",
+ "FILENAME_ADDONS_JSON",
+ "FILENAME_GFX_JSON",
+ "FILENAME_PLUGINS_JSON"];
+
+const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
+
+Cu.import("resource://gre/modules/Services.jsm");
+const { Task } = Cu.import("resource://gre/modules/Task.jsm");
+const { OS } = Cu.import("resource://gre/modules/osfile.jsm");
+Cu.importGlobalProperties(["fetch"]);
+
+const { loadKinto } = Cu.import("resource://services-common/kinto-offline-client.js");
+const { KintoHttpClient } = Cu.import("resource://services-common/kinto-http-client.js");
+const { CanonicalJSON } = Components.utils.import("resource://gre/modules/CanonicalJSON.jsm");
+
+const PREF_SETTINGS_SERVER = "services.settings.server";
+const PREF_BLOCKLIST_BUCKET = "services.blocklist.bucket";
+const PREF_BLOCKLIST_ONECRL_COLLECTION = "services.blocklist.onecrl.collection";
+const PREF_BLOCKLIST_ONECRL_CHECKED_SECONDS = "services.blocklist.onecrl.checked";
+const PREF_BLOCKLIST_ADDONS_COLLECTION = "services.blocklist.addons.collection";
+const PREF_BLOCKLIST_ADDONS_CHECKED_SECONDS = "services.blocklist.addons.checked";
+const PREF_BLOCKLIST_PLUGINS_COLLECTION = "services.blocklist.plugins.collection";
+const PREF_BLOCKLIST_PLUGINS_CHECKED_SECONDS = "services.blocklist.plugins.checked";
+const PREF_BLOCKLIST_GFX_COLLECTION = "services.blocklist.gfx.collection";
+const PREF_BLOCKLIST_GFX_CHECKED_SECONDS = "services.blocklist.gfx.checked";
+const PREF_BLOCKLIST_ENFORCE_SIGNING = "services.blocklist.signing.enforced";
+
+const INVALID_SIGNATURE = "Invalid content/signature";
+
+this.FILENAME_ADDONS_JSON = "blocklist-addons.json";
+this.FILENAME_GFX_JSON = "blocklist-gfx.json";
+this.FILENAME_PLUGINS_JSON = "blocklist-plugins.json";
+
+function mergeChanges(localRecords, changes) {
+ // Kinto.js adds attributes to local records that aren't present on server.
+ // (e.g. _status)
+ const stripPrivateProps = (obj) => {
+ return Object.keys(obj).reduce((current, key) => {
+ if (!key.startsWith("_")) {
+ current[key] = obj[key];
+ }
+ return current;
+ }, {});
+ };
+
+ const records = {};
+ // Local records by id.
+ localRecords.forEach((record) => records[record.id] = stripPrivateProps(record));
+ // All existing records are replaced by the version from the server.
+ changes.forEach((record) => records[record.id] = record);
+
+ return Object.values(records)
+ // Filter out deleted records.
+ .filter((record) => record.deleted != true)
+ // Sort list by record id.
+ .sort((a, b) => a.id < b.id ? -1 : a.id > b.id ? 1 : 0);
+}
+
+
+function fetchCollectionMetadata(collection) {
+ const client = new KintoHttpClient(collection.api.remote);
+ return client.bucket(collection.bucket).collection(collection.name).getData()
+ .then(result => {
+ return result.signature;
+ });
+}
+
+function fetchRemoteCollection(collection) {
+ const client = new KintoHttpClient(collection.api.remote);
+ return client.bucket(collection.bucket)
+ .collection(collection.name)
+ .listRecords({sort: "id"});
+}
+
+/**
+ * Helper to instantiate a Kinto client based on preferences for remote server
+ * URL and bucket name. It uses the `FirefoxAdapter` which relies on SQLite to
+ * persist the local DB.
+ */
+function kintoClient() {
+ let base = Services.prefs.getCharPref(PREF_SETTINGS_SERVER);
+ let bucket = Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET);
+
+ let Kinto = loadKinto();
+
+ let FirefoxAdapter = Kinto.adapters.FirefoxAdapter;
+
+ let config = {
+ remote: base,
+ bucket: bucket,
+ adapter: FirefoxAdapter,
+ };
+
+ return new Kinto(config);
+}
+
+
+class BlocklistClient {
+
+ constructor(collectionName, lastCheckTimePref, processCallback, signerName) {
+ this.collectionName = collectionName;
+ this.lastCheckTimePref = lastCheckTimePref;
+ this.processCallback = processCallback;
+ this.signerName = signerName;
+ }
+
+ validateCollectionSignature(payload, collection, ignoreLocal) {
+ return Task.spawn((function* () {
+ // this is a content-signature field from an autograph response.
+ const {x5u, signature} = yield fetchCollectionMetadata(collection);
+ const certChain = yield fetch(x5u).then((res) => res.text());
+
+ const verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
+ .createInstance(Ci.nsIContentSignatureVerifier);
+
+ let toSerialize;
+ if (ignoreLocal) {
+ toSerialize = {
+ last_modified: `${payload.last_modified}`,
+ data: payload.data
+ };
+ } else {
+ const localRecords = (yield collection.list()).data;
+ const records = mergeChanges(localRecords, payload.changes);
+ toSerialize = {
+ last_modified: `${payload.lastModified}`,
+ data: records
+ };
+ }
+
+ const serialized = CanonicalJSON.stringify(toSerialize);
+
+ if (verifier.verifyContentSignature(serialized, "p384ecdsa=" + signature,
+ certChain,
+ this.signerName)) {
+ // In case the hash is valid, apply the changes locally.
+ return payload;
+ }
+ throw new Error(INVALID_SIGNATURE);
+ }).bind(this));
+ }
+
+ /**
+ * Synchronize from Kinto server, if necessary.
+ *
+ * @param {int} lastModified the lastModified date (on the server) for
+ the remote collection.
+ * @param {Date} serverTime the current date returned by the server.
+ * @return {Promise} which rejects on sync or process failure.
+ */
+ maybeSync(lastModified, serverTime) {
+ let db = kintoClient();
+ let opts = {};
+ let enforceCollectionSigning =
+ Services.prefs.getBoolPref(PREF_BLOCKLIST_ENFORCE_SIGNING);
+
+ // if there is a signerName and collection signing is enforced, add a
+ // hook for incoming changes that validates the signature
+ if (this.signerName && enforceCollectionSigning) {
+ opts.hooks = {
+ "incoming-changes": [this.validateCollectionSignature.bind(this)]
+ }
+ }
+
+ let collection = db.collection(this.collectionName, opts);
+
+ return Task.spawn((function* syncCollection() {
+ try {
+ yield collection.db.open();
+
+ let collectionLastModified = yield collection.db.getLastModified();
+ // If the data is up to date, there's no need to sync. We still need
+ // to record the fact that a check happened.
+ if (lastModified <= collectionLastModified) {
+ this.updateLastCheck(serverTime);
+ return;
+ }
+ // Fetch changes from server.
+ try {
+ let syncResult = yield collection.sync();
+ if (!syncResult.ok) {
+ throw new Error("Sync failed");
+ }
+ } catch (e) {
+ if (e.message == INVALID_SIGNATURE) {
+ // if sync fails with a signature error, it's likely that our
+ // local data has been modified in some way.
+ // We will attempt to fix this by retrieving the whole
+ // remote collection.
+ let payload = yield fetchRemoteCollection(collection);
+ yield this.validateCollectionSignature(payload, collection, true);
+ // if the signature is good (we haven't thrown), and the remote
+ // last_modified is newer than the local last_modified, replace the
+ // local data
+ const localLastModified = yield collection.db.getLastModified();
+ if (payload.last_modified >= localLastModified) {
+ yield collection.clear();
+ yield collection.loadDump(payload.data);
+ }
+ } else {
+ throw e;
+ }
+ }
+ // Read local collection of records.
+ let list = yield collection.list();
+
+ yield this.processCallback(list.data);
+
+ // Track last update.
+ this.updateLastCheck(serverTime);
+ } finally {
+ collection.db.close();
+ }
+ }).bind(this));
+ }
+
+ /**
+ * Save the last time the server was checked in the user's prefs.
+ *
+ * @param {Date} serverTime the current date returned by the server.
+ */
+ updateLastCheck(serverTime) {
+ let checkedServerTimeInSeconds = Math.round(serverTime / 1000);
+ Services.prefs.setIntPref(this.lastCheckTimePref, checkedServerTimeInSeconds);
+ }
+}
+
+/**
+ * Revoke the appropriate certificates based on the records from the blocklist.
+ *
+ * @param {Object} records current records in the local db.
+ */
+function* updateCertBlocklist(records) {
+ let certList = Cc["@mozilla.org/security/certblocklist;1"]
+ .getService(Ci.nsICertBlocklist);
+ for (let item of records) {
+ try {
+ if (item.issuerName && item.serialNumber) {
+ certList.revokeCertByIssuerAndSerial(item.issuerName,
+ item.serialNumber);
+ } else if (item.subject && item.pubKeyHash) {
+ certList.revokeCertBySubjectAndPubKey(item.subject,
+ item.pubKeyHash);
+ }
+ } catch (e) {
+ // prevent errors relating to individual blocklist entries from
+ // causing sync to fail. At some point in the future, we may want to
+ // accumulate telemetry on these failures.
+ Cu.reportError(e);
+ }
+ }
+ certList.saveEntries();
+}
+
+/**
+ * Write list of records into JSON file, and notify nsBlocklistService.
+ *
+ * @param {String} filename path relative to profile dir.
+ * @param {Object} records current records in the local db.
+ */
+function* updateJSONBlocklist(filename, records) {
+ // Write JSON dump for synchronous load at startup.
+ const path = OS.Path.join(OS.Constants.Path.profileDir, filename);
+ const serialized = JSON.stringify({data: records}, null, 2);
+ try {
+ yield OS.File.writeAtomic(path, serialized, {tmpPath: path + ".tmp"});
+
+ // Notify change to `nsBlocklistService`
+ const eventData = {filename: filename};
+ Services.cpmm.sendAsyncMessage("Blocklist:reload-from-disk", eventData);
+ } catch(e) {
+ Cu.reportError(e);
+ }
+}
+
+
+this.OneCRLBlocklistClient = new BlocklistClient(
+ Services.prefs.getCharPref(PREF_BLOCKLIST_ONECRL_COLLECTION),
+ PREF_BLOCKLIST_ONECRL_CHECKED_SECONDS,
+ updateCertBlocklist,
+ "onecrl.content-signature.mozilla.org"
+);
+
+this.AddonBlocklistClient = new BlocklistClient(
+ Services.prefs.getCharPref(PREF_BLOCKLIST_ADDONS_COLLECTION),
+ PREF_BLOCKLIST_ADDONS_CHECKED_SECONDS,
+ updateJSONBlocklist.bind(undefined, FILENAME_ADDONS_JSON)
+);
+
+this.GfxBlocklistClient = new BlocklistClient(
+ Services.prefs.getCharPref(PREF_BLOCKLIST_GFX_COLLECTION),
+ PREF_BLOCKLIST_GFX_CHECKED_SECONDS,
+ updateJSONBlocklist.bind(undefined, FILENAME_GFX_JSON)
+);
+
+this.PluginBlocklistClient = new BlocklistClient(
+ Services.prefs.getCharPref(PREF_BLOCKLIST_PLUGINS_COLLECTION),
+ PREF_BLOCKLIST_PLUGINS_CHECKED_SECONDS,
+ updateJSONBlocklist.bind(undefined, FILENAME_PLUGINS_JSON)
+);
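
A sketch of how one of these clients is driven; illustrative only, with placeholder timestamp values. In practice blocklist-updater.js (below) supplies lastModified from the server's change feed and serverTime from the response's Date header:

    const BlocklistClients =
      Components.utils.import("resource://services-common/blocklist-clients.js", {});

    // Sync OneCRL if the server-side collection is newer than the local copy.
    BlocklistClients.OneCRLBlocklistClient
      .maybeSync(1450717104423, Date.now())
      .catch(Components.utils.reportError);
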
diff --git a/services/common/blocklist-updater.js b/services/common/blocklist-updater.js
new file mode 100644
index 000000000..3b39b9552
--- /dev/null
+++ b/services/common/blocklist-updater.js
@@ -0,0 +1,117 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["checkVersions", "addTestBlocklistClient"];
+
+const { classes: Cc, Constructor: CC, interfaces: Ci, utils: Cu } = Components;
+
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
+Cu.importGlobalProperties(['fetch']);
+const BlocklistClients = Cu.import("resource://services-common/blocklist-clients.js", {});
+
+const PREF_SETTINGS_SERVER = "services.settings.server";
+const PREF_BLOCKLIST_CHANGES_PATH = "services.blocklist.changes.path";
+const PREF_BLOCKLIST_BUCKET = "services.blocklist.bucket";
+const PREF_BLOCKLIST_LAST_UPDATE = "services.blocklist.last_update_seconds";
+const PREF_BLOCKLIST_LAST_ETAG = "services.blocklist.last_etag";
+const PREF_BLOCKLIST_CLOCK_SKEW_SECONDS = "services.blocklist.clock_skew_seconds";
+
+
+const gBlocklistClients = {
+ [BlocklistClients.OneCRLBlocklistClient.collectionName]: BlocklistClients.OneCRLBlocklistClient,
+ [BlocklistClients.AddonBlocklistClient.collectionName]: BlocklistClients.AddonBlocklistClient,
+ [BlocklistClients.GfxBlocklistClient.collectionName]: BlocklistClients.GfxBlocklistClient,
+ [BlocklistClients.PluginBlocklistClient.collectionName]: BlocklistClients.PluginBlocklistClient
+};
+
+// Add a blocklist client for testing purposes. Do not use for any other purpose
+this.addTestBlocklistClient = (name, client) => { gBlocklistClients[name] = client; }
+
+// This is called by the ping mechanism.
+// returns a promise that rejects if something goes wrong
+this.checkVersions = function() {
+ return Task.spawn(function* syncClients() {
+ // Fetch a versionInfo object that looks like:
+ // {"data":[
+ // {
+ // "host":"kinto-ota.dev.mozaws.net",
+ // "last_modified":1450717104423,
+ // "bucket":"blocklists",
+ // "collection":"certificates"
+ // }]}
+ // Right now, we only use the collection name and the last modified info
+ let kintoBase = Services.prefs.getCharPref(PREF_SETTINGS_SERVER);
+ let changesEndpoint = kintoBase + Services.prefs.getCharPref(PREF_BLOCKLIST_CHANGES_PATH);
+ let blocklistsBucket = Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET);
+
+ // Use ETag to obtain a `304 Not modified` when no change occurred.
+ const headers = {};
+ if (Services.prefs.prefHasUserValue(PREF_BLOCKLIST_LAST_ETAG)) {
+ const lastEtag = Services.prefs.getCharPref(PREF_BLOCKLIST_LAST_ETAG);
+ if (lastEtag) {
+ headers["If-None-Match"] = lastEtag;
+ }
+ }
+
+ let response = yield fetch(changesEndpoint, {headers});
+
+ let versionInfo;
+ // No changes since last time. Go on with empty list of changes.
+ if (response.status == 304) {
+ versionInfo = {data: []};
+ } else {
+ versionInfo = yield response.json();
+ }
+
+ // If the server is failing, the JSON response might not contain the
+ // expected data (e.g. error response - Bug 1259145)
+ if (!versionInfo.hasOwnProperty("data")) {
+ throw new Error("Polling for changes failed.");
+ }
+
+ // Record new update time and the difference between local and server time
+ let serverTimeMillis = Date.parse(response.headers.get("Date"));
+
+ // negative clockDifference means local time is behind server time
+ // by the absolute of that value in seconds (positive means it's ahead)
+ let clockDifference = Math.floor((Date.now() - serverTimeMillis) / 1000);
+ Services.prefs.setIntPref(PREF_BLOCKLIST_CLOCK_SKEW_SECONDS, clockDifference);
+ Services.prefs.setIntPref(PREF_BLOCKLIST_LAST_UPDATE, serverTimeMillis / 1000);
+
+ let firstError;
+ for (let collectionInfo of versionInfo.data) {
+ // Skip changes that don't concern configured blocklist bucket.
+ if (collectionInfo.bucket != blocklistsBucket) {
+ continue;
+ }
+
+ let collection = collectionInfo.collection;
+ let client = gBlocklistClients[collection];
+ if (client && client.maybeSync) {
+ let lastModified = 0;
+ if (collectionInfo.last_modified) {
+ lastModified = collectionInfo.last_modified;
+ }
+ try {
+ yield client.maybeSync(lastModified, serverTimeMillis);
+ } catch (e) {
+ if (!firstError) {
+ firstError = e;
+ }
+ }
+ }
+ }
+ if (firstError) {
+ // cause the promise to reject by throwing the first observed error
+ throw firstError;
+ }
+
+ // Save current Etag for next poll.
+ if (response.headers.has("ETag")) {
+ const currentEtag = response.headers.get("ETag");
+ Services.prefs.setCharPref(PREF_BLOCKLIST_LAST_ETAG, currentEtag);
+ }
+ });
+};
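
checkVersions() is the entry point the blocklist ping/timer mechanism calls: it polls the changes endpoint, records clock skew and the ETag, then fans out to each configured client's maybeSync(). A minimal invocation sketch, assuming a chrome-privileged scope with the services.settings.server and services.blocklist.* prefs configured:

    const updater =
      Components.utils.import("resource://services-common/blocklist-updater.js", {});

    updater.checkVersions().then(() => {
      // Every matching collection had a chance to sync; the last-update,
      // clock-skew and ETag prefs have been refreshed.
    }, Components.utils.reportError);
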
diff --git a/services/common/hawkclient.js b/services/common/hawkclient.js
new file mode 100644
index 000000000..88e9c2f2d
--- /dev/null
+++ b/services/common/hawkclient.js
@@ -0,0 +1,346 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+/*
+ * HAWK is an HTTP authentication scheme using a message authentication code
+ * (MAC) algorithm to provide partial HTTP request cryptographic verification.
+ *
+ * For details, see: https://github.com/hueniverse/hawk
+ *
+ * With HAWK, it is essential that the clocks on clients and server not have an
+ * absolute delta of greater than one minute, as the HAWK protocol uses
+ * timestamps to reduce the possibility of replay attacks. However, it is
+ * likely that some clients' clocks will be more than a little off, especially
+ * in mobile devices, which would break HAWK-based services (like sync and
+ * firefox accounts) for those clients.
+ *
+ * This library provides a stateful HAWK client that calculates (roughly) the
+ * clock delta on the client vs the server. The library provides an interface
+ * for deriving HAWK credentials and making HAWK-authenticated REST requests to
+ * a single remote server. Therefore, callers who want to interact with
+ * multiple HAWK services should instantiate one HawkClient per service.
+ */
+
+this.EXPORTED_SYMBOLS = ["HawkClient"];
+
+var {interfaces: Ci, utils: Cu} = Components;
+
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://services-common/hawkrequest.js");
+Cu.import("resource://services-common/observers.js");
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+
+// log.appender.dump should be one of "Fatal", "Error", "Warn", "Info", "Config",
+// "Debug", "Trace" or "All". If none is specified, "Error" will be used by
+// default.
+// Note however that Sync will also add this log to *its* DumpAppender, so
+// in a Sync context it shouldn't be necessary to adjust this - however, that
+// also means error logs are likely to be dump'd twice but that's OK.
+const PREF_LOG_LEVEL = "services.common.hawk.log.appender.dump";
+
+// A pref that can be set so "sensitive" information (eg, personally
+// identifiable info, credentials, etc) will be logged.
+const PREF_LOG_SENSITIVE_DETAILS = "services.common.hawk.log.sensitive";
+
+XPCOMUtils.defineLazyGetter(this, "log", function() {
+ let log = Log.repository.getLogger("Hawk");
+ // We set the log itself to "debug" and set the level from the preference to
+ // the appender. This allows other things to send the logs to different
+ // appenders, while still allowing the pref to control what is seen via dump()
+ log.level = Log.Level.Debug;
+ let appender = new Log.DumpAppender();
+ log.addAppender(appender);
+ appender.level = Log.Level.Error;
+ try {
+ let level =
+ Services.prefs.getPrefType(PREF_LOG_LEVEL) == Ci.nsIPrefBranch.PREF_STRING
+ && Services.prefs.getCharPref(PREF_LOG_LEVEL);
+ appender.level = Log.Level[level] || Log.Level.Error;
+ } catch (e) {
+ log.error(e);
+ }
+
+ return log;
+});
+
+// A boolean to indicate if personally identifiable information (or anything
+// else sensitive, such as credentials) should be logged.
+XPCOMUtils.defineLazyGetter(this, 'logPII', function() {
+ try {
+ return Services.prefs.getBoolPref(PREF_LOG_SENSITIVE_DETAILS);
+ } catch (_) {
+ return false;
+ }
+});
+
+/*
+ * A general purpose client for making HAWK authenticated requests to a single
+ * host. Keeps track of the clock offset between the client and the host for
+ * computation of the timestamp in the HAWK Authorization header.
+ *
+ * Clients should create one HawkClient object for each server they wish to
+ * interact with.
+ *
+ * @param host
+ * The url of the host
+ */
+this.HawkClient = function(host) {
+ this.host = host;
+
+ // Clock offset in milliseconds between our client's clock and the date
+ // reported in responses from our host.
+ this._localtimeOffsetMsec = 0;
+}
+
+this.HawkClient.prototype = {
+
+ /*
+ * A boolean for feature detection.
+ */
+ willUTF8EncodeRequests: HAWKAuthenticatedRESTRequest.prototype.willUTF8EncodeObjectRequests,
+
+ /*
+ * Construct an error message for a response. Private.
+ *
+ * @param restResponse
+ * A RESTResponse object from a RESTRequest
+ *
+ * @param error
+ * A string or object describing the error
+ */
+ _constructError: function(restResponse, error) {
+ let errorObj = {
+ error: error,
+ // This object is likely to be JSON.stringify'd, but neither Error()
+ // objects nor Components.Exception objects do the right thing there,
+ // so we add a new element which is simply the .toString() version of
+ // the error object, so it does appear in JSON'd values.
+ errorString: error.toString(),
+ message: restResponse.statusText,
+ code: restResponse.status,
+ errno: restResponse.status,
+ toString() {
+ return this.code + ": " + this.message;
+ },
+ };
+ let retryAfter = restResponse.headers && restResponse.headers["retry-after"];
+ retryAfter = retryAfter ? parseInt(retryAfter) : retryAfter;
+ if (retryAfter) {
+ errorObj.retryAfter = retryAfter;
+ // and notify observers of the retry interval
+ if (this.observerPrefix) {
+ Observers.notify(this.observerPrefix + ":backoff:interval", retryAfter);
+ }
+ }
+ return errorObj;
+ },
+
+ /*
+ *
+ * Update the clock offset by determining the difference from the date given in the (RFC
+ * 1123) Date header of a server response. Because HAWK tolerates a window
+ * of one minute of clock skew (so two minutes total since the skew can be
+ * positive or negative), the simple method of calculating offset here is
+ * probably good enough. We keep the value in milliseconds to make life
+ * easier, even though the value will not have millisecond accuracy.
+ *
+ * @param dateString
+ * An RFC 1123 date string (e.g., "Mon, 13 Jan 2014 21:45:06 GMT")
+ *
+ * For HAWK clock skew and replay protection, see
+ * https://github.com/hueniverse/hawk#replay-protection
+ */
+ _updateClockOffset: function(dateString) {
+ try {
+ let serverDateMsec = Date.parse(dateString);
+ this._localtimeOffsetMsec = serverDateMsec - this.now();
+ log.debug("Clock offset vs " + this.host + ": " + this._localtimeOffsetMsec);
+ } catch(err) {
+ log.warn("Bad date header in server response: " + dateString);
+ }
+ },
+
+ /*
+ * Get the current clock offset in milliseconds.
+ *
+ * The offset is the number of milliseconds that must be added to the client
+ * clock to make it equal to the server clock. For example, if the client is
+ * five minutes ahead of the server, the localtimeOffsetMsec will be -300000.
+ */
+ get localtimeOffsetMsec() {
+ return this._localtimeOffsetMsec;
+ },
+
+ /*
+ * return current time in milliseconds
+ */
+ now: function() {
+ return Date.now();
+ },
+
+ /* A general method for sending raw RESTRequest calls authorized using HAWK
+ *
+ * @param path
+ * API endpoint path
+ * @param method
+ * The HTTP request method
+ * @param credentials
+ * Hawk credentials
+ * @param payloadObj
+ * An object that can be encodable as JSON as the payload of the
+ * request
+ * @param extraHeaders
+ * An object with header/value pairs to send with the request.
+ * @return Promise
+ * Returns a promise that resolves to the response of the API call,
+ * or is rejected with an error. If the server response can be parsed
+ * as JSON and contains an 'error' property, the promise will be
+ * rejected with this JSON-parsed response.
+ */
+ request: function(path, method, credentials=null, payloadObj={}, extraHeaders = {},
+ retryOK=true) {
+ method = method.toLowerCase();
+
+ let deferred = Promise.defer();
+ let uri = this.host + path;
+ let self = this;
+
+ function _onComplete(error) {
+ // |error| can be either a normal caught error or an explicitly created
+ // Components.Exception() error. Log it now as it might not end up
+ // correctly in the logs by the time it's passed through _constructError.
+ if (error) {
+ log.warn("hawk request error", error);
+ }
+ // If there's no response there's nothing else to do.
+ if (!this.response) {
+ deferred.reject(error);
+ return;
+ }
+ let restResponse = this.response;
+ let status = restResponse.status;
+
+ log.debug("(Response) " + path + ": code: " + status +
+ " - Status text: " + restResponse.statusText);
+ if (logPII) {
+ log.debug("Response text: " + restResponse.body);
+ }
+
+ // All responses may have backoff headers, which are a server-side safety
+ // valve to allow slowing down clients without hurting performance.
+ self._maybeNotifyBackoff(restResponse, "x-weave-backoff");
+ self._maybeNotifyBackoff(restResponse, "x-backoff");
+
+ if (error) {
+ // When things really blow up, reconstruct an error object that follows
+ // the general format of the server on error responses.
+ return deferred.reject(self._constructError(restResponse, error));
+ }
+
+ self._updateClockOffset(restResponse.headers["date"]);
+
+ if (status === 401 && retryOK && !("retry-after" in restResponse.headers)) {
+ // Retry once if we were rejected due to a bad timestamp.
+ // Clock offset is adjusted already in the top of this function.
+ log.debug("Received 401 for " + path + ": retrying");
+ return deferred.resolve(
+ self.request(path, method, credentials, payloadObj, extraHeaders, false));
+ }
+
+ // If the server returned a json error message, use it in the rejection
+ // of the promise.
+ //
+ // In the case of a 401, in which we are probably being rejected for a
+ // bad timestamp, retry exactly once, during which time clock offset will
+ // be adjusted.
+
+ let jsonResponse = {};
+ try {
+ jsonResponse = JSON.parse(restResponse.body);
+ } catch(notJSON) {}
+
+ let okResponse = (200 <= status && status < 300);
+ if (!okResponse || jsonResponse.error) {
+ if (jsonResponse.error) {
+ return deferred.reject(jsonResponse);
+ }
+ return deferred.reject(self._constructError(restResponse, "Request failed"));
+ }
+ // It's up to the caller to know how to decode the response.
+ // We just return the whole response.
+ deferred.resolve(this.response);
+ };
+
+ function onComplete(error) {
+ try {
+ // |this| is the RESTRequest object and we need to ensure _onComplete
+ // gets the same one.
+ _onComplete.call(this, error);
+ } catch (ex) {
+ log.error("Unhandled exception processing response", ex);
+ deferred.reject(ex);
+ }
+ }
+
+ let extra = {
+ now: this.now(),
+ localtimeOffsetMsec: this.localtimeOffsetMsec,
+ headers: extraHeaders
+ };
+
+ let request = this.newHAWKAuthenticatedRESTRequest(uri, credentials, extra);
+ try {
+ if (method == "post" || method == "put" || method == "patch") {
+ request[method](payloadObj, onComplete);
+ } else {
+ request[method](onComplete);
+ }
+ } catch (ex) {
+ log.error("Failed to make hawk request", ex);
+ deferred.reject(ex);
+ }
+
+ return deferred.promise;
+ },
+
+ /*
+ * The prefix used for all notifications sent by this module. This
+ * allows the handler of notifications to be sure they are handling
+ * notifications for the service they expect.
+ *
+ * If not set, no notifications will be sent.
+ */
+ observerPrefix: null,
+
+ // Given an optional header value, notify that a backoff has been requested.
+ _maybeNotifyBackoff: function (response, headerName) {
+ if (!this.observerPrefix || !response.headers) {
+ return;
+ }
+ let headerVal = response.headers[headerName];
+ if (!headerVal) {
+ return;
+ }
+ let backoffInterval;
+ try {
+ backoffInterval = parseInt(headerVal, 10);
+ } catch (ex) {
+ log.error("hawkclient response had invalid backoff value in '" +
+ headerName + "' header: " + headerVal);
+ return;
+ }
+ Observers.notify(this.observerPrefix + ":backoff:interval", backoffInterval);
+ },
+
+ // override points for testing.
+ newHAWKAuthenticatedRESTRequest: function(uri, credentials, extra) {
+ return new HAWKAuthenticatedRESTRequest(uri, credentials, extra);
+ },
+
+}
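
A minimal sketch of a HAWK-authenticated request with this client, assuming a chrome-privileged scope. The host URL, endpoint and credential values are placeholders; the credentials object shape matches what deriveHawkCredentials() in hawkrequest.js returns:

    Components.utils.import("resource://services-common/hawkclient.js");

    let client = new HawkClient("https://example.invalid/api/v1");
    let credentials = {algorithm: "sha256", id: "<hawk id>", key: "<hawk key>"};

    client.request("/account/status", "get", credentials)
      .then(response => {
        // The promise resolves with the raw RESTResponse; decoding the body
        // is left to the caller.
        let body = JSON.parse(response.body);
      })
      .catch(Components.utils.reportError);
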
diff --git a/services/common/hawkrequest.js b/services/common/hawkrequest.js
new file mode 100644
index 000000000..454960b7b
--- /dev/null
+++ b/services/common/hawkrequest.js
@@ -0,0 +1,198 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+this.EXPORTED_SYMBOLS = [
+ "HAWKAuthenticatedRESTRequest",
+ "deriveHawkCredentials"
+];
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://gre/modules/Credentials.jsm");
+
+XPCOMUtils.defineLazyModuleGetter(this, "CryptoUtils",
+ "resource://services-crypto/utils.js");
+
+const Prefs = new Preferences("services.common.rest.");
+
+/**
+ * Single-use HAWK-authenticated HTTP requests to RESTish resources.
+ *
+ * @param uri
+ * (String) URI for the RESTRequest constructor
+ *
+ * @param credentials
+ * (Object) Optional credentials for computing HAWK authentication
+ * header.
+ *
+ * @param payloadObj
+ * (Object) Optional object to be converted to JSON payload
+ *
+ * @param extra
+ * (Object) Optional extra params for HAWK header computation.
+ * Valid properties are:
+ *
+ * now: <current time in milliseconds>,
+ * localtimeOffsetMsec: <local clock offset vs server>,
+ * headers: <An object with header/value pairs to be sent
+ * as headers on the request>
+ *
+ * extra.localtimeOffsetMsec is the value in milliseconds that must be added to
+ * the local clock to make it agree with the server's clock. For instance, if
+ * the local clock is two minutes ahead of the server, the time offset in
+ * milliseconds will be -120000.
+ */
+
+this.HAWKAuthenticatedRESTRequest =
+ function HawkAuthenticatedRESTRequest(uri, credentials, extra={}) {
+ RESTRequest.call(this, uri);
+
+ this.credentials = credentials;
+ this.now = extra.now || Date.now();
+ this.localtimeOffsetMsec = extra.localtimeOffsetMsec || 0;
+ this._log.trace("local time, offset: " + this.now + ", " + (this.localtimeOffsetMsec));
+ this.extraHeaders = extra.headers || {};
+
+ // Expose for testing
+ this._intl = getIntl();
+};
+HAWKAuthenticatedRESTRequest.prototype = {
+ __proto__: RESTRequest.prototype,
+
+ dispatch: function dispatch(method, data, onComplete, onProgress) {
+ let contentType = "text/plain";
+ if (method == "POST" || method == "PUT" || method == "PATCH") {
+ contentType = "application/json";
+ }
+ if (this.credentials) {
+ let options = {
+ now: this.now,
+ localtimeOffsetMsec: this.localtimeOffsetMsec,
+ credentials: this.credentials,
+ payload: data && JSON.stringify(data) || "",
+ contentType: contentType,
+ };
+ let header = CryptoUtils.computeHAWK(this.uri, method, options);
+ this.setHeader("Authorization", header.field);
+ this._log.trace("hawk auth header: " + header.field);
+ }
+
+ for (let header in this.extraHeaders) {
+ this.setHeader(header, this.extraHeaders[header]);
+ }
+
+ this.setHeader("Content-Type", contentType);
+
+ this.setHeader("Accept-Language", this._intl.accept_languages);
+
+ return RESTRequest.prototype.dispatch.call(
+ this, method, data, onComplete, onProgress
+ );
+ }
+};
+
+
+/**
+ * Generic function to derive Hawk credentials.
+ *
+ * Hawk credentials are derived using shared secrets, which depend on the token
+ * in use.
+ *
+ * @param tokenHex
+ * The current session token encoded in hex
+ * @param context
+ * A context for the credentials. A protocol version will be prepended
+ * to the context, see Credentials.keyWord for more information.
+ * @param size
+ * The size in bytes of the expected derived buffer,
+ * defaults to 3 * 32.
+ * @return credentials
+ * Returns an object:
+ * {
+ * algorithm: sha256
+ * id: the Hawk id (from the first 32 bytes derived)
+ * key: the Hawk key (from bytes 32 to 64)
+ * extra: size - 64 extra bytes (if size > 64)
+ * }
+ */
+this.deriveHawkCredentials = function deriveHawkCredentials(tokenHex,
+ context,
+ size = 96,
+ hexKey = false) {
+ let token = CommonUtils.hexToBytes(tokenHex);
+ let out = CryptoUtils.hkdf(token, undefined, Credentials.keyWord(context), size);
+
+ let result = {
+ algorithm: "sha256",
+ key: hexKey ? CommonUtils.bytesAsHex(out.slice(32, 64)) : out.slice(32, 64),
+ id: CommonUtils.bytesAsHex(out.slice(0, 32))
+ };
+ if (size > 64) {
+ result.extra = out.slice(64);
+ }
+
+ return result;
+}
+
+// With hawk request, we send the user's accepted-languages with each request.
+// To keep the number of times we read this pref at a minimum, maintain the
+// preference in a stateful object that notices and updates itself when the
+// pref is changed.
+this.Intl = function Intl() {
+ // We won't actually query the pref until the first time we need it
+ this._accepted = "";
+ this._everRead = false;
+ this._log = Log.repository.getLogger("Services.common.RESTRequest");
+ this._log.level = Log.Level[Prefs.get("log.logger.rest.request")];
+ this.init();
+};
+
+this.Intl.prototype = {
+ init: function() {
+ Services.prefs.addObserver("intl.accept_languages", this, false);
+ },
+
+ uninit: function() {
+ Services.prefs.removeObserver("intl.accept_languages", this);
+ },
+
+ observe: function(subject, topic, data) {
+ this.readPref();
+ },
+
+ readPref: function() {
+ this._everRead = true;
+ try {
+ this._accepted = Services.prefs.getComplexValue(
+ "intl.accept_languages", Ci.nsIPrefLocalizedString).data;
+ } catch (err) {
+ this._log.error("Error reading intl.accept_languages pref", err);
+ }
+ },
+
+ get accept_languages() {
+ if (!this._everRead) {
+ this.readPref();
+ }
+ return this._accepted;
+ },
+};
+
+// Singleton getter for Intl, creating an instance only when we first need it.
+var intl = null;
+function getIntl() {
+ if (!intl) {
+ intl = new Intl();
+ }
+ return intl;
+}
+
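
An illustrative call to deriveHawkCredentials(); the token and context values below are placeholders only:

    Components.utils.import("resource://services-common/hawkrequest.js");

    let sessionTokenHex = "a0".repeat(32);  // placeholder 32-byte token, hex-encoded
    let creds = deriveHawkCredentials(sessionTokenHex, "sessionToken", 2 * 32);
    // creds.id  -> hex-encoded Hawk id (derived from the first 32 bytes)
    // creds.key -> raw key bytes 32..64 (pass hexKey=true for a hex string instead)
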
diff --git a/services/common/kinto-http-client.js b/services/common/kinto-http-client.js
new file mode 100644
index 000000000..57f6946d1
--- /dev/null
+++ b/services/common/kinto-http-client.js
@@ -0,0 +1,1891 @@
+/*
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This file is generated from kinto-http.js - do not modify directly.
+ */
+
+this.EXPORTED_SYMBOLS = ["KintoHttpClient"];
+
+/*
+ * Version 2.0.0 - 61435f3
+ */
+
+(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.KintoHttpClient = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
+/*
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = undefined;
+
+var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
+
+var _base = require("../src/base");
+
+var _base2 = _interopRequireDefault(_base);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const Cu = Components.utils;
+
+Cu.import("resource://gre/modules/Timer.jsm");
+Cu.importGlobalProperties(['fetch']);
+const { EventEmitter } = Cu.import("resource://devtools/shared/event-emitter.js", {});
+
+let KintoHttpClient = class KintoHttpClient extends _base2.default {
+ constructor(remote, options = {}) {
+ const events = {};
+ EventEmitter.decorate(events);
+ super(remote, _extends({ events }, options));
+ }
+};
+
+// This fixes compatibility with CommonJS required by browserify.
+// See http://stackoverflow.com/questions/33505992/babel-6-changes-how-it-exports-default/33683495#33683495
+
+exports.default = KintoHttpClient;
+if (typeof module === "object") {
+ module.exports = KintoHttpClient;
+}
+
+},{"../src/base":2}],2:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = exports.SUPPORTED_PROTOCOL_VERSION = undefined;
+
+var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
+
+var _dec, _dec2, _dec3, _dec4, _dec5, _dec6, _desc, _value, _class;
+
+var _utils = require("./utils");
+
+var _http = require("./http");
+
+var _http2 = _interopRequireDefault(_http);
+
+var _endpoint = require("./endpoint");
+
+var _endpoint2 = _interopRequireDefault(_endpoint);
+
+var _requests = require("./requests");
+
+var requests = _interopRequireWildcard(_requests);
+
+var _batch = require("./batch");
+
+var _bucket = require("./bucket");
+
+var _bucket2 = _interopRequireDefault(_bucket);
+
+function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function _applyDecoratedDescriptor(target, property, decorators, descriptor, context) {
+ var desc = {};
+ Object['ke' + 'ys'](descriptor).forEach(function (key) {
+ desc[key] = descriptor[key];
+ });
+ desc.enumerable = !!desc.enumerable;
+ desc.configurable = !!desc.configurable;
+
+ if ('value' in desc || desc.initializer) {
+ desc.writable = true;
+ }
+
+ desc = decorators.slice().reverse().reduce(function (desc, decorator) {
+ return decorator(target, property, desc) || desc;
+ }, desc);
+
+ if (context && desc.initializer !== void 0) {
+ desc.value = desc.initializer ? desc.initializer.call(context) : void 0;
+ desc.initializer = undefined;
+ }
+
+ if (desc.initializer === void 0) {
+ Object['define' + 'Property'](target, property, desc);
+ desc = null;
+ }
+
+ return desc;
+}
+
+/**
+ * Currently supported protocol version.
+ * @type {String}
+ */
+const SUPPORTED_PROTOCOL_VERSION = exports.SUPPORTED_PROTOCOL_VERSION = "v1";
+
+/**
+ * High level HTTP client for the Kinto API.
+ *
+ * @example
+ * const client = new KintoClient("https://kinto.dev.mozaws.net/v1");
+ * client.bucket("default")
+* .collection("my-blog")
+* .createRecord({title: "First article"})
+ * .then(console.log.bind(console))
+ * .catch(console.error.bind(console));
+ */
+let KintoClientBase = (_dec = (0, _utils.nobatch)("This operation is not supported within a batch operation."), _dec2 = (0, _utils.nobatch)("This operation is not supported within a batch operation."), _dec3 = (0, _utils.nobatch)("This operation is not supported within a batch operation."), _dec4 = (0, _utils.nobatch)("This operation is not supported within a batch operation."), _dec5 = (0, _utils.nobatch)("Can't use batch within a batch!"), _dec6 = (0, _utils.support)("1.4", "2.0"), (_class = class KintoClientBase {
+ /**
+ * Constructor.
+ *
+ * @param {String} remote The remote URL.
+ * @param {Object} [options={}] The options object.
+ * @param {Boolean} [options.safe=true] Adds concurrency headers to every request.
+ * @param {EventEmitter} [options.events=EventEmitter] The events handler instance.
+ * @param {Object} [options.headers={}] The key-value headers to pass to each request.
+ * @param {String} [options.bucket="default"] The default bucket to use.
+ * @param {String} [options.requestMode="cors"] The HTTP request mode (from ES6 fetch spec).
+ * @param {Number} [options.timeout=5000] The requests timeout in ms.
+ */
+ constructor(remote, options = {}) {
+ if (typeof remote !== "string" || !remote.length) {
+ throw new Error("Invalid remote URL: " + remote);
+ }
+ if (remote[remote.length - 1] === "/") {
+ remote = remote.slice(0, -1);
+ }
+ this._backoffReleaseTime = null;
+
+ /**
+ * Default request options container.
+ * @private
+ * @type {Object}
+ */
+ this.defaultReqOptions = {
+ bucket: options.bucket || "default",
+ headers: options.headers || {},
+ safe: !!options.safe
+ };
+
+ this._options = options;
+ this._requests = [];
+ this._isBatch = !!options.batch;
+
+ // public properties
+ /**
+ * The remote server base URL.
+ * @type {String}
+ */
+ this.remote = remote;
+ /**
+ * Current server information.
+ * @ignore
+ * @type {Object|null}
+ */
+ this.serverInfo = null;
+ /**
+ * The event emitter instance. Should comply with the `EventEmitter`
+ * interface.
+ * @ignore
+ * @type {Class}
+ */
+ this.events = options.events;
+
+ const { requestMode, timeout } = options;
+ /**
+ * The HTTP instance.
+ * @ignore
+ * @type {HTTP}
+ */
+ this.http = new _http2.default(this.events, { requestMode, timeout });
+ this._registerHTTPEvents();
+ }
+
+ /**
+ * The remote endpoint base URL. Setting the value will also extract and
+ * validate the version.
+ * @type {String}
+ */
+ get remote() {
+ return this._remote;
+ }
+
+ /**
+ * @ignore
+ */
+ set remote(url) {
+ let version;
+ try {
+ version = url.match(/\/(v\d+)\/?$/)[1];
+ } catch (err) {
+ throw new Error("The remote URL must contain the version: " + url);
+ }
+ if (version !== SUPPORTED_PROTOCOL_VERSION) {
+ throw new Error(`Unsupported protocol version: ${ version }`);
+ }
+ this._remote = url;
+ this._version = version;
+ }
+
+ /**
+ * The current server protocol version, eg. `v1`.
+ * @type {String}
+ */
+ get version() {
+ return this._version;
+ }
+
+ /**
+ * Backoff remaining time, in milliseconds. Defaults to zero if no backoff is
+ * ongoing.
+ *
+ * @type {Number}
+ */
+ get backoff() {
+ const currentTime = new Date().getTime();
+ if (this._backoffReleaseTime && currentTime < this._backoffReleaseTime) {
+ return this._backoffReleaseTime - currentTime;
+ }
+ return 0;
+ }
+
+ /**
+ * Registers HTTP events.
+ * @private
+ */
+ _registerHTTPEvents() {
+ // Prevent registering event from a batch client instance
+ if (!this._isBatch) {
+ this.events.on("backoff", backoffMs => {
+ this._backoffReleaseTime = backoffMs;
+ });
+ }
+ }
+
+ /**
+ * Retrieve a bucket object to perform operations on it.
+ *
+ * @param {String} name The bucket name.
+ * @param {Object} [options={}] The request options.
+ * @param {Boolean} [options.safe] The resulting safe option.
+ * @param {String} [options.bucket] The resulting bucket name option.
+ * @param {Object} [options.headers] The extended headers object option.
+ * @return {Bucket}
+ */
+ bucket(name, options = {}) {
+ const bucketOptions = (0, _utils.omit)(this._getRequestOptions(options), "bucket");
+ return new _bucket2.default(this, name, bucketOptions);
+ }
+
+ /**
+ * Generates a request options object, deeply merging the client configured
+ * defaults with the ones provided as argument.
+ *
+ * Note: Headers won't be overridden but merged with instance default ones.
+ *
+ * @private
+ * @param {Object} [options={}] The request options.
+ * @property {Boolean} [options.safe] The resulting safe option.
+ * @property {String} [options.bucket] The resulting bucket name option.
+ * @property {Object} [options.headers] The extended headers object option.
+ * @return {Object}
+ */
+ _getRequestOptions(options = {}) {
+ return _extends({}, this.defaultReqOptions, options, {
+ batch: this._isBatch,
+ // Note: headers should never be overridden but extended
+ headers: _extends({}, this.defaultReqOptions.headers, options.headers)
+ });
+ }
+
+ /**
+ * Retrieves server information and persist them locally. This operation is
+ * usually performed a single time during the instance lifecycle.
+ *
+ * @param {Object} [options={}] The request options.
+ * @return {Promise<Object, Error>}
+ */
+ fetchServerInfo(options = {}) {
+ if (this.serverInfo) {
+ return Promise.resolve(this.serverInfo);
+ }
+ return this.http.request(this.remote + (0, _endpoint2.default)("root"), {
+ headers: _extends({}, this.defaultReqOptions.headers, options.headers)
+ }).then(({ json }) => {
+ this.serverInfo = json;
+ return this.serverInfo;
+ });
+ }
+
+ /**
+ * Retrieves Kinto server settings.
+ *
+ * @param {Object} [options={}] The request options.
+ * @return {Promise<Object, Error>}
+ */
+
+ fetchServerSettings(options = {}) {
+ return this.fetchServerInfo(options).then(({ settings }) => settings);
+ }
+
+ /**
+ * Retrieve server capabilities information.
+ *
+ * @param {Object} [options={}] The request options.
+ * @return {Promise<Object, Error>}
+ */
+
+ fetchServerCapabilities(options = {}) {
+ return this.fetchServerInfo(options).then(({ capabilities }) => capabilities);
+ }
+
+ /**
+ * Retrieve authenticated user information.
+ *
+ * @param {Object} [options={}] The request options.
+ * @return {Promise<Object, Error>}
+ */
+
+ fetchUser(options = {}) {
+ return this.fetchServerInfo(options).then(({ user }) => user);
+ }
+
+ /**
+ * Retrieve the HTTP API version supported by the server.
+ *
+ * @param {Object} [options={}] The request options.
+ * @return {Promise<Object, Error>}
+ */
+
+ fetchHTTPApiVersion(options = {}) {
+ return this.fetchServerInfo(options).then(({ http_api_version }) => {
+ return http_api_version;
+ });
+ }
+
+ /**
+ * Process batch requests, chunking them according to the batch_max_requests
+ * server setting when needed.
+ *
+ * @param {Array} requests The list of batch subrequests to perform.
+ * @param {Object} [options={}] The options object.
+ * @return {Promise<Object, Error>}
+ */
+ _batchRequests(requests, options = {}) {
+ const headers = _extends({}, this.defaultReqOptions.headers, options.headers);
+ if (!requests.length) {
+ return Promise.resolve([]);
+ }
+ return this.fetchServerSettings().then(serverSettings => {
+ const maxRequests = serverSettings["batch_max_requests"];
+ if (maxRequests && requests.length > maxRequests) {
+ const chunks = (0, _utils.partition)(requests, maxRequests);
+ return (0, _utils.pMap)(chunks, chunk => this._batchRequests(chunk, options));
+ }
+ return this.execute({
+ path: (0, _endpoint2.default)("batch"),
+ method: "POST",
+ headers: headers,
+ body: {
+ defaults: { headers },
+ requests: requests
+ }
+ })
+ // we only care about the responses
+ .then(({ responses }) => responses);
+ });
+ }
+
+ /**
+ * Sends batch requests to the remote server.
+ *
+ * Note: Reserved for internal use only.
+ *
+ * @ignore
+ * @param {Function} fn The function to use for describing batch ops.
+ * @param {Object} [options={}] The options object.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {String} [options.bucket] The bucket name option.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.aggregate=false] Produces an aggregated result object.
+ * @return {Promise<Object, Error>}
+ */
+
+ batch(fn, options = {}) {
+ const rootBatch = new KintoClientBase(this.remote, _extends({}, this._options, this._getRequestOptions(options), {
+ batch: true
+ }));
+ let bucketBatch, collBatch;
+ if (options.bucket) {
+ bucketBatch = rootBatch.bucket(options.bucket);
+ if (options.collection) {
+ collBatch = bucketBatch.collection(options.collection);
+ }
+ }
+ const batchClient = collBatch || bucketBatch || rootBatch;
+ try {
+ fn(batchClient);
+ } catch (err) {
+ return Promise.reject(err);
+ }
+ return this._batchRequests(rootBatch._requests, options).then(responses => {
+ if (options.aggregate) {
+ return (0, _batch.aggregate)(responses, rootBatch._requests);
+ }
+ return responses;
+ });
+ }
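+
+ // Illustrative sketch (the client instance and the "blog"/"posts" names are
+ // hypothetical): several write operations grouped into one batch request,
+ // with the aggregated result shape produced by the `aggregate` helper.
+ //
+ //   client.batch(batch => {
+ //     const posts = batch.bucket("blog").collection("posts");
+ //     posts.createRecord({ title: "first" });
+ //     posts.createRecord({ title: "second" });
+ //   }, { aggregate: true })
+ //     .then(({ published, errors, conflicts, skipped }) => {
+ //       // `published` contains the bodies of the successful subrequests.
+ //     });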
+
+ /**
+ * Executes an atomic HTTP request.
+ *
+ * @private
+ * @param {Object} request The request object.
+ * @param {Object} [options={}] The options object.
+ * @param {Boolean} [options.raw=false] If true, resolve with full response object, including json body and headers instead of just json.
+ * @return {Promise<Object, Error>}
+ */
+ execute(request, options = { raw: false }) {
+ // If we're within a batch, add the request to the stack to send at once.
+ if (this._isBatch) {
+ this._requests.push(request);
+ // Resolve with a message in case people attempt to consume the result
+ // from within a batch operation.
+ const msg = "This result is generated from within a batch " + "operation and should not be consumed.";
+ return Promise.resolve(options.raw ? { json: msg } : msg);
+ }
+ const promise = this.fetchServerSettings().then(_ => {
+ return this.http.request(this.remote + request.path, _extends({}, request, {
+ body: JSON.stringify(request.body)
+ }));
+ });
+ return options.raw ? promise : promise.then(({ json }) => json);
+ }
+
+ /**
+ * Retrieves the list of buckets.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object[], Error>}
+ */
+ listBuckets(options = {}) {
+ return this.execute({
+ path: (0, _endpoint2.default)("bucket"),
+ headers: _extends({}, this.defaultReqOptions.headers, options.headers)
+ });
+ }
+
+ /**
+ * Creates a new bucket on the server.
+ *
+ * @param {String} id The bucket name.
+ * @param {Object} [options={}] The options object.
+ * @param {Boolean} [options.data] The bucket data option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ createBucket(id, options = {}) {
+ if (!id) {
+ throw new Error("A bucket id is required.");
+ }
+ // Note that we simply ignore any "bucket" option passed here, as the one
+ // we're interested in is the one provided as a required argument.
+ const reqOptions = this._getRequestOptions(options);
+ const { data = {}, permissions } = reqOptions;
+ data.id = id;
+ const path = (0, _endpoint2.default)("bucket", id);
+ return this.execute(requests.createRequest(path, { data, permissions }, reqOptions));
+ }
+
+ /**
+ * Deletes a bucket from the server.
+ *
+ * @ignore
+ * @param {Object|String} bucket The bucket to delete.
+ * @param {Object} [options={}] The options object.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ deleteBucket(bucket, options = {}) {
+ const bucketObj = (0, _utils.toDataBody)(bucket);
+ if (!bucketObj.id) {
+ throw new Error("A bucket id is required.");
+ }
+ const path = (0, _endpoint2.default)("bucket", bucketObj.id);
+ const { last_modified } = bucketObj;
+ const reqOptions = this._getRequestOptions(_extends({ last_modified }, options));
+ return this.execute(requests.deleteRequest(path, reqOptions));
+ }
+
+ /**
+ * Deletes all buckets on the server.
+ *
+ * @ignore
+ * @param {Object} [options={}] The options object.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+
+ deleteBuckets(options = {}) {
+ const reqOptions = this._getRequestOptions(options);
+ const path = (0, _endpoint2.default)("bucket");
+ return this.execute(requests.deleteRequest(path, reqOptions));
+ }
+}, (_applyDecoratedDescriptor(_class.prototype, "fetchServerSettings", [_dec], Object.getOwnPropertyDescriptor(_class.prototype, "fetchServerSettings"), _class.prototype), _applyDecoratedDescriptor(_class.prototype, "fetchServerCapabilities", [_dec2], Object.getOwnPropertyDescriptor(_class.prototype, "fetchServerCapabilities"), _class.prototype), _applyDecoratedDescriptor(_class.prototype, "fetchUser", [_dec3], Object.getOwnPropertyDescriptor(_class.prototype, "fetchUser"), _class.prototype), _applyDecoratedDescriptor(_class.prototype, "fetchHTTPApiVersion", [_dec4], Object.getOwnPropertyDescriptor(_class.prototype, "fetchHTTPApiVersion"), _class.prototype), _applyDecoratedDescriptor(_class.prototype, "batch", [_dec5], Object.getOwnPropertyDescriptor(_class.prototype, "batch"), _class.prototype), _applyDecoratedDescriptor(_class.prototype, "deleteBuckets", [_dec6], Object.getOwnPropertyDescriptor(_class.prototype, "deleteBuckets"), _class.prototype)), _class));
+exports.default = KintoClientBase;
+
+},{"./batch":3,"./bucket":4,"./endpoint":6,"./http":8,"./requests":9,"./utils":10}],3:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.aggregate = aggregate;
+/**
+ * Exports batch responses as a result object.
+ *
+ * @private
+ * @param {Array} responses The batch subrequest responses.
+ * @param {Array} requests The initial issued requests.
+ * @return {Object}
+ */
+function aggregate(responses = [], requests = []) {
+ if (responses.length !== requests.length) {
+ throw new Error("Responses length should match requests one.");
+ }
+ const results = {
+ errors: [],
+ published: [],
+ conflicts: [],
+ skipped: []
+ };
+ return responses.reduce((acc, response, index) => {
+ const { status } = response;
+ if (status >= 200 && status < 400) {
+ acc.published.push(response.body);
+ } else if (status === 404) {
+ acc.skipped.push(response.body);
+ } else if (status === 412) {
+ acc.conflicts.push({
+ // XXX: specifying the type is probably superfluous
+ type: "outgoing",
+ local: requests[index].body,
+ remote: response.body.details && response.body.details.existing || null
+ });
+ } else {
+ acc.errors.push({
+ path: response.path,
+ sent: requests[index],
+ error: response.body
+ });
+ }
+ return acc;
+ }, results);
+}
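+
+// Illustrative example with hypothetical inputs: a 2xx subresponse is counted
+// as published, a 412 as an outgoing conflict.
+//
+//   aggregate(
+//     [{ status: 201, body: { data: { id: "abc" } } },
+//      { status: 412, body: { details: { existing: { last_modified: 42 } } } }],
+//     [{ body: { data: { id: "abc" } } },
+//      { body: { data: { id: "def" } } }]
+//   );
+//   // => { errors: [], published: [{ data: { id: "abc" } }],
+//   //      conflicts: [{ type: "outgoing", local: { data: { id: "def" } },
+//   //                    remote: { last_modified: 42 } }], skipped: [] }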
+
+},{}],4:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = undefined;
+
+var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
+
+var _utils = require("./utils");
+
+var _collection = require("./collection");
+
+var _collection2 = _interopRequireDefault(_collection);
+
+var _requests = require("./requests");
+
+var requests = _interopRequireWildcard(_requests);
+
+var _endpoint = require("./endpoint");
+
+var _endpoint2 = _interopRequireDefault(_endpoint);
+
+function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+/**
+ * Abstract representation of a selected bucket.
+ *
+ */
+let Bucket = class Bucket {
+ /**
+ * Constructor.
+ *
+ * @param {KintoClient} client The client instance.
+ * @param {String} name The bucket name.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ */
+ constructor(client, name, options = {}) {
+ /**
+ * @ignore
+ */
+ this.client = client;
+ /**
+ * The bucket name.
+ * @type {String}
+ */
+ this.name = name;
+ /**
+ * The default options object.
+ * @ignore
+ * @type {Object}
+ */
+ this.options = options;
+ /**
+ * @ignore
+ */
+ this._isBatch = !!options.batch;
+ }
+
+ /**
+ * Merges passed request options with default bucket ones, if any.
+ *
+ * @private
+ * @param {Object} [options={}] The options to merge.
+ * @return {Object} The merged options.
+ */
+ _bucketOptions(options = {}) {
+ const headers = _extends({}, this.options && this.options.headers, options.headers);
+ return _extends({}, this.options, options, {
+ headers,
+ bucket: this.name,
+ batch: this._isBatch
+ });
+ }
+
+ /**
+ * Selects a collection.
+ *
+ * @param {String} name The collection name.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @return {Collection}
+ */
+ collection(name, options = {}) {
+ return new _collection2.default(this.client, this, name, this._bucketOptions(options));
+ }
+
+ /**
+ * Retrieves bucket data.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ getData(options = {}) {
+ return this.client.execute({
+ path: (0, _endpoint2.default)("bucket", this.name),
+ headers: _extends({}, this.options.headers, options.headers)
+ }).then(res => res.data);
+ }
+
+ /**
+ * Set bucket data.
+ * @param {Object} data The bucket data object.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Boolean} [options.patch] The patch option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ setData(data, options = {}) {
+ if (!(0, _utils.isObject)(data)) {
+ throw new Error("A bucket object is required.");
+ }
+
+ const bucket = _extends({}, data, { id: this.name });
+
+ // For the default bucket, we need to drop the id from the data object.
+ // Bug in Kinto < 3.1.1
+ const bucketId = bucket.id;
+ if (bucket.id === "default") {
+ delete bucket.id;
+ }
+
+ const path = (0, _endpoint2.default)("bucket", bucketId);
+ const { permissions } = options;
+ const reqOptions = _extends({}, this._bucketOptions(options));
+ const request = requests.updateRequest(path, { data: bucket, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Retrieves the list of collections in the current bucket.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Array<Object>, Error>}
+ */
+ listCollections(options = {}) {
+ return this.client.execute({
+ path: (0, _endpoint2.default)("collection", this.name),
+ headers: _extends({}, this.options.headers, options.headers)
+ });
+ }
+
+ /**
+ * Creates a new collection in the current bucket.
+ *
+ * @param {String|undefined} id The collection id.
+ * @param {Object} [options={}] The options object.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Object} [options.permissions] The permissions object.
+ * @param {Object} [options.data] The data object.
+ * @return {Promise<Object, Error>}
+ */
+ createCollection(id, options = {}) {
+ const reqOptions = this._bucketOptions(options);
+ const { permissions, data = {} } = reqOptions;
+ data.id = id;
+ const path = (0, _endpoint2.default)("collection", this.name, id);
+ const request = requests.createRequest(path, { data, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Deletes a collection from the current bucket.
+ *
+ * @param {Object|String} collection The collection to delete.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ deleteCollection(collection, options = {}) {
+ const collectionObj = (0, _utils.toDataBody)(collection);
+ if (!collectionObj.id) {
+ throw new Error("A collection id is required.");
+ }
+ const { id, last_modified } = collectionObj;
+ const reqOptions = this._bucketOptions(_extends({ last_modified }, options));
+ const path = (0, _endpoint2.default)("collection", this.name, id);
+ const request = requests.deleteRequest(path, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Retrieves the list of groups in the current bucket.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Array<Object>, Error>}
+ */
+ listGroups(options = {}) {
+ return this.client.execute({
+ path: (0, _endpoint2.default)("group", this.name),
+ headers: _extends({}, this.options.headers, options.headers)
+ });
+ }
+
+ /**
+ * Retrieves a group from the current bucket.
+ *
+ * @param {String} id The group id.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ getGroup(id, options = {}) {
+ return this.client.execute({
+ path: (0, _endpoint2.default)("group", this.name, id),
+ headers: _extends({}, this.options.headers, options.headers)
+ });
+ }
+
+ /**
+ * Creates a new group in the current bucket.
+ *
+ * @param {String|undefined} id The group id.
+ * @param {Array<String>} [members=[]] The list of principals.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.data] The data object.
+ * @param {Object} [options.permissions] The permissions object.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ createGroup(id, members = [], options = {}) {
+ const reqOptions = this._bucketOptions(options);
+ const data = _extends({}, options.data, {
+ id,
+ members
+ });
+ const path = (0, _endpoint2.default)("group", this.name, id);
+ const { permissions } = options;
+ const request = requests.createRequest(path, { data, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Updates an existing group in the current bucket.
+ *
+ * @param {Object} group The group object.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.data] The data object.
+ * @param {Object} [options.permissions] The permissions object.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ updateGroup(group, options = {}) {
+ if (!(0, _utils.isObject)(group)) {
+ throw new Error("A group object is required.");
+ }
+ if (!group.id) {
+ throw new Error("A group id is required.");
+ }
+ const reqOptions = this._bucketOptions(options);
+ const data = _extends({}, options.data, group);
+ const path = (0, _endpoint2.default)("group", this.name, group.id);
+ const { permissions } = options;
+ const request = requests.updateRequest(path, { data, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Deletes a group from the current bucket.
+ *
+ * @param {Object|String} group The group to delete.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ deleteGroup(group, options = {}) {
+ const groupObj = (0, _utils.toDataBody)(group);
+ const { id, last_modified } = groupObj;
+ const reqOptions = this._bucketOptions(_extends({ last_modified }, options));
+ const path = (0, _endpoint2.default)("group", this.name, id);
+ const request = requests.deleteRequest(path, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Retrieves the list of permissions for this bucket.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ getPermissions(options = {}) {
+ return this.client.execute({
+ path: (0, _endpoint2.default)("bucket", this.name),
+ headers: _extends({}, this.options.headers, options.headers)
+ }).then(res => res.permissions);
+ }
+
+ /**
+ * Replaces all existing bucket permissions with the ones provided.
+ *
+ * @param {Object} permissions The permissions object.
+ * @param {Object} [options={}] The options object
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Object} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ setPermissions(permissions, options = {}) {
+ if (!(0, _utils.isObject)(permissions)) {
+ throw new Error("A permissions object is required.");
+ }
+ const path = (0, _endpoint2.default)("bucket", this.name);
+ const reqOptions = _extends({}, this._bucketOptions(options));
+ const { last_modified } = options;
+ const data = { last_modified };
+ const request = requests.updateRequest(path, { data, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Performs batch operations at the current bucket level.
+ *
+ * @param {Function} fn The batch operation function.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Boolean} [options.aggregate] Produces a grouped result object.
+ * @return {Promise<Object, Error>}
+ */
+ batch(fn, options = {}) {
+ return this.client.batch(fn, this._bucketOptions(options));
+ }
+};
+exports.default = Bucket;
+
+},{"./collection":5,"./endpoint":6,"./requests":9,"./utils":10}],5:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = undefined;
+
+var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
+
+var _utils = require("./utils");
+
+var _requests = require("./requests");
+
+var requests = _interopRequireWildcard(_requests);
+
+var _endpoint = require("./endpoint");
+
+var _endpoint2 = _interopRequireDefault(_endpoint);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
+
+/**
+ * Abstract representation of a selected collection.
+ *
+ */
+let Collection = class Collection {
+ /**
+ * Constructor.
+ *
+ * @param {KintoClient} client The client instance.
+ * @param {Bucket} bucket The bucket instance.
+ * @param {String} name The collection name.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ */
+ constructor(client, bucket, name, options = {}) {
+ /**
+ * @ignore
+ */
+ this.client = client;
+ /**
+ * @ignore
+ */
+ this.bucket = bucket;
+ /**
+ * The collection name.
+ * @type {String}
+ */
+ this.name = name;
+
+ /**
+ * The default collection options object, embedding the default bucket ones.
+ * @ignore
+ * @type {Object}
+ */
+ this.options = _extends({}, this.bucket.options, options, {
+ headers: _extends({}, this.bucket.options && this.bucket.options.headers, options.headers)
+ });
+ /**
+ * @ignore
+ */
+ this._isBatch = !!options.batch;
+ }
+
+ /**
+ * Merges passed request options with default bucket and collection ones, if
+ * any.
+ *
+ * @private
+ * @param {Object} [options={}] The options to merge.
+ * @return {Object} The merged options.
+ */
+ _collOptions(options = {}) {
+ const headers = _extends({}, this.options && this.options.headers, options.headers);
+ return _extends({}, this.options, options, {
+ headers
+ });
+ }
+
+ /**
+ * Retrieves collection data.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ getData(options = {}) {
+ const { headers } = this._collOptions(options);
+ return this.client.execute({
+ path: (0, _endpoint2.default)("collection", this.bucket.name, this.name),
+ headers
+ }).then(res => res.data);
+ }
+
+ /**
+ * Set collection data.
+ * @param {Object} data The collection data object.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Boolean} [options.patch] The patch option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ setData(data, options = {}) {
+ if (!(0, _utils.isObject)(data)) {
+ throw new Error("A collection object is required.");
+ }
+ const reqOptions = this._collOptions(options);
+ const { permissions } = reqOptions;
+
+ const path = (0, _endpoint2.default)("collection", this.bucket.name, this.name);
+ const request = requests.updateRequest(path, { data, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Retrieves the list of permissions for this collection.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ getPermissions(options = {}) {
+ const { headers } = this._collOptions(options);
+ return this.client.execute({
+ path: (0, _endpoint2.default)("collection", this.bucket.name, this.name),
+ headers
+ }).then(res => res.permissions);
+ }
+
+ /**
+ * Replaces all existing collection permissions with the ones provided.
+ *
+ * @param {Object} permissions The permissions object.
+ * @param {Object} [options={}] The options object
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ setPermissions(permissions, options = {}) {
+ if (!(0, _utils.isObject)(permissions)) {
+ throw new Error("A permissions object is required.");
+ }
+ const reqOptions = this._collOptions(options);
+ const path = (0, _endpoint2.default)("collection", this.bucket.name, this.name);
+ const data = { last_modified: options.last_modified };
+ const request = requests.updateRequest(path, { data, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Creates a record in the current collection.
+ *
+ * @param {Object} record The record to create.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @return {Promise<Object, Error>}
+ */
+ createRecord(record, options = {}) {
+ const reqOptions = this._collOptions(options);
+ const { permissions } = reqOptions;
+ const path = (0, _endpoint2.default)("record", this.bucket.name, this.name, record.id);
+ const request = requests.createRequest(path, { data: record, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Updates a record in the current collection.
+ *
+ * @param {Object} record The record to update.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ updateRecord(record, options = {}) {
+ if (!(0, _utils.isObject)(record)) {
+ throw new Error("A record object is required.");
+ }
+ if (!record.id) {
+ throw new Error("A record id is required.");
+ }
+ const reqOptions = this._collOptions(options);
+ const { permissions } = reqOptions;
+ const path = (0, _endpoint2.default)("record", this.bucket.name, this.name, record.id);
+ const request = requests.updateRequest(path, { data: record, permissions }, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Deletes a record from the current collection.
+ *
+ * @param {Object|String} record The record to delete.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Number} [options.last_modified] The last_modified option.
+ * @return {Promise<Object, Error>}
+ */
+ deleteRecord(record, options = {}) {
+ const recordObj = (0, _utils.toDataBody)(record);
+ if (!recordObj.id) {
+ throw new Error("A record id is required.");
+ }
+ const { id, last_modified } = recordObj;
+ const reqOptions = this._collOptions(_extends({ last_modified }, options));
+ const path = (0, _endpoint2.default)("record", this.bucket.name, this.name, id);
+ const request = requests.deleteRequest(path, reqOptions);
+ return this.client.execute(request);
+ }
+
+ /**
+ * Retrieves a record from the current collection.
+ *
+ * @param {String} id The record id to retrieve.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @return {Promise<Object, Error>}
+ */
+ getRecord(id, options = {}) {
+ return this.client.execute(_extends({
+ path: (0, _endpoint2.default)("record", this.bucket.name, this.name, id)
+ }, this._collOptions(options)));
+ }
+
+ /**
+ * Lists records from the current collection.
+ *
+ * Sorting is done by passing a `sort` string option:
+ *
+ * - The field to order the results by, prefixed with `-` for descending.
+ * Default: `-last_modified`.
+ *
+ * @see http://kinto.readthedocs.io/en/stable/core/api/resource.html#sorting
+ *
+ * Filtering is done by passing a `filters` option object:
+ *
+ * - `{fieldname: "value"}`
+ * - `{min_fieldname: 4000}`
+ * - `{in_fieldname: "1,2,3"}`
+ * - `{not_fieldname: 0}`
+ * - `{exclude_fieldname: "0,1"}`
+ *
+ * @see http://kinto.readthedocs.io/en/stable/core/api/resource.html#filtering
+ *
+ * Paginating is done by passing a `limit` option, then calling the `next()`
+ * method from the resolved result object to fetch the next page, if any.
+ *
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Object} [options.filters={}] The filters object.
+ * @param {String} [options.sort="-last_modified"] The sort field.
+ * @param {Number} [options.limit=null] The maximum number of records per page.
+ * @param {Number} [options.pages=1] The number of result pages to aggregate.
+ * @param {Number} [options.since=null] Only retrieve records modified since the provided timestamp.
+ * @return {Promise<Object, Error>}
+ */
+ listRecords(options = {}) {
+ const { http } = this.client;
+ const { sort, filters, limit, pages, since } = _extends({
+ sort: "-last_modified"
+ }, options);
+ // Safety/Consistency check on ETag value.
+ if (since && typeof since !== "string") {
+ throw new Error(`Invalid value for since (${ since }), should be ETag value.`);
+ }
+ const collHeaders = this.options.headers;
+ const path = (0, _endpoint2.default)("record", this.bucket.name, this.name);
+ const querystring = (0, _utils.qsify)(_extends({}, filters, {
+ _sort: sort,
+ _limit: limit,
+ _since: since
+ }));
+ let results = [],
+ current = 0;
+
+ const next = function (nextPage) {
+ if (!nextPage) {
+ throw new Error("Pagination exhausted.");
+ }
+ return processNextPage(nextPage);
+ };
+
+ const processNextPage = nextPage => {
+ return http.request(nextPage, { headers: collHeaders }).then(handleResponse);
+ };
+
+ const pageResults = (results, nextPage, etag) => {
+ // ETag string is supposed to be opaque and stored «as-is».
+ // ETag header values are quoted (because of * and W/"foo").
+ return {
+ last_modified: etag ? etag.replace(/"/g, "") : etag,
+ data: results,
+ next: next.bind(null, nextPage)
+ };
+ };
+
+ const handleResponse = ({ headers, json }) => {
+ const nextPage = headers.get("Next-Page");
+ const etag = headers.get("ETag");
+ if (!pages) {
+ return pageResults(json.data, nextPage, etag);
+ }
+ // Aggregate new results with previous ones
+ results = results.concat(json.data);
+ current += 1;
+ if (current >= pages || !nextPage) {
+ // Pagination exhausted
+ return pageResults(results, nextPage, etag);
+ }
+ // Follow next page
+ return processNextPage(nextPage);
+ };
+
+ return this.client.execute(_extends({
+ path: path + "?" + querystring
+ }, this._collOptions(options)), { raw: true }).then(handleResponse);
+ }
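+
+ // Illustrative sketch (assumes `client` is a configured client instance; the
+ // bucket/collection names are hypothetical): filtering, sorting and paginating.
+ //
+ //   client.bucket("blog").collection("posts")
+ //     .listRecords({ sort: "title", filters: { min_views: 100 }, limit: 20 })
+ //     .then(({ data, next, last_modified }) => {
+ //       // `data` holds the first page of records; call `next()` to fetch the
+ //       // following page when one exists.
+ //     });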
+
+ /**
+ * Performs batch operations at the current collection level.
+ *
+ * @param {Function} fn The batch operation function.
+ * @param {Object} [options={}] The options object.
+ * @param {Object} [options.headers] The headers object option.
+ * @param {Boolean} [options.safe] The safe option.
+ * @param {Boolean} [options.aggregate] Produces a grouped result object.
+ * @return {Promise<Object, Error>}
+ */
+ batch(fn, options = {}) {
+ const reqOptions = this._collOptions(options);
+ return this.client.batch(fn, _extends({}, reqOptions, {
+ bucket: this.bucket.name,
+ collection: this.name
+ }));
+ }
+};
+exports.default = Collection;
+
+},{"./endpoint":6,"./requests":9,"./utils":10}],6:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = endpoint;
+/**
+ * Endpoints templates.
+ * @type {Object}
+ */
+const ENDPOINTS = {
+ root: () => "/",
+ batch: () => "/batch",
+ bucket: bucket => "/buckets" + (bucket ? `/${ bucket }` : ""),
+ collection: (bucket, coll) => `${ ENDPOINTS.bucket(bucket) }/collections` + (coll ? `/${ coll }` : ""),
+ group: (bucket, group) => `${ ENDPOINTS.bucket(bucket) }/groups` + (group ? `/${ group }` : ""),
+ record: (bucket, coll, id) => `${ ENDPOINTS.collection(bucket, coll) }/records` + (id ? `/${ id }` : "")
+};
+
+/**
+ * Retrieves a server endpoint by its name.
+ *
+ * @private
+ * @param {String} name The endpoint name.
+ * @param {...string} args The endpoint parameters.
+ * @return {String}
+ */
+function endpoint(name, ...args) {
+ return ENDPOINTS[name](...args);
+}
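+
+// Illustrative examples of the paths generated above (names are hypothetical):
+//
+//   endpoint("bucket", "blog");                  // "/buckets/blog"
+//   endpoint("collection", "blog", "posts");     // "/buckets/blog/collections/posts"
+//   endpoint("record", "blog", "posts", "42");   // "/buckets/blog/collections/posts/records/42"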
+
+},{}],7:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+/**
+ * Kinto server error code descriptors.
+ * @type {Object}
+ */
+exports.default = {
+ 104: "Missing Authorization Token",
+ 105: "Invalid Authorization Token",
+ 106: "Request body was not valid JSON",
+ 107: "Invalid request parameter",
+ 108: "Missing request parameter",
+ 109: "Invalid posted data",
+ 110: "Invalid Token / id",
+ 111: "Missing Token / id",
+ 112: "Content-Length header was not provided",
+ 113: "Request body too large",
+ 114: "Resource was modified meanwhile",
+ 115: "Method not allowed on this end point (hint: server may be readonly)",
+ 116: "Requested version not available on this server",
+ 117: "Client has sent too many requests",
+ 121: "Resource access is forbidden for this user",
+ 122: "Another resource violates constraint",
+ 201: "Service Temporary unavailable due to high load",
+ 202: "Service deprecated",
+ 999: "Internal Server Error"
+};
+
+},{}],8:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = undefined;
+
+var _errors = require("./errors");
+
+var _errors2 = _interopRequireDefault(_errors);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+/**
+ * Enhanced HTTP client for the Kinto protocol.
+ * @private
+ */
+let HTTP = class HTTP {
+ /**
+ * Default HTTP request headers applied to each outgoing request.
+ *
+ * @type {Object}
+ */
+ static get DEFAULT_REQUEST_HEADERS() {
+ return {
+ "Accept": "application/json",
+ "Content-Type": "application/json"
+ };
+ }
+
+ /**
+ * Default options.
+ *
+ * @type {Object}
+ */
+ static get defaultOptions() {
+ return { timeout: 5000, requestMode: "cors" };
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param {EventEmitter} events The event handler.
+ * @param {Object} [options={}] The options object.
+ * @param {Number} [options.timeout=5000] The request timeout in ms (default: `5000`).
+ * @param {String} [options.requestMode="cors"] The HTTP request mode (default: `"cors"`).
+ */
+ constructor(events, options = {}) {
+ // public properties
+ /**
+ * The event emitter instance.
+ * @type {EventEmitter}
+ */
+ if (!events) {
+ throw new Error("No events handler provided");
+ }
+ this.events = events;
+
+ /**
+ * The request mode.
+ * @see https://fetch.spec.whatwg.org/#requestmode
+ * @type {String}
+ */
+ this.requestMode = options.requestMode || HTTP.defaultOptions.requestMode;
+
+ /**
+ * The request timeout.
+ * @type {Number}
+ */
+ this.timeout = options.timeout || HTTP.defaultOptions.timeout;
+ }
+
+ /**
+ * Performs an HTTP request to the Kinto server.
+ *
+ * Resolves with an object containing the following HTTP response properties:
+ * - `{Number} status` The HTTP status code.
+ * - `{Object} json` The JSON response body.
+ * - `{Headers} headers` The response headers object; see the ES6 fetch() spec.
+ *
+ * @param {String} url The URL.
+ * @param {Object} [options={}] The fetch() options object.
+ * @param {Object} [options.headers] The request headers object (default: {})
+ * @return {Promise}
+ */
+ request(url, options = { headers: {} }) {
+ let response, status, statusText, headers, hasTimedout;
+ // Ensure default request headers are always set
+ options.headers = Object.assign({}, HTTP.DEFAULT_REQUEST_HEADERS, options.headers);
+ options.mode = this.requestMode;
+ return new Promise((resolve, reject) => {
+ const _timeoutId = setTimeout(() => {
+ hasTimedout = true;
+ reject(new Error("Request timeout."));
+ }, this.timeout);
+ fetch(url, options).then(res => {
+ if (!hasTimedout) {
+ clearTimeout(_timeoutId);
+ resolve(res);
+ }
+ }).catch(err => {
+ if (!hasTimedout) {
+ clearTimeout(_timeoutId);
+ reject(err);
+ }
+ });
+ }).then(res => {
+ response = res;
+ headers = res.headers;
+ status = res.status;
+ statusText = res.statusText;
+ this._checkForDeprecationHeader(headers);
+ this._checkForBackoffHeader(status, headers);
+ this._checkForRetryAfterHeader(status, headers);
+ return res.text();
+ })
+ // Check if we have a body; if so parse it as JSON.
+ .then(text => {
+ if (text.length === 0) {
+ return null;
+ }
+ // Note: we can't consume the response body twice.
+ return JSON.parse(text);
+ }).catch(err => {
+ const error = new Error(`HTTP ${ status || 0 }; ${ err }`);
+ error.response = response;
+ error.stack = err.stack;
+ throw error;
+ }).then(json => {
+ if (json && status >= 400) {
+ let message = `HTTP ${ status } ${ json.error || "" }: `;
+ if (json.errno && json.errno in _errors2.default) {
+ const errnoMsg = _errors2.default[json.errno];
+ message += errnoMsg;
+ if (json.message && json.message !== errnoMsg) {
+ message += ` (${ json.message })`;
+ }
+ } else {
+ message += statusText || "";
+ }
+ const error = new Error(message.trim());
+ error.response = response;
+ error.data = json;
+ throw error;
+ }
+ return { status, json, headers };
+ });
+ }
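+
+ // Illustrative sketch (assumes `http` is an HTTP instance; the URL is
+ // hypothetical): the resolved object exposes the parsed JSON body alongside
+ // the raw status code and fetch() headers.
+ //
+ //   http.request("https://kinto.example.com/v1/")
+ //     .then(({ status, json, headers }) => {
+ //       console.log(status, headers.get("ETag"), json);
+ //     });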
+
+ _checkForDeprecationHeader(headers) {
+ const alertHeader = headers.get("Alert");
+ if (!alertHeader) {
+ return;
+ }
+ let alert;
+ try {
+ alert = JSON.parse(alertHeader);
+ } catch (err) {
+ console.warn("Unable to parse Alert header message", alertHeader);
+ return;
+ }
+ console.warn(alert.message, alert.url);
+ this.events.emit("deprecated", alert);
+ }
+
+ _checkForBackoffHeader(status, headers) {
+ let backoffMs;
+ const backoffSeconds = parseInt(headers.get("Backoff"), 10);
+ if (backoffSeconds > 0) {
+ backoffMs = new Date().getTime() + backoffSeconds * 1000;
+ } else {
+ backoffMs = 0;
+ }
+ this.events.emit("backoff", backoffMs);
+ }
+
+ _checkForRetryAfterHeader(status, headers) {
+ let retryAfter = headers.get("Retry-After");
+ if (!retryAfter) {
+ return;
+ }
+ retryAfter = new Date().getTime() + parseInt(retryAfter, 10) * 1000;
+ this.events.emit("retry-after", retryAfter);
+ }
+};
+exports.default = HTTP;
+
+},{"./errors":7}],9:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+
+var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
+
+exports.createRequest = createRequest;
+exports.updateRequest = updateRequest;
+exports.deleteRequest = deleteRequest;
+
+var _utils = require("./utils");
+
+const requestDefaults = {
+ safe: false,
+ // check if we should set default content type here
+ headers: {},
+ permissions: undefined,
+ data: undefined,
+ patch: false
+};
+
+/**
+ * @private
+ */
+function safeHeader(safe, last_modified) {
+ if (!safe) {
+ return {};
+ }
+ if (last_modified) {
+ return { "If-Match": `"${ last_modified }"` };
+ }
+ return { "If-None-Match": "*" };
+}
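+
+// Illustrative examples of the concurrency control headers produced:
+//
+//   safeHeader(false);        // {}
+//   safeHeader(true);         // { "If-None-Match": "*" }
+//   safeHeader(true, 1337);   // { "If-Match": "\"1337\"" }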
+
+/**
+ * @private
+ */
+function createRequest(path, { data, permissions }, options = {}) {
+ const { headers, safe } = _extends({}, requestDefaults, options);
+ return {
+ method: data && data.id ? "PUT" : "POST",
+ path,
+ headers: _extends({}, headers, safeHeader(safe)),
+ body: {
+ data,
+ permissions
+ }
+ };
+}
+
+/**
+ * @private
+ */
+function updateRequest(path, { data, permissions }, options = {}) {
+ const {
+ headers,
+ safe,
+ patch
+ } = _extends({}, requestDefaults, options);
+ const { last_modified } = _extends({}, data, options);
+
+ if (Object.keys((0, _utils.omit)(data, "id", "last_modified")).length === 0) {
+ data = undefined;
+ }
+
+ return {
+ method: patch ? "PATCH" : "PUT",
+ path,
+ headers: _extends({}, headers, safeHeader(safe, last_modified)),
+ body: {
+ data,
+ permissions
+ }
+ };
+}
+
+/**
+ * @private
+ */
+function deleteRequest(path, options = {}) {
+ const { headers, safe, last_modified } = _extends({}, requestDefaults, options);
+ if (safe && !last_modified) {
+ throw new Error("Safe concurrency check requires a last_modified value.");
+ }
+ return {
+ method: "DELETE",
+ path,
+ headers: _extends({}, headers, safeHeader(safe, last_modified))
+ };
+}
+
+},{"./utils":10}],10:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.partition = partition;
+exports.pMap = pMap;
+exports.omit = omit;
+exports.toDataBody = toDataBody;
+exports.qsify = qsify;
+exports.checkVersion = checkVersion;
+exports.support = support;
+exports.capable = capable;
+exports.nobatch = nobatch;
+exports.isObject = isObject;
+/**
+ * Chunks an array into n pieces.
+ *
+ * @private
+ * @param {Array} array
+ * @param {Number} n
+ * @return {Array}
+ */
+function partition(array, n) {
+ if (n <= 0) {
+ return array;
+ }
+ return array.reduce((acc, x, i) => {
+ if (i === 0 || i % n === 0) {
+ acc.push([x]);
+ } else {
+ acc[acc.length - 1].push(x);
+ }
+ return acc;
+ }, []);
+}
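+
+// Illustrative example:
+//
+//   partition([1, 2, 3, 4, 5], 2);  // [[1, 2], [3, 4], [5]]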
+
+/**
+ * Maps a list to promises using the provided mapping function, executes them
+ * sequentially, then returns a Promise resolving with the results in order.
+ * Think of this as a sequential Promise.all.
+ *
+ * @private
+ * @param {Array} list The list to map.
+ * @param {Function} fn The mapping function.
+ * @return {Promise}
+ */
+function pMap(list, fn) {
+ let results = [];
+ return list.reduce((promise, entry) => {
+ return promise.then(() => {
+ return Promise.resolve(fn(entry)).then(result => results = results.concat(result));
+ });
+ }, Promise.resolve()).then(() => results);
+}
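+
+// Illustrative example: the mapping function runs for one entry at a time, and
+// the resolved results keep the input order.
+//
+//   pMap([1, 2, 3], x => Promise.resolve(x * 2)).then(results => {
+//     // results == [2, 4, 6]
+//   });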
+
+/**
+ * Takes an object and returns a copy of it with the provided keys omitted.
+ *
+ * @private
+ * @param {Object} obj The source object.
+ * @param {...String} keys The keys to omit.
+ * @return {Object}
+ */
+function omit(obj, ...keys) {
+ return Object.keys(obj).reduce((acc, key) => {
+ if (keys.indexOf(key) === -1) {
+ acc[key] = obj[key];
+ }
+ return acc;
+ }, {});
+}
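+
+// Illustrative example:
+//
+//   omit({ a: 1, b: 2, c: 3 }, "b", "c");  // { a: 1 }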
+
+/**
+ * Always returns a resource data object from the provided argument.
+ *
+ * @private
+ * @param {Object|String} resource
+ * @return {Object}
+ */
+function toDataBody(resource) {
+ if (isObject(resource)) {
+ return resource;
+ }
+ if (typeof resource === "string") {
+ return { id: resource };
+ }
+ throw new Error("Invalid argument.");
+}
+
+/**
+ * Transforms an object into an URL query string, stripping out any undefined
+ * values.
+ *
+ * @param {Object} obj
+ * @return {String}
+ */
+function qsify(obj) {
+ const sep = "&";
+ const encode = v => encodeURIComponent(typeof v === "boolean" ? String(v) : v);
+ const stripUndefined = o => JSON.parse(JSON.stringify(o));
+ const stripped = stripUndefined(obj);
+ return Object.keys(stripped).map(k => {
+ const ks = encode(k) + "=";
+ if (Array.isArray(stripped[k])) {
+ return stripped[k].map(v => ks + encode(v)).join(sep);
+ } else {
+ return ks + encode(stripped[k]);
+ }
+ }).join(sep);
+}
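+
+// Illustrative examples: undefined values are dropped and array values are
+// repeated as separate key=value pairs.
+//
+//   qsify({ _sort: "-last_modified", _limit: 20, _since: undefined });
+//   // => "_sort=-last_modified&_limit=20"
+//   qsify({ in_status: ["draft", "published"] });
+//   // => "in_status=draft&in_status=published"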
+
+/**
+ * Checks if a version is within the provided range.
+ *
+ * @param {String} version The version to check.
+ * @param {String} minVersion The minimum supported version (inclusive).
+ * @param {String} maxVersion The maximum supported version (exclusive).
+ * @throws {Error} If the version is outside of the provided range.
+ */
+function checkVersion(version, minVersion, maxVersion) {
+ const extract = str => str.split(".").map(x => parseInt(x, 10));
+ const [verMajor, verMinor] = extract(version);
+ const [minMajor, minMinor] = extract(minVersion);
+ const [maxMajor, maxMinor] = extract(maxVersion);
+ const checks = [verMajor < minMajor, verMajor === minMajor && verMinor < minMinor, verMajor > maxMajor, verMajor === maxMajor && verMinor >= maxMinor];
+ if (checks.some(x => x)) {
+ throw new Error(`Version ${ version } doesn't satisfy ` + `${ minVersion } <= x < ${ maxVersion }`);
+ }
+}
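+
+// Illustrative examples:
+//
+//   checkVersion("1.5", "1.0", "2.0");  // passes: 1.0 <= 1.5 < 2.0
+//   checkVersion("2.0", "1.0", "2.0");  // throws: 2.0 is outside the range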
+
+/**
+ * Generates a decorator function ensuring a version check is performed against
+ * the provided requirements before executing it.
+ *
+ * @param {String} min The required min version (inclusive).
+ * @param {String} max The required max version (exclusive).
+ * @return {Function}
+ */
+function support(min, max) {
+ return function (target, key, descriptor) {
+ const fn = descriptor.value;
+ return {
+ configurable: true,
+ get() {
+ const wrappedMethod = (...args) => {
+ // "this" is the current instance which its method is decorated.
+ const client = "client" in this ? this.client : this;
+ return client.fetchHTTPApiVersion().then(version => checkVersion(version, min, max)).then(Promise.resolve(fn.apply(this, args)));
+ };
+ Object.defineProperty(this, key, {
+ value: wrappedMethod,
+ configurable: true,
+ writable: true
+ });
+ return wrappedMethod;
+ }
+ };
+ };
+}
+
+/**
+ * Generates a decorator function ensuring that the specified capabilities are
+ * available on the server before executing it.
+ *
+ * @param {Array<String>} capabilities The required capabilities.
+ * @return {Function}
+ */
+function capable(capabilities) {
+ return function (target, key, descriptor) {
+ const fn = descriptor.value;
+ return {
+ configurable: true,
+ get() {
+ const wrappedMethod = (...args) => {
+ // "this" is the current instance which its method is decorated.
+ const client = "client" in this ? this.client : this;
+ return client.fetchServerCapabilities().then(available => {
+ const missing = capabilities.filter(c => available.indexOf(c) < 0);
+ if (missing.length > 0) {
+ throw new Error(`Required capabilities ${ missing.join(", ") } ` + "not present on server");
+ }
+ }).then(Promise.resolve(fn.apply(this, args)));
+ };
+ Object.defineProperty(this, key, {
+ value: wrappedMethod,
+ configurable: true,
+ writable: true
+ });
+ return wrappedMethod;
+ }
+ };
+ };
+}
+
+/**
+ * Generates a decorator function ensuring an operation is not performed from
+ * within a batch request.
+ *
+ * @param {String} message The error message to throw.
+ * @return {Function}
+ */
+function nobatch(message) {
+ return function (target, key, descriptor) {
+ const fn = descriptor.value;
+ return {
+ configurable: true,
+ get() {
+ const wrappedMethod = (...args) => {
+ // "this" is the current instance which its method is decorated.
+ if (this._isBatch) {
+ throw new Error(message);
+ }
+ return fn.apply(this, args);
+ };
+ Object.defineProperty(this, key, {
+ value: wrappedMethod,
+ configurable: true,
+ writable: true
+ });
+ return wrappedMethod;
+ }
+ };
+ };
+}
+
+/**
+ * Returns true if the specified value is an object (i.e. not an array nor null).
+ * @param {Object} thing The value to inspect.
+ * @return {bool}
+ */
+function isObject(thing) {
+ return typeof thing === "object" && thing !== null && !Array.isArray(thing);
+}
+
+},{}]},{},[1])(1)
+});
\ No newline at end of file
diff --git a/services/common/kinto-offline-client.js b/services/common/kinto-offline-client.js
new file mode 100644
index 000000000..4d0dbd0f3
--- /dev/null
+++ b/services/common/kinto-offline-client.js
@@ -0,0 +1,4286 @@
+/*
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This file is generated from kinto.js - do not modify directly.
+ */
+
+this.EXPORTED_SYMBOLS = ["loadKinto"];
+
+/*
+ * Version 5.1.0 - 8beb61d
+ */
+
+(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.loadKinto = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+
+var _extends2 = require("babel-runtime/helpers/extends");
+
+var _extends3 = _interopRequireDefault(_extends2);
+
+var _stringify = require("babel-runtime/core-js/json/stringify");
+
+var _stringify2 = _interopRequireDefault(_stringify);
+
+var _promise = require("babel-runtime/core-js/promise");
+
+var _promise2 = _interopRequireDefault(_promise);
+
+exports.reduceRecords = reduceRecords;
+
+var _base = require("../src/adapters/base");
+
+var _base2 = _interopRequireDefault(_base);
+
+var _utils = require("../src/utils");
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Components.utils.import("resource://gre/modules/Sqlite.jsm");
+Components.utils.import("resource://gre/modules/Task.jsm");
+
+const SQLITE_PATH = "kinto.sqlite";
+
+const statements = {
+ "createCollectionData": `
+ CREATE TABLE collection_data (
+ collection_name TEXT,
+ record_id TEXT,
+ record TEXT
+ );`,
+
+ "createCollectionMetadata": `
+ CREATE TABLE collection_metadata (
+ collection_name TEXT PRIMARY KEY,
+ last_modified INTEGER
+ ) WITHOUT ROWID;`,
+
+ "createCollectionDataRecordIdIndex": `
+ CREATE UNIQUE INDEX unique_collection_record
+ ON collection_data(collection_name, record_id);`,
+
+ "clearData": `
+ DELETE FROM collection_data
+ WHERE collection_name = :collection_name;`,
+
+ "createData": `
+ INSERT INTO collection_data (collection_name, record_id, record)
+ VALUES (:collection_name, :record_id, :record);`,
+
+ "updateData": `
+ INSERT OR REPLACE INTO collection_data (collection_name, record_id, record)
+ VALUES (:collection_name, :record_id, :record);`,
+
+ "deleteData": `
+ DELETE FROM collection_data
+ WHERE collection_name = :collection_name
+ AND record_id = :record_id;`,
+
+ "saveLastModified": `
+ REPLACE INTO collection_metadata (collection_name, last_modified)
+ VALUES (:collection_name, :last_modified);`,
+
+ "getLastModified": `
+ SELECT last_modified
+ FROM collection_metadata
+ WHERE collection_name = :collection_name;`,
+
+ "getRecord": `
+ SELECT record
+ FROM collection_data
+ WHERE collection_name = :collection_name
+ AND record_id = :record_id;`,
+
+ "listRecords": `
+ SELECT record
+ FROM collection_data
+ WHERE collection_name = :collection_name;`,
+
+ // N.B. we have to have a dynamic number of placeholders, which you
+ // can't do without building your own statement. See `execute` for details
+ "listRecordsById": `
+ SELECT record_id, record
+ FROM collection_data
+ WHERE collection_name = ?
+ AND record_id IN `,
+
+ "importData": `
+ REPLACE INTO collection_data (collection_name, record_id, record)
+ VALUES (:collection_name, :record_id, :record);`,
+
+ "scanAllRecords": `SELECT * FROM collection_data;`,
+
+ "clearCollectionMetadata": `DELETE FROM collection_metadata;`
+};
+
+const createStatements = ["createCollectionData", "createCollectionMetadata", "createCollectionDataRecordIdIndex"];
+
+const currentSchemaVersion = 1;
+
+/**
+ * Firefox adapter.
+ *
+ * Uses Sqlite as a backing store.
+ *
+ * Options:
+ * - path: the filename/path for the Sqlite database. If absent, use SQLITE_PATH.
+ */
+class FirefoxAdapter extends _base2.default {
+ constructor(collection, options = {}) {
+ super();
+ const { sqliteHandle = null } = options;
+ this.collection = collection;
+ this._connection = sqliteHandle;
+ this._options = options;
+ }
+
+ // We need to be capable of calling this from "outside" the adapter
+ // so that someone can initialize a connection and pass it to us in
+ // adapterOptions.
+ static _init(connection) {
+ return Task.spawn(function* () {
+ yield connection.executeTransaction(function* doSetup() {
+ const schema = yield connection.getSchemaVersion();
+
+ if (schema == 0) {
+
+ for (let statementName of createStatements) {
+ yield connection.execute(statements[statementName]);
+ }
+
+ yield connection.setSchemaVersion(currentSchemaVersion);
+ } else if (schema != 1) {
+ throw new Error("Unknown database schema: " + schema);
+ }
+ });
+ return connection;
+ });
+ }
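+
+ // Illustrative sketch (the collection name is hypothetical): open one shared
+ // Sqlite connection, run the schema setup above, then hand the handle to each
+ // adapter through the `sqliteHandle` option.
+ //
+ //   Sqlite.openConnection({ path: "kinto.sqlite" })
+ //     .then(FirefoxAdapter._init)
+ //     .then(sqliteHandle => {
+ //       const adapter = new FirefoxAdapter("changes", { sqliteHandle });
+ //       // ... use the adapter, then close the shared connection when done.
+ //     });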
+
+ _executeStatement(statement, params) {
+ if (!this._connection) {
+ throw new Error("The storage adapter is not open");
+ }
+ return this._connection.executeCached(statement, params);
+ }
+
+ open() {
+ const self = this;
+ return Task.spawn(function* () {
+ if (!self._connection) {
+ const path = self._options.path || SQLITE_PATH;
+ const opts = { path, sharedMemoryCache: false };
+ self._connection = yield Sqlite.openConnection(opts).then(FirefoxAdapter._init);
+ }
+ });
+ }
+
+ close() {
+ if (this._connection) {
+ const promise = this._connection.close();
+ this._connection = null;
+ return promise;
+ }
+ return _promise2.default.resolve();
+ }
+
+ clear() {
+ const params = { collection_name: this.collection };
+ return this._executeStatement(statements.clearData, params);
+ }
+
+ execute(callback, options = { preload: [] }) {
+ if (!this._connection) {
+ throw new Error("The storage adapter is not open");
+ }
+
+ let result;
+ const conn = this._connection;
+ const collection = this.collection;
+
+ return conn.executeTransaction(function* doExecuteTransaction() {
+ // Preload specified records from DB, within transaction.
+ const parameters = [collection, ...options.preload];
+ const placeholders = options.preload.map(_ => "?");
+ const stmt = statements.listRecordsById + "(" + placeholders.join(",") + ");";
+ const rows = yield conn.execute(stmt, parameters);
+
+ const preloaded = rows.reduce((acc, row) => {
+ const record = JSON.parse(row.getResultByName("record"));
+ acc[row.getResultByName("record_id")] = record;
+ return acc;
+ }, {});
+
+ const proxy = transactionProxy(collection, preloaded);
+ result = callback(proxy);
+
+ for (let { statement, params } of proxy.operations) {
+ yield conn.executeCached(statement, params);
+ }
+ }, conn.TRANSACTION_EXCLUSIVE).then(_ => result);
+ }
+
+ get(id) {
+ const params = {
+ collection_name: this.collection,
+ record_id: id
+ };
+ return this._executeStatement(statements.getRecord, params).then(result => {
+ if (result.length == 0) {
+ return;
+ }
+ return JSON.parse(result[0].getResultByName("record"));
+ });
+ }
+
+ list(params = { filters: {}, order: "" }) {
+ const parameters = {
+ collection_name: this.collection
+ };
+ return this._executeStatement(statements.listRecords, parameters).then(result => {
+ const records = [];
+ for (let k = 0; k < result.length; k++) {
+ const row = result[k];
+ records.push(JSON.parse(row.getResultByName("record")));
+ }
+ return records;
+ }).then(results => {
+ // The resulting list of records is filtered and sorted.
+ // XXX: with some effort, this could be implemented in SQL.
+ return reduceRecords(params.filters, params.order, results);
+ });
+ }
+
+ /**
+ * Load a list of records into the local database.
+ *
+ * Note: The adapter is not in charge of filtering the already imported
+ * records. This is done in `Collection#loadDump()`, as a behaviour common
+ * to all adapters.
+ *
+ * @param {Array} records The records to import.
+ * @return {Array} imported records.
+ */
+ loadDump(records) {
+ const connection = this._connection;
+ const collection_name = this.collection;
+ return Task.spawn(function* () {
+ yield connection.executeTransaction(function* doImport() {
+ for (let record of records) {
+ const params = {
+ collection_name: collection_name,
+ record_id: record.id,
+ record: (0, _stringify2.default)(record)
+ };
+ yield connection.execute(statements.importData, params);
+ }
+ const lastModified = Math.max(...records.map(record => record.last_modified));
+ const params = {
+ collection_name: collection_name
+ };
+ const previousLastModified = yield connection.execute(statements.getLastModified, params).then(result => {
+ return result.length > 0 ? result[0].getResultByName("last_modified") : -1;
+ });
+ if (lastModified > previousLastModified) {
+ const params = {
+ collection_name: collection_name,
+ last_modified: lastModified
+ };
+ yield connection.execute(statements.saveLastModified, params);
+ }
+ });
+ return records;
+ });
+ }
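+
+ // Example (illustrative sketch; the record contents are placeholders): each
+ // record needs an `id` and a `last_modified` value, since the highest
+ // timestamp seen is persisted as the collection's last_modified above.
+ //
+ //   yield adapter.loadDump([
+ //     { id: "abc-123", last_modified: 1485000000000, enabled: true },
+ //     { id: "def-456", last_modified: 1485000001000, enabled: false }
+ //   ]);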
+
+ saveLastModified(lastModified) {
+ const parsedLastModified = parseInt(lastModified, 10) || null;
+ const params = {
+ collection_name: this.collection,
+ last_modified: parsedLastModified
+ };
+ return this._executeStatement(statements.saveLastModified, params).then(() => parsedLastModified);
+ }
+
+ getLastModified() {
+ const params = {
+ collection_name: this.collection
+ };
+ return this._executeStatement(statements.getLastModified, params).then(result => {
+ if (result.length == 0) {
+ return 0;
+ }
+ return result[0].getResultByName("last_modified");
+ });
+ }
+
+ /**
+ * Reset the sync status of every record and collection we have
+ * access to.
+ */
+ resetSyncStatus() {
+ // We're going to use execute instead of executeCached, so build
+ // in our own sanity check
+ if (!this._connection) {
+ throw new Error("The storage adapter is not open");
+ }
+
+ return this._connection.executeTransaction(function* (conn) {
+ const promises = [];
+ yield conn.execute(statements.scanAllRecords, null, function (row) {
+ const record = JSON.parse(row.getResultByName("record"));
+ const record_id = row.getResultByName("record_id");
+ const collection_name = row.getResultByName("collection_name");
+ if (record._status === "deleted") {
+ // Garbage collect deleted records.
+ promises.push(conn.execute(statements.deleteData, { collection_name, record_id }));
+ } else {
+ const newRecord = (0, _extends3.default)({}, record, {
+ _status: "created",
+ last_modified: undefined
+ });
+ promises.push(conn.execute(statements.updateData, { record: (0, _stringify2.default)(newRecord), record_id, collection_name }));
+ }
+ });
+ yield _promise2.default.all(promises);
+ yield conn.execute(statements.clearCollectionMetadata);
+ });
+ }
+}
+
+exports.default = FirefoxAdapter;
+function transactionProxy(collection, preloaded) {
+ const _operations = [];
+
+ return {
+ get operations() {
+ return _operations;
+ },
+
+ create(record) {
+ _operations.push({
+ statement: statements.createData,
+ params: {
+ collection_name: collection,
+ record_id: record.id,
+ record: (0, _stringify2.default)(record)
+ }
+ });
+ },
+
+ update(record) {
+ _operations.push({
+ statement: statements.updateData,
+ params: {
+ collection_name: collection,
+ record_id: record.id,
+ record: (0, _stringify2.default)(record)
+ }
+ });
+ },
+
+ delete(id) {
+ _operations.push({
+ statement: statements.deleteData,
+ params: {
+ collection_name: collection,
+ record_id: id
+ }
+ });
+ },
+
+ get(id) {
+ // Gecko JS engine outputs undesired warnings if id is not in preloaded.
+ return id in preloaded ? preloaded[id] : undefined;
+ }
+ };
+}
+
+/**
+ * Filter and sort list against provided filters and order.
+ *
+ * @param {Object} filters The filters to apply.
+ * @param {String} order The order to apply.
+ * @param {Array} list The list to reduce.
+ * @return {Array}
+ */
+function reduceRecords(filters, order, list) {
+ const filtered = filters ? (0, _utils.filterObjects)(filters, list) : list;
+ return order ? (0, _utils.sortObjects)(order, filtered) : filtered;
+}
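+
+// Example (illustrative sketch; the field names are placeholders): `filters`
+// maps field names to expected values, and `order` names the sort field, with
+// a leading "-" for descending order.
+//
+//   reduceRecords({ enabled: true }, "-last_modified", records);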
+
+},{"../src/adapters/base":85,"../src/utils":87,"babel-runtime/core-js/json/stringify":3,"babel-runtime/core-js/promise":6,"babel-runtime/helpers/extends":8}],2:[function(require,module,exports){
+/*
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+
+var _extends2 = require("babel-runtime/helpers/extends");
+
+var _extends3 = _interopRequireDefault(_extends2);
+
+exports.default = loadKinto;
+
+var _base = require("../src/adapters/base");
+
+var _base2 = _interopRequireDefault(_base);
+
+var _KintoBase = require("../src/KintoBase");
+
+var _KintoBase2 = _interopRequireDefault(_KintoBase);
+
+var _FirefoxStorage = require("./FirefoxStorage");
+
+var _FirefoxStorage2 = _interopRequireDefault(_FirefoxStorage);
+
+var _utils = require("../src/utils");
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
+
+function loadKinto() {
+ const { EventEmitter } = Cu.import("resource://devtools/shared/event-emitter.js", {});
+ const { generateUUID } = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
+
+ // Use the standalone kinto-http module that landed in Firefox.
+ const { KintoHttpClient } = Cu.import("resource://services-common/kinto-http-client.js");
+
+ Cu.import("resource://gre/modules/Timer.jsm");
+ Cu.importGlobalProperties(['fetch']);
+
+ // Leverage Gecko service to generate UUIDs.
+ function makeIDSchema() {
+ return {
+ validate: _utils.RE_UUID.test.bind(_utils.RE_UUID),
+ generate: function () {
+ return generateUUID().toString().replace(/[{}]/g, "");
+ }
+ };
+ }
+
+ class KintoFX extends _KintoBase2.default {
+ static get adapters() {
+ return {
+ BaseAdapter: _base2.default,
+ FirefoxAdapter: _FirefoxStorage2.default
+ };
+ }
+
+ constructor(options = {}) {
+ const emitter = {};
+ EventEmitter.decorate(emitter);
+
+ const defaults = {
+ events: emitter,
+ ApiClass: KintoHttpClient,
+ adapter: _FirefoxStorage2.default
+ };
+
+ const expandedOptions = (0, _extends3.default)({}, defaults, options);
+ super(expandedOptions);
+ }
+
+ collection(collName, options = {}) {
+ const idSchema = makeIDSchema();
+ const expandedOptions = (0, _extends3.default)({ idSchema }, options);
+ return super.collection(collName, expandedOptions);
+ }
+ }
+
+ return KintoFX;
+}
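+
+// Example (illustrative sketch; bucket and collection names are placeholders):
+// once a consumer has obtained `loadKinto` from this module, the returned
+// class already wires in FirefoxAdapter and KintoHttpClient by default.
+//
+//   const Kinto = loadKinto();
+//   const kinto = new Kinto({ remote: "https://example.org/v1", bucket: "blocklists" });
+//   const certificates = kinto.collection("certificates");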
+
+// This fixes compatibility with CommonJS required by browserify.
+// See http://stackoverflow.com/questions/33505992/babel-6-changes-how-it-exports-default/33683495#33683495
+if (typeof module === "object") {
+ module.exports = loadKinto;
+}
+
+},{"../src/KintoBase":83,"../src/adapters/base":85,"../src/utils":87,"./FirefoxStorage":1,"babel-runtime/helpers/extends":8}],3:[function(require,module,exports){
+module.exports = { "default": require("core-js/library/fn/json/stringify"), __esModule: true };
+},{"core-js/library/fn/json/stringify":10}],4:[function(require,module,exports){
+module.exports = { "default": require("core-js/library/fn/object/assign"), __esModule: true };
+},{"core-js/library/fn/object/assign":11}],5:[function(require,module,exports){
+module.exports = { "default": require("core-js/library/fn/object/keys"), __esModule: true };
+},{"core-js/library/fn/object/keys":12}],6:[function(require,module,exports){
+module.exports = { "default": require("core-js/library/fn/promise"), __esModule: true };
+},{"core-js/library/fn/promise":13}],7:[function(require,module,exports){
+"use strict";
+
+exports.__esModule = true;
+
+var _promise = require("../core-js/promise");
+
+var _promise2 = _interopRequireDefault(_promise);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+exports.default = function (fn) {
+ return function () {
+ var gen = fn.apply(this, arguments);
+ return new _promise2.default(function (resolve, reject) {
+ function step(key, arg) {
+ try {
+ var info = gen[key](arg);
+ var value = info.value;
+ } catch (error) {
+ reject(error);
+ return;
+ }
+
+ if (info.done) {
+ resolve(value);
+ } else {
+ return _promise2.default.resolve(value).then(function (value) {
+ return step("next", value);
+ }, function (err) {
+ return step("throw", err);
+ });
+ }
+ }
+
+ return step("next");
+ });
+ };
+};
+},{"../core-js/promise":6}],8:[function(require,module,exports){
+"use strict";
+
+exports.__esModule = true;
+
+var _assign = require("../core-js/object/assign");
+
+var _assign2 = _interopRequireDefault(_assign);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+exports.default = _assign2.default || function (target) {
+ for (var i = 1; i < arguments.length; i++) {
+ var source = arguments[i];
+
+ for (var key in source) {
+ if (Object.prototype.hasOwnProperty.call(source, key)) {
+ target[key] = source[key];
+ }
+ }
+ }
+
+ return target;
+};
+},{"../core-js/object/assign":4}],9:[function(require,module,exports){
+
+},{}],10:[function(require,module,exports){
+var core = require('../../modules/_core')
+ , $JSON = core.JSON || (core.JSON = {stringify: JSON.stringify});
+module.exports = function stringify(it){ // eslint-disable-line no-unused-vars
+ return $JSON.stringify.apply($JSON, arguments);
+};
+},{"../../modules/_core":21}],11:[function(require,module,exports){
+require('../../modules/es6.object.assign');
+module.exports = require('../../modules/_core').Object.assign;
+},{"../../modules/_core":21,"../../modules/es6.object.assign":77}],12:[function(require,module,exports){
+require('../../modules/es6.object.keys');
+module.exports = require('../../modules/_core').Object.keys;
+},{"../../modules/_core":21,"../../modules/es6.object.keys":78}],13:[function(require,module,exports){
+require('../modules/es6.object.to-string');
+require('../modules/es6.string.iterator');
+require('../modules/web.dom.iterable');
+require('../modules/es6.promise');
+module.exports = require('../modules/_core').Promise;
+},{"../modules/_core":21,"../modules/es6.object.to-string":79,"../modules/es6.promise":80,"../modules/es6.string.iterator":81,"../modules/web.dom.iterable":82}],14:[function(require,module,exports){
+module.exports = function(it){
+ if(typeof it != 'function')throw TypeError(it + ' is not a function!');
+ return it;
+};
+},{}],15:[function(require,module,exports){
+module.exports = function(){ /* empty */ };
+},{}],16:[function(require,module,exports){
+module.exports = function(it, Constructor, name, forbiddenField){
+ if(!(it instanceof Constructor) || (forbiddenField !== undefined && forbiddenField in it)){
+ throw TypeError(name + ': incorrect invocation!');
+ } return it;
+};
+},{}],17:[function(require,module,exports){
+var isObject = require('./_is-object');
+module.exports = function(it){
+ if(!isObject(it))throw TypeError(it + ' is not an object!');
+ return it;
+};
+},{"./_is-object":38}],18:[function(require,module,exports){
+// false -> Array#indexOf
+// true -> Array#includes
+var toIObject = require('./_to-iobject')
+ , toLength = require('./_to-length')
+ , toIndex = require('./_to-index');
+module.exports = function(IS_INCLUDES){
+ return function($this, el, fromIndex){
+ var O = toIObject($this)
+ , length = toLength(O.length)
+ , index = toIndex(fromIndex, length)
+ , value;
+ // Array#includes uses SameValueZero equality algorithm
+ if(IS_INCLUDES && el != el)while(length > index){
+ value = O[index++];
+ if(value != value)return true;
+ // Array#indexOf ignores holes, Array#includes does not
+ } else for(;length > index; index++)if(IS_INCLUDES || index in O){
+ if(O[index] === el)return IS_INCLUDES || index || 0;
+ } return !IS_INCLUDES && -1;
+ };
+};
+},{"./_to-index":67,"./_to-iobject":69,"./_to-length":70}],19:[function(require,module,exports){
+// getting tag from 19.1.3.6 Object.prototype.toString()
+var cof = require('./_cof')
+ , TAG = require('./_wks')('toStringTag')
+ // ES3 wrong here
+ , ARG = cof(function(){ return arguments; }()) == 'Arguments';
+
+// fallback for IE11 Script Access Denied error
+var tryGet = function(it, key){
+ try {
+ return it[key];
+ } catch(e){ /* empty */ }
+};
+
+module.exports = function(it){
+ var O, T, B;
+ return it === undefined ? 'Undefined' : it === null ? 'Null'
+ // @@toStringTag case
+ : typeof (T = tryGet(O = Object(it), TAG)) == 'string' ? T
+ // builtinTag case
+ : ARG ? cof(O)
+ // ES3 arguments fallback
+ : (B = cof(O)) == 'Object' && typeof O.callee == 'function' ? 'Arguments' : B;
+};
+},{"./_cof":20,"./_wks":74}],20:[function(require,module,exports){
+var toString = {}.toString;
+
+module.exports = function(it){
+ return toString.call(it).slice(8, -1);
+};
+},{}],21:[function(require,module,exports){
+var core = module.exports = {version: '2.4.0'};
+if(typeof __e == 'number')__e = core; // eslint-disable-line no-undef
+},{}],22:[function(require,module,exports){
+// optional / simple context binding
+var aFunction = require('./_a-function');
+module.exports = function(fn, that, length){
+ aFunction(fn);
+ if(that === undefined)return fn;
+ switch(length){
+ case 1: return function(a){
+ return fn.call(that, a);
+ };
+ case 2: return function(a, b){
+ return fn.call(that, a, b);
+ };
+ case 3: return function(a, b, c){
+ return fn.call(that, a, b, c);
+ };
+ }
+ return function(/* ...args */){
+ return fn.apply(that, arguments);
+ };
+};
+},{"./_a-function":14}],23:[function(require,module,exports){
+// 7.2.1 RequireObjectCoercible(argument)
+module.exports = function(it){
+ if(it == undefined)throw TypeError("Can't call method on " + it);
+ return it;
+};
+},{}],24:[function(require,module,exports){
+ // Thanks, IE8, for your funny defineProperty
+module.exports = !require('./_fails')(function(){
+ return Object.defineProperty({}, 'a', {get: function(){ return 7; }}).a != 7;
+});
+},{"./_fails":28}],25:[function(require,module,exports){
+var isObject = require('./_is-object')
+ , document = require('./_global').document
+ // in old IE typeof document.createElement is 'object'
+ , is = isObject(document) && isObject(document.createElement);
+module.exports = function(it){
+ return is ? document.createElement(it) : {};
+};
+},{"./_global":30,"./_is-object":38}],26:[function(require,module,exports){
+// IE 8- don't enum bug keys
+module.exports = (
+ 'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf'
+).split(',');
+},{}],27:[function(require,module,exports){
+var global = require('./_global')
+ , core = require('./_core')
+ , ctx = require('./_ctx')
+ , hide = require('./_hide')
+ , PROTOTYPE = 'prototype';
+
+var $export = function(type, name, source){
+ var IS_FORCED = type & $export.F
+ , IS_GLOBAL = type & $export.G
+ , IS_STATIC = type & $export.S
+ , IS_PROTO = type & $export.P
+ , IS_BIND = type & $export.B
+ , IS_WRAP = type & $export.W
+ , exports = IS_GLOBAL ? core : core[name] || (core[name] = {})
+ , expProto = exports[PROTOTYPE]
+ , target = IS_GLOBAL ? global : IS_STATIC ? global[name] : (global[name] || {})[PROTOTYPE]
+ , key, own, out;
+ if(IS_GLOBAL)source = name;
+ for(key in source){
+ // contains in native
+ own = !IS_FORCED && target && target[key] !== undefined;
+ if(own && key in exports)continue;
+ // export native or passed
+ out = own ? target[key] : source[key];
+ // prevent global pollution for namespaces
+ exports[key] = IS_GLOBAL && typeof target[key] != 'function' ? source[key]
+ // bind timers to global for call from export context
+ : IS_BIND && own ? ctx(out, global)
+ // wrap global constructors for prevent change them in library
+ : IS_WRAP && target[key] == out ? (function(C){
+ var F = function(a, b, c){
+ if(this instanceof C){
+ switch(arguments.length){
+ case 0: return new C;
+ case 1: return new C(a);
+ case 2: return new C(a, b);
+ } return new C(a, b, c);
+ } return C.apply(this, arguments);
+ };
+ F[PROTOTYPE] = C[PROTOTYPE];
+ return F;
+ // make static versions for prototype methods
+ })(out) : IS_PROTO && typeof out == 'function' ? ctx(Function.call, out) : out;
+ // export proto methods to core.%CONSTRUCTOR%.methods.%NAME%
+ if(IS_PROTO){
+ (exports.virtual || (exports.virtual = {}))[key] = out;
+ // export proto methods to core.%CONSTRUCTOR%.prototype.%NAME%
+ if(type & $export.R && expProto && !expProto[key])hide(expProto, key, out);
+ }
+ }
+};
+// type bitmap
+$export.F = 1; // forced
+$export.G = 2; // global
+$export.S = 4; // static
+$export.P = 8; // proto
+$export.B = 16; // bind
+$export.W = 32; // wrap
+$export.U = 64; // safe
+$export.R = 128; // real proto method for `library`
+module.exports = $export;
+},{"./_core":21,"./_ctx":22,"./_global":30,"./_hide":32}],28:[function(require,module,exports){
+module.exports = function(exec){
+ try {
+ return !!exec();
+ } catch(e){
+ return true;
+ }
+};
+},{}],29:[function(require,module,exports){
+var ctx = require('./_ctx')
+ , call = require('./_iter-call')
+ , isArrayIter = require('./_is-array-iter')
+ , anObject = require('./_an-object')
+ , toLength = require('./_to-length')
+ , getIterFn = require('./core.get-iterator-method')
+ , BREAK = {}
+ , RETURN = {};
+var exports = module.exports = function(iterable, entries, fn, that, ITERATOR){
+ var iterFn = ITERATOR ? function(){ return iterable; } : getIterFn(iterable)
+ , f = ctx(fn, that, entries ? 2 : 1)
+ , index = 0
+ , length, step, iterator, result;
+ if(typeof iterFn != 'function')throw TypeError(iterable + ' is not iterable!');
+ // fast case for arrays with default iterator
+ if(isArrayIter(iterFn))for(length = toLength(iterable.length); length > index; index++){
+ result = entries ? f(anObject(step = iterable[index])[0], step[1]) : f(iterable[index]);
+ if(result === BREAK || result === RETURN)return result;
+ } else for(iterator = iterFn.call(iterable); !(step = iterator.next()).done; ){
+ result = call(iterator, f, step.value, entries);
+ if(result === BREAK || result === RETURN)return result;
+ }
+};
+exports.BREAK = BREAK;
+exports.RETURN = RETURN;
+},{"./_an-object":17,"./_ctx":22,"./_is-array-iter":37,"./_iter-call":39,"./_to-length":70,"./core.get-iterator-method":75}],30:[function(require,module,exports){
+// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
+var global = module.exports = typeof window != 'undefined' && window.Math == Math
+ ? window : typeof self != 'undefined' && self.Math == Math ? self : Function('return this')();
+if(typeof __g == 'number')__g = global; // eslint-disable-line no-undef
+},{}],31:[function(require,module,exports){
+var hasOwnProperty = {}.hasOwnProperty;
+module.exports = function(it, key){
+ return hasOwnProperty.call(it, key);
+};
+},{}],32:[function(require,module,exports){
+var dP = require('./_object-dp')
+ , createDesc = require('./_property-desc');
+module.exports = require('./_descriptors') ? function(object, key, value){
+ return dP.f(object, key, createDesc(1, value));
+} : function(object, key, value){
+ object[key] = value;
+ return object;
+};
+},{"./_descriptors":24,"./_object-dp":49,"./_property-desc":57}],33:[function(require,module,exports){
+module.exports = require('./_global').document && document.documentElement;
+},{"./_global":30}],34:[function(require,module,exports){
+module.exports = !require('./_descriptors') && !require('./_fails')(function(){
+ return Object.defineProperty(require('./_dom-create')('div'), 'a', {get: function(){ return 7; }}).a != 7;
+});
+},{"./_descriptors":24,"./_dom-create":25,"./_fails":28}],35:[function(require,module,exports){
+// fast apply, http://jsperf.lnkit.com/fast-apply/5
+module.exports = function(fn, args, that){
+ var un = that === undefined;
+ switch(args.length){
+ case 0: return un ? fn()
+ : fn.call(that);
+ case 1: return un ? fn(args[0])
+ : fn.call(that, args[0]);
+ case 2: return un ? fn(args[0], args[1])
+ : fn.call(that, args[0], args[1]);
+ case 3: return un ? fn(args[0], args[1], args[2])
+ : fn.call(that, args[0], args[1], args[2]);
+ case 4: return un ? fn(args[0], args[1], args[2], args[3])
+ : fn.call(that, args[0], args[1], args[2], args[3]);
+ } return fn.apply(that, args);
+};
+},{}],36:[function(require,module,exports){
+// fallback for non-array-like ES3 and non-enumerable old V8 strings
+var cof = require('./_cof');
+module.exports = Object('z').propertyIsEnumerable(0) ? Object : function(it){
+ return cof(it) == 'String' ? it.split('') : Object(it);
+};
+},{"./_cof":20}],37:[function(require,module,exports){
+// check on default Array iterator
+var Iterators = require('./_iterators')
+ , ITERATOR = require('./_wks')('iterator')
+ , ArrayProto = Array.prototype;
+
+module.exports = function(it){
+ return it !== undefined && (Iterators.Array === it || ArrayProto[ITERATOR] === it);
+};
+},{"./_iterators":44,"./_wks":74}],38:[function(require,module,exports){
+module.exports = function(it){
+ return typeof it === 'object' ? it !== null : typeof it === 'function';
+};
+},{}],39:[function(require,module,exports){
+// call something on iterator step with safe closing on error
+var anObject = require('./_an-object');
+module.exports = function(iterator, fn, value, entries){
+ try {
+ return entries ? fn(anObject(value)[0], value[1]) : fn(value);
+ // 7.4.6 IteratorClose(iterator, completion)
+ } catch(e){
+ var ret = iterator['return'];
+ if(ret !== undefined)anObject(ret.call(iterator));
+ throw e;
+ }
+};
+},{"./_an-object":17}],40:[function(require,module,exports){
+'use strict';
+var create = require('./_object-create')
+ , descriptor = require('./_property-desc')
+ , setToStringTag = require('./_set-to-string-tag')
+ , IteratorPrototype = {};
+
+// 25.1.2.1.1 %IteratorPrototype%[@@iterator]()
+require('./_hide')(IteratorPrototype, require('./_wks')('iterator'), function(){ return this; });
+
+module.exports = function(Constructor, NAME, next){
+ Constructor.prototype = create(IteratorPrototype, {next: descriptor(1, next)});
+ setToStringTag(Constructor, NAME + ' Iterator');
+};
+},{"./_hide":32,"./_object-create":48,"./_property-desc":57,"./_set-to-string-tag":61,"./_wks":74}],41:[function(require,module,exports){
+'use strict';
+var LIBRARY = require('./_library')
+ , $export = require('./_export')
+ , redefine = require('./_redefine')
+ , hide = require('./_hide')
+ , has = require('./_has')
+ , Iterators = require('./_iterators')
+ , $iterCreate = require('./_iter-create')
+ , setToStringTag = require('./_set-to-string-tag')
+ , getPrototypeOf = require('./_object-gpo')
+ , ITERATOR = require('./_wks')('iterator')
+ , BUGGY = !([].keys && 'next' in [].keys()) // Safari has buggy iterators w/o `next`
+ , FF_ITERATOR = '@@iterator'
+ , KEYS = 'keys'
+ , VALUES = 'values';
+
+var returnThis = function(){ return this; };
+
+module.exports = function(Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED){
+ $iterCreate(Constructor, NAME, next);
+ var getMethod = function(kind){
+ if(!BUGGY && kind in proto)return proto[kind];
+ switch(kind){
+ case KEYS: return function keys(){ return new Constructor(this, kind); };
+ case VALUES: return function values(){ return new Constructor(this, kind); };
+ } return function entries(){ return new Constructor(this, kind); };
+ };
+ var TAG = NAME + ' Iterator'
+ , DEF_VALUES = DEFAULT == VALUES
+ , VALUES_BUG = false
+ , proto = Base.prototype
+ , $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT]
+ , $default = $native || getMethod(DEFAULT)
+ , $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined
+ , $anyNative = NAME == 'Array' ? proto.entries || $native : $native
+ , methods, key, IteratorPrototype;
+ // Fix native
+ if($anyNative){
+ IteratorPrototype = getPrototypeOf($anyNative.call(new Base));
+ if(IteratorPrototype !== Object.prototype){
+ // Set @@toStringTag to native iterators
+ setToStringTag(IteratorPrototype, TAG, true);
+ // fix for some old engines
+ if(!LIBRARY && !has(IteratorPrototype, ITERATOR))hide(IteratorPrototype, ITERATOR, returnThis);
+ }
+ }
+ // fix Array#{values, @@iterator}.name in V8 / FF
+ if(DEF_VALUES && $native && $native.name !== VALUES){
+ VALUES_BUG = true;
+ $default = function values(){ return $native.call(this); };
+ }
+ // Define iterator
+ if((!LIBRARY || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])){
+ hide(proto, ITERATOR, $default);
+ }
+ // Plug for library
+ Iterators[NAME] = $default;
+ Iterators[TAG] = returnThis;
+ if(DEFAULT){
+ methods = {
+ values: DEF_VALUES ? $default : getMethod(VALUES),
+ keys: IS_SET ? $default : getMethod(KEYS),
+ entries: $entries
+ };
+ if(FORCED)for(key in methods){
+ if(!(key in proto))redefine(proto, key, methods[key]);
+ } else $export($export.P + $export.F * (BUGGY || VALUES_BUG), NAME, methods);
+ }
+ return methods;
+};
+},{"./_export":27,"./_has":31,"./_hide":32,"./_iter-create":40,"./_iterators":44,"./_library":45,"./_object-gpo":52,"./_redefine":59,"./_set-to-string-tag":61,"./_wks":74}],42:[function(require,module,exports){
+var ITERATOR = require('./_wks')('iterator')
+ , SAFE_CLOSING = false;
+
+try {
+ var riter = [7][ITERATOR]();
+ riter['return'] = function(){ SAFE_CLOSING = true; };
+ Array.from(riter, function(){ throw 2; });
+} catch(e){ /* empty */ }
+
+module.exports = function(exec, skipClosing){
+ if(!skipClosing && !SAFE_CLOSING)return false;
+ var safe = false;
+ try {
+ var arr = [7]
+ , iter = arr[ITERATOR]();
+ iter.next = function(){ return {done: safe = true}; };
+ arr[ITERATOR] = function(){ return iter; };
+ exec(arr);
+ } catch(e){ /* empty */ }
+ return safe;
+};
+},{"./_wks":74}],43:[function(require,module,exports){
+module.exports = function(done, value){
+ return {value: value, done: !!done};
+};
+},{}],44:[function(require,module,exports){
+module.exports = {};
+},{}],45:[function(require,module,exports){
+module.exports = true;
+},{}],46:[function(require,module,exports){
+var global = require('./_global')
+ , macrotask = require('./_task').set
+ , Observer = global.MutationObserver || global.WebKitMutationObserver
+ , process = global.process
+ , Promise = global.Promise
+ , isNode = require('./_cof')(process) == 'process';
+
+module.exports = function(){
+ var head, last, notify;
+
+ var flush = function(){
+ var parent, fn;
+ if(isNode && (parent = process.domain))parent.exit();
+ while(head){
+ fn = head.fn;
+ head = head.next;
+ try {
+ fn();
+ } catch(e){
+ if(head)notify();
+ else last = undefined;
+ throw e;
+ }
+ } last = undefined;
+ if(parent)parent.enter();
+ };
+
+ // Node.js
+ if(isNode){
+ notify = function(){
+ process.nextTick(flush);
+ };
+ // browsers with MutationObserver
+ } else if(Observer){
+ var toggle = true
+ , node = document.createTextNode('');
+ new Observer(flush).observe(node, {characterData: true}); // eslint-disable-line no-new
+ notify = function(){
+ node.data = toggle = !toggle;
+ };
+ // environments with maybe non-completely correct, but existent Promise
+ } else if(Promise && Promise.resolve){
+ var promise = Promise.resolve();
+ notify = function(){
+ promise.then(flush);
+ };
+ // for other environments - macrotask based on:
+ // - setImmediate
+ // - MessageChannel
+ // - window.postMessag
+ // - onreadystatechange
+ // - setTimeout
+ } else {
+ notify = function(){
+ // strange IE + webpack dev server bug - use .call(global)
+ macrotask.call(global, flush);
+ };
+ }
+
+ return function(fn){
+ var task = {fn: fn, next: undefined};
+ if(last)last.next = task;
+ if(!head){
+ head = task;
+ notify();
+ } last = task;
+ };
+};
+},{"./_cof":20,"./_global":30,"./_task":66}],47:[function(require,module,exports){
+'use strict';
+// 19.1.2.1 Object.assign(target, source, ...)
+var getKeys = require('./_object-keys')
+ , gOPS = require('./_object-gops')
+ , pIE = require('./_object-pie')
+ , toObject = require('./_to-object')
+ , IObject = require('./_iobject')
+ , $assign = Object.assign;
+
+// should work with symbols and should have deterministic property order (V8 bug)
+module.exports = !$assign || require('./_fails')(function(){
+ var A = {}
+ , B = {}
+ , S = Symbol()
+ , K = 'abcdefghijklmnopqrst';
+ A[S] = 7;
+ K.split('').forEach(function(k){ B[k] = k; });
+ return $assign({}, A)[S] != 7 || Object.keys($assign({}, B)).join('') != K;
+}) ? function assign(target, source){ // eslint-disable-line no-unused-vars
+ var T = toObject(target)
+ , aLen = arguments.length
+ , index = 1
+ , getSymbols = gOPS.f
+ , isEnum = pIE.f;
+ while(aLen > index){
+ var S = IObject(arguments[index++])
+ , keys = getSymbols ? getKeys(S).concat(getSymbols(S)) : getKeys(S)
+ , length = keys.length
+ , j = 0
+ , key;
+ while(length > j)if(isEnum.call(S, key = keys[j++]))T[key] = S[key];
+ } return T;
+} : $assign;
+},{"./_fails":28,"./_iobject":36,"./_object-gops":51,"./_object-keys":54,"./_object-pie":55,"./_to-object":71}],48:[function(require,module,exports){
+// 19.1.2.2 / 15.2.3.5 Object.create(O [, Properties])
+var anObject = require('./_an-object')
+ , dPs = require('./_object-dps')
+ , enumBugKeys = require('./_enum-bug-keys')
+ , IE_PROTO = require('./_shared-key')('IE_PROTO')
+ , Empty = function(){ /* empty */ }
+ , PROTOTYPE = 'prototype';
+
+// Create object with fake `null` prototype: use iframe Object with cleared prototype
+var createDict = function(){
+ // Thrash, waste and sodomy: IE GC bug
+ var iframe = require('./_dom-create')('iframe')
+ , i = enumBugKeys.length
+ , lt = '<'
+ , gt = '>'
+ , iframeDocument;
+ iframe.style.display = 'none';
+ require('./_html').appendChild(iframe);
+ iframe.src = 'javascript:'; // eslint-disable-line no-script-url
+ // createDict = iframe.contentWindow.Object;
+ // html.removeChild(iframe);
+ iframeDocument = iframe.contentWindow.document;
+ iframeDocument.open();
+ iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
+ iframeDocument.close();
+ createDict = iframeDocument.F;
+ while(i--)delete createDict[PROTOTYPE][enumBugKeys[i]];
+ return createDict();
+};
+
+module.exports = Object.create || function create(O, Properties){
+ var result;
+ if(O !== null){
+ Empty[PROTOTYPE] = anObject(O);
+ result = new Empty;
+ Empty[PROTOTYPE] = null;
+ // add "__proto__" for Object.getPrototypeOf polyfill
+ result[IE_PROTO] = O;
+ } else result = createDict();
+ return Properties === undefined ? result : dPs(result, Properties);
+};
+
+},{"./_an-object":17,"./_dom-create":25,"./_enum-bug-keys":26,"./_html":33,"./_object-dps":50,"./_shared-key":62}],49:[function(require,module,exports){
+var anObject = require('./_an-object')
+ , IE8_DOM_DEFINE = require('./_ie8-dom-define')
+ , toPrimitive = require('./_to-primitive')
+ , dP = Object.defineProperty;
+
+exports.f = require('./_descriptors') ? Object.defineProperty : function defineProperty(O, P, Attributes){
+ anObject(O);
+ P = toPrimitive(P, true);
+ anObject(Attributes);
+ if(IE8_DOM_DEFINE)try {
+ return dP(O, P, Attributes);
+ } catch(e){ /* empty */ }
+ if('get' in Attributes || 'set' in Attributes)throw TypeError('Accessors not supported!');
+ if('value' in Attributes)O[P] = Attributes.value;
+ return O;
+};
+},{"./_an-object":17,"./_descriptors":24,"./_ie8-dom-define":34,"./_to-primitive":72}],50:[function(require,module,exports){
+var dP = require('./_object-dp')
+ , anObject = require('./_an-object')
+ , getKeys = require('./_object-keys');
+
+module.exports = require('./_descriptors') ? Object.defineProperties : function defineProperties(O, Properties){
+ anObject(O);
+ var keys = getKeys(Properties)
+ , length = keys.length
+ , i = 0
+ , P;
+ while(length > i)dP.f(O, P = keys[i++], Properties[P]);
+ return O;
+};
+},{"./_an-object":17,"./_descriptors":24,"./_object-dp":49,"./_object-keys":54}],51:[function(require,module,exports){
+exports.f = Object.getOwnPropertySymbols;
+},{}],52:[function(require,module,exports){
+// 19.1.2.9 / 15.2.3.2 Object.getPrototypeOf(O)
+var has = require('./_has')
+ , toObject = require('./_to-object')
+ , IE_PROTO = require('./_shared-key')('IE_PROTO')
+ , ObjectProto = Object.prototype;
+
+module.exports = Object.getPrototypeOf || function(O){
+ O = toObject(O);
+ if(has(O, IE_PROTO))return O[IE_PROTO];
+ if(typeof O.constructor == 'function' && O instanceof O.constructor){
+ return O.constructor.prototype;
+ } return O instanceof Object ? ObjectProto : null;
+};
+},{"./_has":31,"./_shared-key":62,"./_to-object":71}],53:[function(require,module,exports){
+var has = require('./_has')
+ , toIObject = require('./_to-iobject')
+ , arrayIndexOf = require('./_array-includes')(false)
+ , IE_PROTO = require('./_shared-key')('IE_PROTO');
+
+module.exports = function(object, names){
+ var O = toIObject(object)
+ , i = 0
+ , result = []
+ , key;
+ for(key in O)if(key != IE_PROTO)has(O, key) && result.push(key);
+ // Don't enum bug & hidden keys
+ while(names.length > i)if(has(O, key = names[i++])){
+ ~arrayIndexOf(result, key) || result.push(key);
+ }
+ return result;
+};
+},{"./_array-includes":18,"./_has":31,"./_shared-key":62,"./_to-iobject":69}],54:[function(require,module,exports){
+// 19.1.2.14 / 15.2.3.14 Object.keys(O)
+var $keys = require('./_object-keys-internal')
+ , enumBugKeys = require('./_enum-bug-keys');
+
+module.exports = Object.keys || function keys(O){
+ return $keys(O, enumBugKeys);
+};
+},{"./_enum-bug-keys":26,"./_object-keys-internal":53}],55:[function(require,module,exports){
+exports.f = {}.propertyIsEnumerable;
+},{}],56:[function(require,module,exports){
+// most Object methods by ES6 should accept primitives
+var $export = require('./_export')
+ , core = require('./_core')
+ , fails = require('./_fails');
+module.exports = function(KEY, exec){
+ var fn = (core.Object || {})[KEY] || Object[KEY]
+ , exp = {};
+ exp[KEY] = exec(fn);
+ $export($export.S + $export.F * fails(function(){ fn(1); }), 'Object', exp);
+};
+},{"./_core":21,"./_export":27,"./_fails":28}],57:[function(require,module,exports){
+module.exports = function(bitmap, value){
+ return {
+ enumerable : !(bitmap & 1),
+ configurable: !(bitmap & 2),
+ writable : !(bitmap & 4),
+ value : value
+ };
+};
+},{}],58:[function(require,module,exports){
+var hide = require('./_hide');
+module.exports = function(target, src, safe){
+ for(var key in src){
+ if(safe && target[key])target[key] = src[key];
+ else hide(target, key, src[key]);
+ } return target;
+};
+},{"./_hide":32}],59:[function(require,module,exports){
+module.exports = require('./_hide');
+},{"./_hide":32}],60:[function(require,module,exports){
+'use strict';
+var global = require('./_global')
+ , core = require('./_core')
+ , dP = require('./_object-dp')
+ , DESCRIPTORS = require('./_descriptors')
+ , SPECIES = require('./_wks')('species');
+
+module.exports = function(KEY){
+ var C = typeof core[KEY] == 'function' ? core[KEY] : global[KEY];
+ if(DESCRIPTORS && C && !C[SPECIES])dP.f(C, SPECIES, {
+ configurable: true,
+ get: function(){ return this; }
+ });
+};
+},{"./_core":21,"./_descriptors":24,"./_global":30,"./_object-dp":49,"./_wks":74}],61:[function(require,module,exports){
+var def = require('./_object-dp').f
+ , has = require('./_has')
+ , TAG = require('./_wks')('toStringTag');
+
+module.exports = function(it, tag, stat){
+ if(it && !has(it = stat ? it : it.prototype, TAG))def(it, TAG, {configurable: true, value: tag});
+};
+},{"./_has":31,"./_object-dp":49,"./_wks":74}],62:[function(require,module,exports){
+var shared = require('./_shared')('keys')
+ , uid = require('./_uid');
+module.exports = function(key){
+ return shared[key] || (shared[key] = uid(key));
+};
+},{"./_shared":63,"./_uid":73}],63:[function(require,module,exports){
+var global = require('./_global')
+ , SHARED = '__core-js_shared__'
+ , store = global[SHARED] || (global[SHARED] = {});
+module.exports = function(key){
+ return store[key] || (store[key] = {});
+};
+},{"./_global":30}],64:[function(require,module,exports){
+// 7.3.20 SpeciesConstructor(O, defaultConstructor)
+var anObject = require('./_an-object')
+ , aFunction = require('./_a-function')
+ , SPECIES = require('./_wks')('species');
+module.exports = function(O, D){
+ var C = anObject(O).constructor, S;
+ return C === undefined || (S = anObject(C)[SPECIES]) == undefined ? D : aFunction(S);
+};
+},{"./_a-function":14,"./_an-object":17,"./_wks":74}],65:[function(require,module,exports){
+var toInteger = require('./_to-integer')
+ , defined = require('./_defined');
+// true -> String#at
+// false -> String#codePointAt
+module.exports = function(TO_STRING){
+ return function(that, pos){
+ var s = String(defined(that))
+ , i = toInteger(pos)
+ , l = s.length
+ , a, b;
+ if(i < 0 || i >= l)return TO_STRING ? '' : undefined;
+ a = s.charCodeAt(i);
+ return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff
+ ? TO_STRING ? s.charAt(i) : a
+ : TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
+ };
+};
+},{"./_defined":23,"./_to-integer":68}],66:[function(require,module,exports){
+var ctx = require('./_ctx')
+ , invoke = require('./_invoke')
+ , html = require('./_html')
+ , cel = require('./_dom-create')
+ , global = require('./_global')
+ , process = global.process
+ , setTask = global.setImmediate
+ , clearTask = global.clearImmediate
+ , MessageChannel = global.MessageChannel
+ , counter = 0
+ , queue = {}
+ , ONREADYSTATECHANGE = 'onreadystatechange'
+ , defer, channel, port;
+var run = function(){
+ var id = +this;
+ if(queue.hasOwnProperty(id)){
+ var fn = queue[id];
+ delete queue[id];
+ fn();
+ }
+};
+var listener = function(event){
+ run.call(event.data);
+};
+// Node.js 0.9+ & IE10+ has setImmediate, otherwise:
+if(!setTask || !clearTask){
+ setTask = function setImmediate(fn){
+ var args = [], i = 1;
+ while(arguments.length > i)args.push(arguments[i++]);
+ queue[++counter] = function(){
+ invoke(typeof fn == 'function' ? fn : Function(fn), args);
+ };
+ defer(counter);
+ return counter;
+ };
+ clearTask = function clearImmediate(id){
+ delete queue[id];
+ };
+ // Node.js 0.8-
+ if(require('./_cof')(process) == 'process'){
+ defer = function(id){
+ process.nextTick(ctx(run, id, 1));
+ };
+ // Browsers with MessageChannel, includes WebWorkers
+ } else if(MessageChannel){
+ channel = new MessageChannel;
+ port = channel.port2;
+ channel.port1.onmessage = listener;
+ defer = ctx(port.postMessage, port, 1);
+ // Browsers with postMessage, skip WebWorkers
+ // IE8 has postMessage, but it's sync & typeof its postMessage is 'object'
+ } else if(global.addEventListener && typeof postMessage == 'function' && !global.importScripts){
+ defer = function(id){
+ global.postMessage(id + '', '*');
+ };
+ global.addEventListener('message', listener, false);
+ // IE8-
+ } else if(ONREADYSTATECHANGE in cel('script')){
+ defer = function(id){
+ html.appendChild(cel('script'))[ONREADYSTATECHANGE] = function(){
+ html.removeChild(this);
+ run.call(id);
+ };
+ };
+ // Rest old browsers
+ } else {
+ defer = function(id){
+ setTimeout(ctx(run, id, 1), 0);
+ };
+ }
+}
+module.exports = {
+ set: setTask,
+ clear: clearTask
+};
+},{"./_cof":20,"./_ctx":22,"./_dom-create":25,"./_global":30,"./_html":33,"./_invoke":35}],67:[function(require,module,exports){
+var toInteger = require('./_to-integer')
+ , max = Math.max
+ , min = Math.min;
+module.exports = function(index, length){
+ index = toInteger(index);
+ return index < 0 ? max(index + length, 0) : min(index, length);
+};
+},{"./_to-integer":68}],68:[function(require,module,exports){
+// 7.1.4 ToInteger
+var ceil = Math.ceil
+ , floor = Math.floor;
+module.exports = function(it){
+ return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
+};
+},{}],69:[function(require,module,exports){
+// to indexed object, toObject with fallback for non-array-like ES3 strings
+var IObject = require('./_iobject')
+ , defined = require('./_defined');
+module.exports = function(it){
+ return IObject(defined(it));
+};
+},{"./_defined":23,"./_iobject":36}],70:[function(require,module,exports){
+// 7.1.15 ToLength
+var toInteger = require('./_to-integer')
+ , min = Math.min;
+module.exports = function(it){
+ return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0; // pow(2, 53) - 1 == 9007199254740991
+};
+},{"./_to-integer":68}],71:[function(require,module,exports){
+// 7.1.13 ToObject(argument)
+var defined = require('./_defined');
+module.exports = function(it){
+ return Object(defined(it));
+};
+},{"./_defined":23}],72:[function(require,module,exports){
+// 7.1.1 ToPrimitive(input [, PreferredType])
+var isObject = require('./_is-object');
+// unlike the ES6 spec version, we don't implement the @@toPrimitive case;
+// the second argument is a flag: when true, the preferred type is string
+module.exports = function(it, S){
+ if(!isObject(it))return it;
+ var fn, val;
+ if(S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it)))return val;
+ if(typeof (fn = it.valueOf) == 'function' && !isObject(val = fn.call(it)))return val;
+ if(!S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it)))return val;
+ throw TypeError("Can't convert object to primitive value");
+};
+},{"./_is-object":38}],73:[function(require,module,exports){
+var id = 0
+ , px = Math.random();
+module.exports = function(key){
+ return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
+};
+},{}],74:[function(require,module,exports){
+var store = require('./_shared')('wks')
+ , uid = require('./_uid')
+ , Symbol = require('./_global').Symbol
+ , USE_SYMBOL = typeof Symbol == 'function';
+
+var $exports = module.exports = function(name){
+ return store[name] || (store[name] =
+ USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : uid)('Symbol.' + name));
+};
+
+$exports.store = store;
+},{"./_global":30,"./_shared":63,"./_uid":73}],75:[function(require,module,exports){
+var classof = require('./_classof')
+ , ITERATOR = require('./_wks')('iterator')
+ , Iterators = require('./_iterators');
+module.exports = require('./_core').getIteratorMethod = function(it){
+ if(it != undefined)return it[ITERATOR]
+ || it['@@iterator']
+ || Iterators[classof(it)];
+};
+},{"./_classof":19,"./_core":21,"./_iterators":44,"./_wks":74}],76:[function(require,module,exports){
+'use strict';
+var addToUnscopables = require('./_add-to-unscopables')
+ , step = require('./_iter-step')
+ , Iterators = require('./_iterators')
+ , toIObject = require('./_to-iobject');
+
+// 22.1.3.4 Array.prototype.entries()
+// 22.1.3.13 Array.prototype.keys()
+// 22.1.3.29 Array.prototype.values()
+// 22.1.3.30 Array.prototype[@@iterator]()
+module.exports = require('./_iter-define')(Array, 'Array', function(iterated, kind){
+ this._t = toIObject(iterated); // target
+ this._i = 0; // next index
+ this._k = kind; // kind
+// 22.1.5.2.1 %ArrayIteratorPrototype%.next()
+}, function(){
+ var O = this._t
+ , kind = this._k
+ , index = this._i++;
+ if(!O || index >= O.length){
+ this._t = undefined;
+ return step(1);
+ }
+ if(kind == 'keys' )return step(0, index);
+ if(kind == 'values')return step(0, O[index]);
+ return step(0, [index, O[index]]);
+}, 'values');
+
+// argumentsList[@@iterator] is %ArrayProto_values% (9.4.4.6, 9.4.4.7)
+Iterators.Arguments = Iterators.Array;
+
+addToUnscopables('keys');
+addToUnscopables('values');
+addToUnscopables('entries');
+},{"./_add-to-unscopables":15,"./_iter-define":41,"./_iter-step":43,"./_iterators":44,"./_to-iobject":69}],77:[function(require,module,exports){
+// 19.1.3.1 Object.assign(target, source)
+var $export = require('./_export');
+
+$export($export.S + $export.F, 'Object', {assign: require('./_object-assign')});
+},{"./_export":27,"./_object-assign":47}],78:[function(require,module,exports){
+// 19.1.2.14 Object.keys(O)
+var toObject = require('./_to-object')
+ , $keys = require('./_object-keys');
+
+require('./_object-sap')('keys', function(){
+ return function keys(it){
+ return $keys(toObject(it));
+ };
+});
+},{"./_object-keys":54,"./_object-sap":56,"./_to-object":71}],79:[function(require,module,exports){
+arguments[4][9][0].apply(exports,arguments)
+},{"dup":9}],80:[function(require,module,exports){
+'use strict';
+var LIBRARY = require('./_library')
+ , global = require('./_global')
+ , ctx = require('./_ctx')
+ , classof = require('./_classof')
+ , $export = require('./_export')
+ , isObject = require('./_is-object')
+ , aFunction = require('./_a-function')
+ , anInstance = require('./_an-instance')
+ , forOf = require('./_for-of')
+ , speciesConstructor = require('./_species-constructor')
+ , task = require('./_task').set
+ , microtask = require('./_microtask')()
+ , PROMISE = 'Promise'
+ , TypeError = global.TypeError
+ , process = global.process
+ , $Promise = global[PROMISE]
+ , process = global.process
+ , isNode = classof(process) == 'process'
+ , empty = function(){ /* empty */ }
+ , Internal, GenericPromiseCapability, Wrapper;
+
+var USE_NATIVE = !!function(){
+ try {
+ // correct subclassing with @@species support
+ var promise = $Promise.resolve(1)
+ , FakePromise = (promise.constructor = {})[require('./_wks')('species')] = function(exec){ exec(empty, empty); };
+ // unhandled rejections tracking support, NodeJS Promise without it fails @@species test
+ return (isNode || typeof PromiseRejectionEvent == 'function') && promise.then(empty) instanceof FakePromise;
+ } catch(e){ /* empty */ }
+}();
+
+// helpers
+var sameConstructor = function(a, b){
+ // with library wrapper special case
+ return a === b || a === $Promise && b === Wrapper;
+};
+var isThenable = function(it){
+ var then;
+ return isObject(it) && typeof (then = it.then) == 'function' ? then : false;
+};
+var newPromiseCapability = function(C){
+ return sameConstructor($Promise, C)
+ ? new PromiseCapability(C)
+ : new GenericPromiseCapability(C);
+};
+var PromiseCapability = GenericPromiseCapability = function(C){
+ var resolve, reject;
+ this.promise = new C(function($$resolve, $$reject){
+ if(resolve !== undefined || reject !== undefined)throw TypeError('Bad Promise constructor');
+ resolve = $$resolve;
+ reject = $$reject;
+ });
+ this.resolve = aFunction(resolve);
+ this.reject = aFunction(reject);
+};
+var perform = function(exec){
+ try {
+ exec();
+ } catch(e){
+ return {error: e};
+ }
+};
+var notify = function(promise, isReject){
+ if(promise._n)return;
+ promise._n = true;
+ var chain = promise._c;
+ microtask(function(){
+ var value = promise._v
+ , ok = promise._s == 1
+ , i = 0;
+ var run = function(reaction){
+ var handler = ok ? reaction.ok : reaction.fail
+ , resolve = reaction.resolve
+ , reject = reaction.reject
+ , domain = reaction.domain
+ , result, then;
+ try {
+ if(handler){
+ if(!ok){
+ if(promise._h == 2)onHandleUnhandled(promise);
+ promise._h = 1;
+ }
+ if(handler === true)result = value;
+ else {
+ if(domain)domain.enter();
+ result = handler(value);
+ if(domain)domain.exit();
+ }
+ if(result === reaction.promise){
+ reject(TypeError('Promise-chain cycle'));
+ } else if(then = isThenable(result)){
+ then.call(result, resolve, reject);
+ } else resolve(result);
+ } else reject(value);
+ } catch(e){
+ reject(e);
+ }
+ };
+ while(chain.length > i)run(chain[i++]); // variable length - can't use forEach
+ promise._c = [];
+ promise._n = false;
+ if(isReject && !promise._h)onUnhandled(promise);
+ });
+};
+var onUnhandled = function(promise){
+ task.call(global, function(){
+ var value = promise._v
+ , abrupt, handler, console;
+ if(isUnhandled(promise)){
+ abrupt = perform(function(){
+ if(isNode){
+ process.emit('unhandledRejection', value, promise);
+ } else if(handler = global.onunhandledrejection){
+ handler({promise: promise, reason: value});
+ } else if((console = global.console) && console.error){
+ console.error('Unhandled promise rejection', value);
+ }
+ });
+ // Browsers should not trigger `rejectionHandled` event if it was handled here, NodeJS - should
+ promise._h = isNode || isUnhandled(promise) ? 2 : 1;
+ } promise._a = undefined;
+ if(abrupt)throw abrupt.error;
+ });
+};
+var isUnhandled = function(promise){
+ if(promise._h == 1)return false;
+ var chain = promise._a || promise._c
+ , i = 0
+ , reaction;
+ while(chain.length > i){
+ reaction = chain[i++];
+ if(reaction.fail || !isUnhandled(reaction.promise))return false;
+ } return true;
+};
+var onHandleUnhandled = function(promise){
+ task.call(global, function(){
+ var handler;
+ if(isNode){
+ process.emit('rejectionHandled', promise);
+ } else if(handler = global.onrejectionhandled){
+ handler({promise: promise, reason: promise._v});
+ }
+ });
+};
+var $reject = function(value){
+ var promise = this;
+ if(promise._d)return;
+ promise._d = true;
+ promise = promise._w || promise; // unwrap
+ promise._v = value;
+ promise._s = 2;
+ if(!promise._a)promise._a = promise._c.slice();
+ notify(promise, true);
+};
+var $resolve = function(value){
+ var promise = this
+ , then;
+ if(promise._d)return;
+ promise._d = true;
+ promise = promise._w || promise; // unwrap
+ try {
+ if(promise === value)throw TypeError("Promise can't be resolved itself");
+ if(then = isThenable(value)){
+ microtask(function(){
+ var wrapper = {_w: promise, _d: false}; // wrap
+ try {
+ then.call(value, ctx($resolve, wrapper, 1), ctx($reject, wrapper, 1));
+ } catch(e){
+ $reject.call(wrapper, e);
+ }
+ });
+ } else {
+ promise._v = value;
+ promise._s = 1;
+ notify(promise, false);
+ }
+ } catch(e){
+ $reject.call({_w: promise, _d: false}, e); // wrap
+ }
+};
+
+// constructor polyfill
+if(!USE_NATIVE){
+ // 25.4.3.1 Promise(executor)
+ $Promise = function Promise(executor){
+ anInstance(this, $Promise, PROMISE, '_h');
+ aFunction(executor);
+ Internal.call(this);
+ try {
+ executor(ctx($resolve, this, 1), ctx($reject, this, 1));
+ } catch(err){
+ $reject.call(this, err);
+ }
+ };
+ Internal = function Promise(executor){
+ this._c = []; // <- awaiting reactions
+ this._a = undefined; // <- checked in isUnhandled reactions
+ this._s = 0; // <- state
+ this._d = false; // <- done
+ this._v = undefined; // <- value
+ this._h = 0; // <- rejection state, 0 - default, 1 - handled, 2 - unhandled
+ this._n = false; // <- notify
+ };
+ Internal.prototype = require('./_redefine-all')($Promise.prototype, {
+ // 25.4.5.3 Promise.prototype.then(onFulfilled, onRejected)
+ then: function then(onFulfilled, onRejected){
+ var reaction = newPromiseCapability(speciesConstructor(this, $Promise));
+ reaction.ok = typeof onFulfilled == 'function' ? onFulfilled : true;
+ reaction.fail = typeof onRejected == 'function' && onRejected;
+ reaction.domain = isNode ? process.domain : undefined;
+ this._c.push(reaction);
+ if(this._a)this._a.push(reaction);
+ if(this._s)notify(this, false);
+ return reaction.promise;
+ },
+ // 25.4.5.1 Promise.prototype.catch(onRejected)
+ 'catch': function(onRejected){
+ return this.then(undefined, onRejected);
+ }
+ });
+ PromiseCapability = function(){
+ var promise = new Internal;
+ this.promise = promise;
+ this.resolve = ctx($resolve, promise, 1);
+ this.reject = ctx($reject, promise, 1);
+ };
+}
+
+$export($export.G + $export.W + $export.F * !USE_NATIVE, {Promise: $Promise});
+require('./_set-to-string-tag')($Promise, PROMISE);
+require('./_set-species')(PROMISE);
+Wrapper = require('./_core')[PROMISE];
+
+// statics
+$export($export.S + $export.F * !USE_NATIVE, PROMISE, {
+ // 25.4.4.5 Promise.reject(r)
+ reject: function reject(r){
+ var capability = newPromiseCapability(this)
+ , $$reject = capability.reject;
+ $$reject(r);
+ return capability.promise;
+ }
+});
+$export($export.S + $export.F * (LIBRARY || !USE_NATIVE), PROMISE, {
+ // 25.4.4.6 Promise.resolve(x)
+ resolve: function resolve(x){
+ // instanceof instead of internal slot check because we should fix it without replacement native Promise core
+ if(x instanceof $Promise && sameConstructor(x.constructor, this))return x;
+ var capability = newPromiseCapability(this)
+ , $$resolve = capability.resolve;
+ $$resolve(x);
+ return capability.promise;
+ }
+});
+$export($export.S + $export.F * !(USE_NATIVE && require('./_iter-detect')(function(iter){
+ $Promise.all(iter)['catch'](empty);
+})), PROMISE, {
+ // 25.4.4.1 Promise.all(iterable)
+ all: function all(iterable){
+ var C = this
+ , capability = newPromiseCapability(C)
+ , resolve = capability.resolve
+ , reject = capability.reject;
+ var abrupt = perform(function(){
+ var values = []
+ , index = 0
+ , remaining = 1;
+ forOf(iterable, false, function(promise){
+ var $index = index++
+ , alreadyCalled = false;
+ values.push(undefined);
+ remaining++;
+ C.resolve(promise).then(function(value){
+ if(alreadyCalled)return;
+ alreadyCalled = true;
+ values[$index] = value;
+ --remaining || resolve(values);
+ }, reject);
+ });
+ --remaining || resolve(values);
+ });
+ if(abrupt)reject(abrupt.error);
+ return capability.promise;
+ },
+ // 25.4.4.4 Promise.race(iterable)
+ race: function race(iterable){
+ var C = this
+ , capability = newPromiseCapability(C)
+ , reject = capability.reject;
+ var abrupt = perform(function(){
+ forOf(iterable, false, function(promise){
+ C.resolve(promise).then(capability.resolve, reject);
+ });
+ });
+ if(abrupt)reject(abrupt.error);
+ return capability.promise;
+ }
+});
+},{"./_a-function":14,"./_an-instance":16,"./_classof":19,"./_core":21,"./_ctx":22,"./_export":27,"./_for-of":29,"./_global":30,"./_is-object":38,"./_iter-detect":42,"./_library":45,"./_microtask":46,"./_redefine-all":58,"./_set-species":60,"./_set-to-string-tag":61,"./_species-constructor":64,"./_task":66,"./_wks":74}],81:[function(require,module,exports){
+'use strict';
+var $at = require('./_string-at')(true);
+
+// 21.1.3.27 String.prototype[@@iterator]()
+require('./_iter-define')(String, 'String', function(iterated){
+ this._t = String(iterated); // target
+ this._i = 0; // next index
+// 21.1.5.2.1 %StringIteratorPrototype%.next()
+}, function(){
+ var O = this._t
+ , index = this._i
+ , point;
+ if(index >= O.length)return {value: undefined, done: true};
+ point = $at(O, index);
+ this._i += point.length;
+ return {value: point, done: false};
+});
+},{"./_iter-define":41,"./_string-at":65}],82:[function(require,module,exports){
+require('./es6.array.iterator');
+var global = require('./_global')
+ , hide = require('./_hide')
+ , Iterators = require('./_iterators')
+ , TO_STRING_TAG = require('./_wks')('toStringTag');
+
+for(var collections = ['NodeList', 'DOMTokenList', 'MediaList', 'StyleSheetList', 'CSSRuleList'], i = 0; i < 5; i++){
+ var NAME = collections[i]
+ , Collection = global[NAME]
+ , proto = Collection && Collection.prototype;
+ if(proto && !proto[TO_STRING_TAG])hide(proto, TO_STRING_TAG, NAME);
+ Iterators[NAME] = Iterators.Array;
+}
+},{"./_global":30,"./_hide":32,"./_iterators":44,"./_wks":74,"./es6.array.iterator":76}],83:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+
+var _extends2 = require("babel-runtime/helpers/extends");
+
+var _extends3 = _interopRequireDefault(_extends2);
+
+var _collection = require("./collection");
+
+var _collection2 = _interopRequireDefault(_collection);
+
+var _base = require("./adapters/base");
+
+var _base2 = _interopRequireDefault(_base);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const DEFAULT_BUCKET_NAME = "default";
+const DEFAULT_REMOTE = "http://localhost:8888/v1";
+
+/**
+ * KintoBase class.
+ */
+class KintoBase {
+ /**
+   * Provides public access to the base adapter class. Users can create a
+ * custom DB adapter by extending {@link BaseAdapter}.
+ *
+ * @type {Object}
+ */
+ static get adapters() {
+ return {
+ BaseAdapter: _base2.default
+ };
+ }
+
+ /**
+ * Synchronization strategies. Available strategies are:
+ *
+ * - `MANUAL`: Conflicts will be reported in a dedicated array.
+ * - `SERVER_WINS`: Conflicts are resolved using remote data.
+ * - `CLIENT_WINS`: Conflicts are resolved using local data.
+ *
+ * @type {Object}
+ */
+ static get syncStrategy() {
+ return _collection2.default.strategy;
+ }
+
+ /**
+ * Constructor.
+ *
+ * Options:
+ * - `{String}` `remote` The server URL to use.
+ * - `{String}` `bucket` The collection bucket name.
+ * - `{EventEmitter}` `events` Events handler.
+ * - `{BaseAdapter}` `adapter` The base DB adapter class.
+ * - `{Object}` `adapterOptions` Options given to the adapter.
+ * - `{String}` `dbPrefix` The DB name prefix.
+ * - `{Object}` `headers` The HTTP headers to use.
+ * - `{String}` `requestMode` The HTTP CORS mode to use.
+ * - `{Number}` `timeout` The requests timeout in ms (default: `5000`).
+ *
+ * @param {Object} options The options object.
+ */
+ constructor(options = {}) {
+ const defaults = {
+ bucket: DEFAULT_BUCKET_NAME,
+ remote: DEFAULT_REMOTE
+ };
+ this._options = (0, _extends3.default)({}, defaults, options);
+ if (!this._options.adapter) {
+ throw new Error("No adapter provided");
+ }
+
+ const { remote, events, headers, requestMode, timeout, ApiClass } = this._options;
+
+ // public properties
+
+ /**
+ * The kinto HTTP client instance.
+ * @type {KintoClient}
+ */
+ this.api = new ApiClass(remote, { events, headers, requestMode, timeout });
+ /**
+ * The event emitter instance.
+ * @type {EventEmitter}
+ */
+ this.events = this._options.events;
+ }
+
+ /**
+   * Creates a {@link Collection} instance. The second (optional) parameter
+   * sets collection-level options such as `remoteTransformers`.
+ *
+ * @param {String} collName The collection name.
+ * @param {Object} options May contain the following fields:
+ * remoteTransformers: Array<RemoteTransformer>
+ * @return {Collection}
+ */
+ collection(collName, options = {}) {
+ if (!collName) {
+ throw new Error("missing collection name");
+ }
+
+ const bucket = this._options.bucket;
+ return new _collection2.default(bucket, collName, this.api, {
+ events: this._options.events,
+ adapter: this._options.adapter,
+ adapterOptions: this._options.adapterOptions,
+ dbPrefix: this._options.dbPrefix,
+ idSchema: options.idSchema,
+ remoteTransformers: options.remoteTransformers,
+ hooks: options.hooks
+ });
+ }
+}
+exports.default = KintoBase;
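+
+// A minimal usage sketch of KintoBase (illustrative only; the ApiClass,
+// adapter and URL below are assumptions, not part of this module):
+//
+//   const kinto = new KintoBase({
+//     remote: "https://kinto.example.com/v1",
+//     bucket: "blog",
+//     adapter: SomeConcreteAdapter,   // a subclass of KintoBase.adapters.BaseAdapter
+//     ApiClass: KintoClient,          // a kinto-http client class
+//   });
+//   const articles = kinto.collection("articles");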
+
+},{"./adapters/base":85,"./collection":86,"babel-runtime/helpers/extends":8}],84:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+
+var _asyncToGenerator2 = require("babel-runtime/helpers/asyncToGenerator");
+
+var _asyncToGenerator3 = _interopRequireDefault(_asyncToGenerator2);
+
+var _promise = require("babel-runtime/core-js/promise");
+
+var _promise2 = _interopRequireDefault(_promise);
+
+var _keys = require("babel-runtime/core-js/object/keys");
+
+var _keys2 = _interopRequireDefault(_keys);
+
+var _base = require("./base.js");
+
+var _base2 = _interopRequireDefault(_base);
+
+var _utils = require("../utils");
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const INDEXED_FIELDS = ["id", "_status", "last_modified"];
+
+/**
+ * IDB cursor handlers.
+ * @type {Object}
+ */
+const cursorHandlers = {
+ all(filters, done) {
+ const results = [];
+ return function (event) {
+ const cursor = event.target.result;
+ if (cursor) {
+ if ((0, _utils.filterObject)(filters, cursor.value)) {
+ results.push(cursor.value);
+ }
+ cursor.continue();
+ } else {
+ done(results);
+ }
+ };
+ },
+
+ in(values, done) {
+ if (values.length === 0) {
+ return done([]);
+ }
+ const sortedValues = [].slice.call(values).sort();
+ const results = [];
+ return function (event) {
+ const cursor = event.target.result;
+ if (!cursor) {
+ done(results);
+ return;
+ }
+ const { key, value } = cursor;
+ let i = 0;
+ while (key > sortedValues[i]) {
+ // The cursor has passed beyond this key. Check next.
+ ++i;
+ if (i === sortedValues.length) {
+ done(results); // There is no next. Stop searching.
+ return;
+ }
+ }
+ if (key === sortedValues[i]) {
+ results.push(value);
+ cursor.continue();
+ } else {
+ cursor.continue(sortedValues[i]);
+ }
+ };
+ }
+};
+
+/**
+ * Extracts the first indexed field from the filters definition. Since indexes
+ * were created on single columns, extracting a single one makes sense.
+ *
+ * @param {Object} filters The filters object.
+ * @return {String|undefined}
+ */
+function findIndexedField(filters) {
+ const filteredFields = (0, _keys2.default)(filters);
+ const indexedFields = filteredFields.filter(field => {
+ return INDEXED_FIELDS.indexOf(field) !== -1;
+ });
+ return indexedFields[0];
+}
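+
+// For instance, findIndexedField({_status: "created", title: "foo"}) returns
+// "_status", while findIndexedField({title: "foo"}) returns undefined.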
+
+/**
+ * Creates an IDB request and attaches the appropriate cursor event handler to
+ * it in order to perform a list query.
+ *
+ * Multiple matching values are handled by passing an array.
+ *
+ * @param {IDBStore} store The IDB store.
+ * @param {String|undefined} indexField The indexed field to query, if any.
+ * @param {Any} value The value to filter, if any.
+ * @param {Object} filters More filters.
+ * @param {Function} done The operation completion handler.
+ * @return {IDBRequest}
+ */
+function createListRequest(store, indexField, value, filters, done) {
+ if (!indexField) {
+ // Get all records.
+ const request = store.openCursor();
+ request.onsuccess = cursorHandlers.all(filters, done);
+ return request;
+ }
+
+ // WHERE IN equivalent clause
+ if (Array.isArray(value)) {
+ const request = store.index(indexField).openCursor();
+ request.onsuccess = cursorHandlers.in(value, done);
+ return request;
+ }
+
+ // WHERE field = value clause
+ const request = store.index(indexField).openCursor(IDBKeyRange.only(value));
+ request.onsuccess = cursorHandlers.all(filters, done);
+ return request;
+}
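+
+// For example, a list() call with {filters: {_status: ["created", "updated"]}}
+// goes through the "WHERE IN" branch above, while {filters: {_status: "created"}}
+// queries the index with IDBKeyRange.only("created").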
+
+/**
+ * IndexedDB adapter.
+ *
+ * This adapter doesn't support any options.
+ */
+class IDB extends _base2.default {
+ /**
+ * Constructor.
+ *
+   * @param {String} dbname The database name.
+ */
+ constructor(dbname) {
+ super();
+ this._db = null;
+ // public properties
+ /**
+ * The database name.
+ * @type {String}
+ */
+ this.dbname = dbname;
+ }
+
+ _handleError(method, err) {
+ const error = new Error(method + "() " + err.message);
+ error.stack = err.stack;
+ throw error;
+ }
+
+ /**
+ * Ensures a connection to the IndexedDB database has been opened.
+ *
+ * @override
+ * @return {Promise}
+ */
+ open() {
+ if (this._db) {
+ return _promise2.default.resolve(this);
+ }
+ return new _promise2.default((resolve, reject) => {
+ const request = indexedDB.open(this.dbname, 1);
+ request.onupgradeneeded = event => {
+ // DB object
+ const db = event.target.result;
+ // Main collection store
+ const collStore = db.createObjectStore(this.dbname, {
+ keyPath: "id"
+ });
+ // Primary key (generated by IdSchema, UUID by default)
+ collStore.createIndex("id", "id", { unique: true });
+ // Local record status ("synced", "created", "updated", "deleted")
+ collStore.createIndex("_status", "_status");
+ // Last modified field
+ collStore.createIndex("last_modified", "last_modified");
+
+ // Metadata store
+ const metaStore = db.createObjectStore("__meta__", {
+ keyPath: "name"
+ });
+ metaStore.createIndex("name", "name", { unique: true });
+ };
+ request.onerror = event => reject(event.target.error);
+ request.onsuccess = event => {
+ this._db = event.target.result;
+ resolve(this);
+ };
+ });
+ }
+
+ /**
+ * Closes current connection to the database.
+ *
+ * @override
+ * @return {Promise}
+ */
+ close() {
+ if (this._db) {
+ this._db.close(); // indexedDB.close is synchronous
+ this._db = null;
+ }
+ return super.close();
+ }
+
+ /**
+   * Returns transaction and store objects for this collection.
+   *
+   * To determine whether a transaction has completed successfully, listen to
+   * the transaction’s complete event rather than the IDBObjectStore request’s
+   * success event, because the transaction may still fail after the success
+   * event fires.
+ *
+ * @param {String} mode Transaction mode ("readwrite" or undefined)
+ * @param {String|null} name Store name (defaults to coll name)
+ * @return {Object}
+ */
+ prepare(mode = undefined, name = null) {
+ const storeName = name || this.dbname;
+ // On Safari, calling IDBDatabase.transaction with mode == undefined raises
+ // a TypeError.
+ const transaction = mode ? this._db.transaction([storeName], mode) : this._db.transaction([storeName]);
+ const store = transaction.objectStore(storeName);
+ return { transaction, store };
+ }
+
+ /**
+   * Deletes every record in the current collection.
+ *
+ * @override
+ * @return {Promise}
+ */
+ clear() {
+ var _this = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ try {
+ yield _this.open();
+ return new _promise2.default(function (resolve, reject) {
+ const { transaction, store } = _this.prepare("readwrite");
+ store.clear();
+ transaction.onerror = function (event) {
+ return reject(new Error(event.target.error));
+ };
+ transaction.oncomplete = function () {
+ return resolve();
+ };
+ });
+ } catch (e) {
+ _this._handleError("clear", e);
+ }
+ })();
+ }
+
+ /**
+ * Executes the set of synchronous CRUD operations described in the provided
+   * callback within an IndexedDB transaction, for the current db store.
+ *
+ * The callback will be provided an object exposing the following synchronous
+ * CRUD operation methods: get, create, update, delete.
+ *
+   * Important note: because of limitations in IndexedDB implementations, no
+ * asynchronous code should be performed within the provided callback; the
+ * promise will therefore be rejected if the callback returns a Promise.
+ *
+ * Options:
+ * - {Array} preload: The list of record IDs to fetch and make available to
+ * the transaction object get() method (default: [])
+ *
+ * @example
+ * const db = new IDB("example");
+ * db.execute(transaction => {
+ * transaction.create({id: 1, title: "foo"});
+ * transaction.update({id: 2, title: "bar"});
+ * transaction.delete(3);
+ * return "foo";
+ * })
+ *    .then(console.log.bind(console))    // => "foo"
+ *    .catch(console.error.bind(console));
+ *
+ * @param {Function} callback The operation description callback.
+ * @param {Object} options The options object.
+ * @return {Promise}
+ */
+ execute(callback, options = { preload: [] }) {
+ var _this2 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+      // Transactions in IndexedDB are autocommitted when a callback does not
+      // perform any additional operation.
+      // The way Promises are implemented in Firefox (see https://bugzilla.mozilla.org/show_bug.cgi?id=1193394)
+      // prevents using them within an open transaction.
+      // To avoid managing asynchronicity in the specified `callback`, we preload
+      // a list of records in order to execute the `callback` synchronously.
+ // See also:
+ // - http://stackoverflow.com/a/28388805/330911
+ // - http://stackoverflow.com/a/10405196
+ // - https://jakearchibald.com/2015/tasks-microtasks-queues-and-schedules/
+ yield _this2.open();
+ return new _promise2.default(function (resolve, reject) {
+ // Start transaction.
+ const { transaction, store } = _this2.prepare("readwrite");
+ // Preload specified records using index.
+ const ids = options.preload;
+ store.index("id").openCursor().onsuccess = cursorHandlers.in(ids, function (records) {
+ // Store obtained records by id.
+ const preloaded = records.reduce(function (acc, record) {
+ acc[record.id] = record;
+ return acc;
+ }, {});
+ // Expose a consistent API for every adapter instead of raw store methods.
+ const proxy = transactionProxy(store, preloaded);
+ // The callback is executed synchronously within the same transaction.
+ let result;
+ try {
+ result = callback(proxy);
+ } catch (e) {
+ transaction.abort();
+ reject(e);
+ }
+ if (result instanceof _promise2.default) {
+ // XXX: investigate how to provide documentation details in error.
+ reject(new Error("execute() callback should not return a Promise."));
+ }
+ // XXX unsure if we should manually abort the transaction on error
+ transaction.onerror = function (event) {
+ return reject(new Error(event.target.error));
+ };
+ transaction.oncomplete = function (event) {
+ return resolve(result);
+ };
+ });
+ });
+ })();
+ }
+
+ /**
+ * Retrieve a record by its primary key from the IndexedDB database.
+ *
+ * @override
+ * @param {String} id The record id.
+ * @return {Promise}
+ */
+ get(id) {
+ var _this3 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ try {
+ yield _this3.open();
+ return new _promise2.default(function (resolve, reject) {
+ const { transaction, store } = _this3.prepare();
+ const request = store.get(id);
+ transaction.onerror = function (event) {
+ return reject(new Error(event.target.error));
+ };
+ transaction.oncomplete = function () {
+ return resolve(request.result);
+ };
+ });
+ } catch (e) {
+ _this3._handleError("get", e);
+ }
+ })();
+ }
+
+ /**
+ * Lists all records from the IndexedDB database.
+ *
+ * @override
+ * @return {Promise}
+ */
+ list(params = { filters: {} }) {
+ var _this4 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ const { filters } = params;
+ const indexField = findIndexedField(filters);
+ const value = filters[indexField];
+ try {
+ yield _this4.open();
+ const results = yield new _promise2.default(function (resolve, reject) {
+ let results = [];
+ // If `indexField` was used already, don't filter again.
+ const remainingFilters = (0, _utils.omitKeys)(filters, indexField);
+
+ const { transaction, store } = _this4.prepare();
+ createListRequest(store, indexField, value, remainingFilters, function (_results) {
+ // we have received all requested records, parking them within
+ // current scope
+ results = _results;
+ });
+ transaction.onerror = function (event) {
+ return reject(new Error(event.target.error));
+ };
+ transaction.oncomplete = function (event) {
+ return resolve(results);
+ };
+ });
+
+ // The resulting list of records is sorted.
+ // XXX: with some efforts, this could be fully implemented using IDB API.
+ return params.order ? (0, _utils.sortObjects)(params.order, results) : results;
+ } catch (e) {
+ _this4._handleError("list", e);
+ }
+ })();
+ }
+
+ /**
+ * Store the lastModified value into metadata store.
+ *
+ * @override
+ * @param {Number} lastModified
+ * @return {Promise}
+ */
+ saveLastModified(lastModified) {
+ var _this5 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ const value = parseInt(lastModified, 10) || null;
+ yield _this5.open();
+ return new _promise2.default(function (resolve, reject) {
+ const { transaction, store } = _this5.prepare("readwrite", "__meta__");
+ store.put({ name: "lastModified", value: value });
+ transaction.onerror = function (event) {
+ return reject(event.target.error);
+ };
+ transaction.oncomplete = function (event) {
+ return resolve(value);
+ };
+ });
+ })();
+ }
+
+ /**
+ * Retrieve saved lastModified value.
+ *
+ * @override
+ * @return {Promise}
+ */
+ getLastModified() {
+ var _this6 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ yield _this6.open();
+ return new _promise2.default(function (resolve, reject) {
+ const { transaction, store } = _this6.prepare(undefined, "__meta__");
+ const request = store.get("lastModified");
+ transaction.onerror = function (event) {
+ return reject(event.target.error);
+ };
+ transaction.oncomplete = function (event) {
+ resolve(request.result && request.result.value || null);
+ };
+ });
+ })();
+ }
+
+ /**
+ * Load a dump of records exported from a server.
+ *
+   * @override
+ * @return {Promise}
+ */
+ loadDump(records) {
+ var _this7 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ try {
+ yield _this7.execute(function (transaction) {
+ records.forEach(function (record) {
+ return transaction.update(record);
+ });
+ });
+ const previousLastModified = yield _this7.getLastModified();
+ const lastModified = Math.max(...records.map(function (record) {
+ return record.last_modified;
+ }));
+ if (lastModified > previousLastModified) {
+ yield _this7.saveLastModified(lastModified);
+ }
+ return records;
+ } catch (e) {
+ _this7._handleError("loadDump", e);
+ }
+ })();
+ }
+}
+
+exports.default = IDB; /**
+ * IDB transaction proxy.
+ *
+ * @param {IDBStore} store The IndexedDB database store.
+ * @param {Array} preloaded The list of records to make available to
+ * get() (default: []).
+ * @return {Object}
+ */
+
+function transactionProxy(store, preloaded = []) {
+ return {
+ create(record) {
+ store.add(record);
+ },
+
+ update(record) {
+ store.put(record);
+ },
+
+ delete(id) {
+ store.delete(id);
+ },
+
+ get(id) {
+ return preloaded[id];
+ }
+ };
+}
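+
+// Rough sketch of how this proxy is consumed through execute() (ids are
+// illustrative and must have been preloaded for get() to return anything):
+//
+//   db.execute(txn => {
+//     const existing = txn.get("some-id");
+//     txn.update(Object.assign({}, existing, {title: "updated"}));
+//   }, {preload: ["some-id"]});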
+
+},{"../utils":87,"./base.js":85,"babel-runtime/core-js/object/keys":5,"babel-runtime/core-js/promise":6,"babel-runtime/helpers/asyncToGenerator":7}],85:[function(require,module,exports){
+"use strict";
+
+/**
+ * Base db adapter.
+ *
+ * @abstract
+ */
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+
+var _promise = require("babel-runtime/core-js/promise");
+
+var _promise2 = _interopRequireDefault(_promise);
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+class BaseAdapter {
+ /**
+ * Opens a connection to the database.
+ *
+ * @abstract
+ * @return {Promise}
+ */
+ open() {
+ return _promise2.default.resolve();
+ }
+
+ /**
+ * Closes current connection to the database.
+ *
+ * @abstract
+ * @return {Promise}
+ */
+ close() {
+ return _promise2.default.resolve();
+ }
+
+ /**
+   * Deletes every record present in the database.
+ *
+ * @abstract
+ * @return {Promise}
+ */
+ clear() {
+ throw new Error("Not Implemented.");
+ }
+
+ /**
+ * Executes a batch of operations within a single transaction.
+ *
+ * @abstract
+ * @param {Function} callback The operation callback.
+ * @param {Object} options The options object.
+ * @return {Promise}
+ */
+ execute(callback, options = { preload: [] }) {
+ throw new Error("Not Implemented.");
+ }
+
+ /**
+ * Retrieve a record by its primary key from the database.
+ *
+ * @abstract
+ * @param {String} id The record id.
+ * @return {Promise}
+ */
+ get(id) {
+ throw new Error("Not Implemented.");
+ }
+
+ /**
+ * Lists all records from the database.
+ *
+ * @abstract
+ * @param {Object} params The filters and order to apply to the results.
+ * @return {Promise}
+ */
+ list(params = { filters: {}, order: "" }) {
+ throw new Error("Not Implemented.");
+ }
+
+ /**
+ * Store the lastModified value.
+ *
+ * @abstract
+ * @param {Number} lastModified
+ * @return {Promise}
+ */
+ saveLastModified(lastModified) {
+ throw new Error("Not Implemented.");
+ }
+
+ /**
+ * Retrieve saved lastModified value.
+ *
+ * @abstract
+ * @return {Promise}
+ */
+ getLastModified() {
+ throw new Error("Not Implemented.");
+ }
+
+ /**
+ * Load a dump of records exported from a server.
+ *
+ * @abstract
+ * @return {Promise}
+ */
+ loadDump(records) {
+ throw new Error("Not Implemented.");
+ }
+}
+exports.default = BaseAdapter;
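+
+// Sketch of a custom adapter built by extending BaseAdapter (assumed usage,
+// not part of the library; only a couple of methods are shown):
+//
+//   class MemoryAdapter extends BaseAdapter {
+//     constructor() { super(); this._records = new Map(); }
+//     clear() { this._records.clear(); return Promise.resolve(); }
+//     get(id) { return Promise.resolve(this._records.get(id)); }
+//     // list(), execute(), saveLastModified(), etc. would also need overriding.
+//   }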
+
+},{"babel-runtime/core-js/promise":6}],86:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.CollectionTransaction = exports.SyncResultObject = undefined;
+
+var _stringify = require("babel-runtime/core-js/json/stringify");
+
+var _stringify2 = _interopRequireDefault(_stringify);
+
+var _promise = require("babel-runtime/core-js/promise");
+
+var _promise2 = _interopRequireDefault(_promise);
+
+var _asyncToGenerator2 = require("babel-runtime/helpers/asyncToGenerator");
+
+var _asyncToGenerator3 = _interopRequireDefault(_asyncToGenerator2);
+
+var _extends2 = require("babel-runtime/helpers/extends");
+
+var _extends3 = _interopRequireDefault(_extends2);
+
+var _assign = require("babel-runtime/core-js/object/assign");
+
+var _assign2 = _interopRequireDefault(_assign);
+
+exports.recordsEqual = recordsEqual;
+
+var _base = require("./adapters/base");
+
+var _base2 = _interopRequireDefault(_base);
+
+var _IDB = require("./adapters/IDB");
+
+var _IDB2 = _interopRequireDefault(_IDB);
+
+var _utils = require("./utils");
+
+var _uuid = require("uuid");
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const RECORD_FIELDS_TO_CLEAN = ["_status"];
+const AVAILABLE_HOOKS = ["incoming-changes"];
+
+/**
+ * Compare two records omitting local fields and synchronization
+ * attributes (like _status and last_modified)
+ * @param {Object} a A record to compare.
+ * @param {Object} b A record to compare.
+ * @return {boolean}
+ */
+function recordsEqual(a, b, localFields = []) {
+ const fieldsToClean = RECORD_FIELDS_TO_CLEAN.concat(["last_modified"]).concat(localFields);
+ const cleanLocal = r => (0, _utils.omitKeys)(r, fieldsToClean);
+ return (0, _utils.deepEqual)(cleanLocal(a), cleanLocal(b));
+}
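+
+// e.g. recordsEqual({id: "x", title: "a", _status: "synced", last_modified: 42},
+//                   {id: "x", title: "a"}) === true, since _status and
+// last_modified are ignored by the comparison.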
+
+/**
+ * Synchronization result object.
+ */
+class SyncResultObject {
+ /**
+ * Object default values.
+ * @type {Object}
+ */
+ static get defaults() {
+ return {
+ ok: true,
+ lastModified: null,
+ errors: [],
+ created: [],
+ updated: [],
+ deleted: [],
+ published: [],
+ conflicts: [],
+ skipped: [],
+ resolved: []
+ };
+ }
+
+ /**
+ * Public constructor.
+ */
+ constructor() {
+ /**
+ * Current synchronization result status; becomes `false` when conflicts or
+ * errors are registered.
+ * @type {Boolean}
+ */
+ this.ok = true;
+ (0, _assign2.default)(this, SyncResultObject.defaults);
+ }
+
+ /**
+ * Adds entries for a given result type.
+ *
+ * @param {String} type The result type.
+ * @param {Array} entries The result entries.
+ * @return {SyncResultObject}
+ */
+ add(type, entries) {
+ if (!Array.isArray(this[type])) {
+ return;
+ }
+    // Deduplicate entries by id. If the values don't have an `id` attribute,
+    // just keep them all.
+ const deduplicated = this[type].concat(entries).reduce((acc, cur) => {
+ const existing = acc.filter(r => cur.id && r.id ? cur.id != r.id : true);
+ return existing.concat(cur);
+ }, []);
+ this[type] = deduplicated;
+ this.ok = this.errors.length + this.conflicts.length === 0;
+ return this;
+ }
+
+ /**
+ * Reinitializes result entries for a given result type.
+ *
+ * @param {String} type The result type.
+ * @return {SyncResultObject}
+ */
+ reset(type) {
+ this[type] = SyncResultObject.defaults[type];
+ this.ok = this.errors.length + this.conflicts.length === 0;
+ return this;
+ }
+}
+
+exports.SyncResultObject = SyncResultObject;
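+
+// Illustrative accumulation of results (record values are made up):
+//
+//   const result = new SyncResultObject();
+//   result.add("created", [{id: "abc", title: "foo"}]);
+//   result.ok;  // => true
+//   result.add("conflicts", [{type: "incoming", local: {}, remote: {}}]);
+//   result.ok;  // => false, a conflict was registered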
+function createUUIDSchema() {
+ return {
+ generate() {
+ return (0, _uuid.v4)();
+ },
+
+ validate(id) {
+ return (0, _utils.isUUID)(id);
+ }
+ };
+}
+
+function markStatus(record, status) {
+ return (0, _extends3.default)({}, record, { _status: status });
+}
+
+function markDeleted(record) {
+ return markStatus(record, "deleted");
+}
+
+function markSynced(record) {
+ return markStatus(record, "synced");
+}
+
+/**
+ * Import a remote change into the local database.
+ *
+ * @param {IDBTransactionProxy} transaction The transaction handler.
+ * @param {Object} remote The remote change object to import.
+ * @param {Array<String>} localFields The list of fields that remain local.
+ * @return {Object}
+ */
+function importChange(transaction, remote, localFields) {
+ const local = transaction.get(remote.id);
+ if (!local) {
+ // Not found locally but remote change is marked as deleted; skip to
+ // avoid recreation.
+ if (remote.deleted) {
+ return { type: "skipped", data: remote };
+ }
+ const synced = markSynced(remote);
+ transaction.create(synced);
+ return { type: "created", data: synced };
+ }
+ // Compare local and remote, ignoring local fields.
+ const isIdentical = recordsEqual(local, remote, localFields);
+ // Apply remote changes on local record.
+ const synced = (0, _extends3.default)({}, local, markSynced(remote));
+ // Detect or ignore conflicts if record has also been modified locally.
+ if (local._status !== "synced") {
+ // Locally deleted, unsynced: scheduled for remote deletion.
+ if (local._status === "deleted") {
+ return { type: "skipped", data: local };
+ }
+ if (isIdentical) {
+ // If records are identical, import anyway, so we bump the
+ // local last_modified value from the server and set record
+ // status to "synced".
+ transaction.update(synced);
+ return { type: "updated", data: { old: local, new: synced } };
+ }
+ if (local.last_modified !== undefined && local.last_modified === remote.last_modified) {
+ // If our local version has the same last_modified as the remote
+ // one, this represents an object that corresponds to a resolved
+ // conflict. Our local version represents the final output, so
+ // we keep that one. (No transaction operation to do.)
+ // But if our last_modified is undefined,
+ // that means we've created the same object locally as one on
+ // the server, which *must* be a conflict.
+ return { type: "void" };
+ }
+ return {
+ type: "conflicts",
+ data: { type: "incoming", local: local, remote: remote }
+ };
+ }
+ // Local record was synced.
+ if (remote.deleted) {
+ transaction.delete(remote.id);
+ return { type: "deleted", data: local };
+ }
+ // Import locally.
+ transaction.update(synced);
+ // if identical, simply exclude it from all SyncResultObject lists
+ const type = isIdentical ? "void" : "updated";
+ return { type, data: { old: local, new: synced } };
+}
+
+/**
+ * Abstracts a collection of records stored in the local database, providing
+ * CRUD operations and synchronization helpers.
+ */
+class Collection {
+ /**
+ * Constructor.
+ *
+ * Options:
+ * - `{BaseAdapter} adapter` The DB adapter (default: `IDB`)
+ * - `{String} dbPrefix` The DB name prefix (default: `""`)
+ *
+ * @param {String} bucket The bucket identifier.
+ * @param {String} name The collection name.
+ * @param {Api} api The Api instance.
+ * @param {Object} options The options object.
+ */
+ constructor(bucket, name, api, options = {}) {
+ this._bucket = bucket;
+ this._name = name;
+ this._lastModified = null;
+
+ const DBAdapter = options.adapter || _IDB2.default;
+ if (!DBAdapter) {
+ throw new Error("No adapter provided");
+ }
+ const dbPrefix = options.dbPrefix || "";
+ const db = new DBAdapter(`${ dbPrefix }${ bucket }/${ name }`, options.adapterOptions);
+ if (!(db instanceof _base2.default)) {
+ throw new Error("Unsupported adapter.");
+ }
+ // public properties
+ /**
+ * The db adapter instance
+ * @type {BaseAdapter}
+ */
+ this.db = db;
+ /**
+ * The Api instance.
+ * @type {KintoClient}
+ */
+ this.api = api;
+ /**
+ * The event emitter instance.
+ * @type {EventEmitter}
+ */
+ this.events = options.events;
+ /**
+ * The IdSchema instance.
+ * @type {Object}
+ */
+ this.idSchema = this._validateIdSchema(options.idSchema);
+ /**
+ * The list of remote transformers.
+ * @type {Array}
+ */
+ this.remoteTransformers = this._validateRemoteTransformers(options.remoteTransformers);
+ /**
+ * The list of hooks.
+ * @type {Object}
+ */
+ this.hooks = this._validateHooks(options.hooks);
+ /**
+ * The list of fields names that will remain local.
+ * @type {Array}
+ */
+ this.localFields = options.localFields || [];
+ }
+
+ /**
+ * The collection name.
+ * @type {String}
+ */
+ get name() {
+ return this._name;
+ }
+
+ /**
+ * The bucket name.
+ * @type {String}
+ */
+ get bucket() {
+ return this._bucket;
+ }
+
+ /**
+ * The last modified timestamp.
+ * @type {Number}
+ */
+ get lastModified() {
+ return this._lastModified;
+ }
+
+ /**
+ * Synchronization strategies. Available strategies are:
+ *
+ * - `MANUAL`: Conflicts will be reported in a dedicated array.
+ * - `SERVER_WINS`: Conflicts are resolved using remote data.
+ * - `CLIENT_WINS`: Conflicts are resolved using local data.
+ *
+ * @type {Object}
+ */
+ static get strategy() {
+ return {
+ CLIENT_WINS: "client_wins",
+ SERVER_WINS: "server_wins",
+ MANUAL: "manual"
+ };
+ }
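+
+  // For example (illustrative), calling sync({strategy: Collection.strategy.SERVER_WINS})
+  // on a collection resolves every conflict by keeping the remote version of the record.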
+
+ /**
+ * Validates an idSchema.
+ *
+ * @param {Object|undefined} idSchema
+ * @return {Object}
+ */
+ _validateIdSchema(idSchema) {
+ if (typeof idSchema === "undefined") {
+ return createUUIDSchema();
+ }
+ if (typeof idSchema !== "object") {
+ throw new Error("idSchema must be an object.");
+ } else if (typeof idSchema.generate !== "function") {
+ throw new Error("idSchema must provide a generate function.");
+ } else if (typeof idSchema.validate !== "function") {
+ throw new Error("idSchema must provide a validate function.");
+ }
+ return idSchema;
+ }
+
+ /**
+ * Validates a list of remote transformers.
+ *
+ * @param {Array|undefined} remoteTransformers
+ * @return {Array}
+ */
+ _validateRemoteTransformers(remoteTransformers) {
+ if (typeof remoteTransformers === "undefined") {
+ return [];
+ }
+ if (!Array.isArray(remoteTransformers)) {
+ throw new Error("remoteTransformers should be an array.");
+ }
+ return remoteTransformers.map(transformer => {
+ if (typeof transformer !== "object") {
+ throw new Error("A transformer must be an object.");
+ } else if (typeof transformer.encode !== "function") {
+ throw new Error("A transformer must provide an encode function.");
+ } else if (typeof transformer.decode !== "function") {
+ throw new Error("A transformer must provide a decode function.");
+ }
+ return transformer;
+ });
+ }
+
+ /**
+   * Validates that the passed hook is correct.
+   *
+   * @param {Array|undefined} hook The hook definition.
+ * @return {Array}
+ **/
+ _validateHook(hook) {
+ if (!Array.isArray(hook)) {
+ throw new Error("A hook definition should be an array of functions.");
+ }
+ return hook.map(fn => {
+ if (typeof fn !== "function") {
+ throw new Error("A hook definition should be an array of functions.");
+ }
+ return fn;
+ });
+ }
+
+ /**
+ * Validates a list of hooks.
+ *
+ * @param {Object|undefined} hooks
+ * @return {Object}
+ */
+ _validateHooks(hooks) {
+ if (typeof hooks === "undefined") {
+ return {};
+ }
+ if (Array.isArray(hooks)) {
+ throw new Error("hooks should be an object, not an array.");
+ }
+ if (typeof hooks !== "object") {
+ throw new Error("hooks should be an object.");
+ }
+
+ const validatedHooks = {};
+
+ for (let hook in hooks) {
+ if (AVAILABLE_HOOKS.indexOf(hook) === -1) {
+ throw new Error("The hook should be one of " + AVAILABLE_HOOKS.join(", "));
+ }
+ validatedHooks[hook] = this._validateHook(hooks[hook]);
+ }
+ return validatedHooks;
+ }
+
+ /**
+   * Deletes every record in the current collection and marks the collection as
+ * never synced.
+ *
+ * @return {Promise}
+ */
+ clear() {
+ var _this = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ yield _this.db.clear();
+ yield _this.db.saveLastModified(null);
+ return { data: [], permissions: {} };
+ })();
+ }
+
+ /**
+ * Encodes a record.
+ *
+ * @param {String} type Either "remote" or "local".
+ * @param {Object} record The record object to encode.
+ * @return {Promise}
+ */
+ _encodeRecord(type, record) {
+ if (!this[`${ type }Transformers`].length) {
+ return _promise2.default.resolve(record);
+ }
+ return (0, _utils.waterfall)(this[`${ type }Transformers`].map(transformer => {
+ return record => transformer.encode(record);
+ }), record);
+ }
+
+ /**
+ * Decodes a record.
+ *
+ * @param {String} type Either "remote" or "local".
+ * @param {Object} record The record object to decode.
+ * @return {Promise}
+ */
+ _decodeRecord(type, record) {
+ if (!this[`${ type }Transformers`].length) {
+ return _promise2.default.resolve(record);
+ }
+ return (0, _utils.waterfall)(this[`${ type }Transformers`].reverse().map(transformer => {
+ return record => transformer.decode(record);
+ }), record);
+ }
+
+ /**
+ * Adds a record to the local database, asserting that none
+ * already exist with this ID.
+ *
+ * Note: If either the `useRecordId` or `synced` options are true, then the
+ * record object must contain the id field to be validated. If none of these
+ * options are true, an id is generated using the current IdSchema; in this
+ * case, the record passed must not have an id.
+ *
+ * Options:
+ * - {Boolean} synced Sets record status to "synced" (default: `false`).
+ * - {Boolean} useRecordId Forces the `id` field from the record to be used,
+ * instead of one that is generated automatically
+ * (default: `false`).
+ *
+ * @param {Object} record
+ * @param {Object} options
+ * @return {Promise}
+ */
+ create(record, options = { useRecordId: false, synced: false }) {
+ // Validate the record and its ID (if any), even though this
+ // validation is also done in the CollectionTransaction method,
+ // because we need to pass the ID to preloadIds.
+ const reject = msg => _promise2.default.reject(new Error(msg));
+ if (typeof record !== "object") {
+ return reject("Record is not an object.");
+ }
+ if ((options.synced || options.useRecordId) && !record.hasOwnProperty("id")) {
+ return reject("Missing required Id; synced and useRecordId options require one");
+ }
+ if (!options.synced && !options.useRecordId && record.hasOwnProperty("id")) {
+ return reject("Extraneous Id; can't create a record having one set.");
+ }
+ const newRecord = (0, _extends3.default)({}, record, {
+ id: options.synced || options.useRecordId ? record.id : this.idSchema.generate(),
+ _status: options.synced ? "synced" : "created"
+ });
+ if (!this.idSchema.validate(newRecord.id)) {
+ return reject(`Invalid Id: ${ newRecord.id }`);
+ }
+ return this.execute(txn => txn.create(newRecord), { preloadIds: [newRecord.id] }).catch(err => {
+ if (options.useRecordId) {
+ throw new Error("Couldn't create record. It may have been virtually deleted.");
+ }
+ throw err;
+ });
+ }
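+
+  // Illustrative calls (ids shown are placeholders and must satisfy the IdSchema):
+  //   articles.create({title: "foo"});                                   // id generated
+  //   articles.create({id: someValidUuid, title: "bar"}, {useRecordId: true});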
+
+ /**
+ * Like {@link CollectionTransaction#update}, but wrapped in its own transaction.
+ *
+ * Options:
+ * - {Boolean} synced: Sets record status to "synced" (default: false)
+ * - {Boolean} patch: Extends the existing record instead of overwriting it
+ * (default: false)
+ *
+ * @param {Object} record
+ * @param {Object} options
+ * @return {Promise}
+ */
+ update(record, options = { synced: false, patch: false }) {
+ // Validate the record and its ID, even though this validation is
+ // also done in the CollectionTransaction method, because we need
+ // to pass the ID to preloadIds.
+ if (typeof record !== "object") {
+ return _promise2.default.reject(new Error("Record is not an object."));
+ }
+ if (!record.hasOwnProperty("id")) {
+ return _promise2.default.reject(new Error("Cannot update a record missing id."));
+ }
+ if (!this.idSchema.validate(record.id)) {
+ return _promise2.default.reject(new Error(`Invalid Id: ${ record.id }`));
+ }
+
+ return this.execute(txn => txn.update(record, options), { preloadIds: [record.id] });
+ }
+
+ /**
+ * Like {@link CollectionTransaction#upsert}, but wrapped in its own transaction.
+ *
+ * @param {Object} record
+ * @return {Promise}
+ */
+ upsert(record) {
+ // Validate the record and its ID, even though this validation is
+ // also done in the CollectionTransaction method, because we need
+ // to pass the ID to preloadIds.
+ if (typeof record !== "object") {
+ return _promise2.default.reject(new Error("Record is not an object."));
+ }
+ if (!record.hasOwnProperty("id")) {
+ return _promise2.default.reject(new Error("Cannot update a record missing id."));
+ }
+ if (!this.idSchema.validate(record.id)) {
+ return _promise2.default.reject(new Error(`Invalid Id: ${ record.id }`));
+ }
+
+ return this.execute(txn => txn.upsert(record), { preloadIds: [record.id] });
+ }
+
+ /**
+ * Like {@link CollectionTransaction#get}, but wrapped in its own transaction.
+ *
+ * Options:
+ * - {Boolean} includeDeleted: Include virtually deleted records.
+ *
+ * @param {String} id
+ * @param {Object} options
+ * @return {Promise}
+ */
+ get(id, options = { includeDeleted: false }) {
+ return this.execute(txn => txn.get(id, options), { preloadIds: [id] });
+ }
+
+ /**
+ * Like {@link CollectionTransaction#getAny}, but wrapped in its own transaction.
+ *
+ * @param {String} id
+ * @return {Promise}
+ */
+ getAny(id) {
+ return this.execute(txn => txn.getAny(id), { preloadIds: [id] });
+ }
+
+ /**
+   * Like {@link CollectionTransaction#delete}, but wrapped in its own transaction.
+ *
+ * Options:
+   * - {Boolean} virtual: When set to `true`, doesn't actually delete the record
+   *   but updates its `_status` attribute to `deleted` instead (default: `true`)
+ *
+ * @param {String} id The record's Id.
+ * @param {Object} options The options object.
+ * @return {Promise}
+ */
+ delete(id, options = { virtual: true }) {
+ return this.execute(transaction => {
+ return transaction.delete(id, options);
+ }, { preloadIds: [id] });
+ }
+
+ /**
+ * The same as {@link CollectionTransaction#deleteAny}, but wrapped
+ * in its own transaction.
+ *
+ * @param {String} id The record's Id.
+ * @return {Promise}
+ */
+ deleteAny(id) {
+ return this.execute(txn => txn.deleteAny(id), { preloadIds: [id] });
+ }
+
+ /**
+ * Lists records from the local database.
+ *
+ * Params:
+ * - {Object} filters Filter the results (default: `{}`).
+ * - {String} order The order to apply (default: `-last_modified`).
+ *
+ * Options:
+ * - {Boolean} includeDeleted: Include virtually deleted records.
+ *
+ * @param {Object} params The filters and order to apply to the results.
+ * @param {Object} options The options object.
+ * @return {Promise}
+ */
+ list(params = {}, options = { includeDeleted: false }) {
+ var _this2 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ params = (0, _extends3.default)({ order: "-last_modified", filters: {} }, params);
+ const results = yield _this2.db.list(params);
+ let data = results;
+ if (!options.includeDeleted) {
+ data = results.filter(function (record) {
+ return record._status !== "deleted";
+ });
+ }
+ return { data, permissions: {} };
+ })();
+ }
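+
+  // Illustrative call, assuming an `articles` collection:
+  //   articles.list({filters: {_status: "created"}, order: "title"})
+  //     .then(({data}) => console.log(data));  // virtually deleted records excluded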
+
+ /**
+ * Imports remote changes into the local database.
+   * This method is in charge of detecting conflicts and resolving them
+   * according to the specified strategy.
+ * @param {SyncResultObject} syncResultObject The sync result object.
+ * @param {Array} decodedChanges The list of changes to import in the local database.
+ * @param {String} strategy The {@link Collection.strategy} (default: MANUAL)
+ * @return {Promise}
+ */
+ importChanges(syncResultObject, decodedChanges, strategy = Collection.strategy.MANUAL) {
+ var _this3 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ // Retrieve records matching change ids.
+ try {
+ const { imports, resolved } = yield _this3.db.execute(function (transaction) {
+ const imports = decodedChanges.map(function (remote) {
+ // Store remote change into local database.
+ return importChange(transaction, remote, _this3.localFields);
+ });
+ const conflicts = imports.filter(function (i) {
+ return i.type === "conflicts";
+ }).map(function (i) {
+ return i.data;
+ });
+ const resolved = _this3._handleConflicts(transaction, conflicts, strategy);
+ return { imports, resolved };
+ }, { preload: decodedChanges.map(function (record) {
+ return record.id;
+ }) });
+
+ // Lists of created/updated/deleted records
+ imports.forEach(function ({ type, data }) {
+ return syncResultObject.add(type, data);
+ });
+
+ // Automatically resolved conflicts (if not manual)
+ if (resolved.length > 0) {
+ syncResultObject.reset("conflicts").add("resolved", resolved);
+ }
+ } catch (err) {
+ const data = {
+ type: "incoming",
+ message: err.message,
+ stack: err.stack
+ };
+ // XXX one error of the whole transaction instead of per atomic op
+ syncResultObject.add("errors", data);
+ }
+
+ return syncResultObject;
+ })();
+ }
+
+ /**
+ * Imports the responses of pushed changes into the local database.
+   * Basically it stores the timestamp assigned by the server.
+ * @param {SyncResultObject} syncResultObject The sync result object.
+ * @param {Array} toApplyLocally The list of changes to import in the local database.
+ * @param {Array} conflicts The list of conflicts that have to be resolved.
+ * @param {String} strategy The {@link Collection.strategy}.
+ * @return {Promise}
+ */
+ _applyPushedResults(syncResultObject, toApplyLocally, conflicts, strategy = Collection.strategy.MANUAL) {
+ var _this4 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ const toDeleteLocally = toApplyLocally.filter(function (r) {
+ return r.deleted;
+ });
+ const toUpdateLocally = toApplyLocally.filter(function (r) {
+ return !r.deleted;
+ });
+
+ const { published, resolved } = yield _this4.db.execute(function (transaction) {
+ const updated = toUpdateLocally.map(function (record) {
+ const synced = markSynced(record);
+ transaction.update(synced);
+ return synced;
+ });
+ const deleted = toDeleteLocally.map(function (record) {
+ transaction.delete(record.id);
+ // Amend result data with the deleted attribute set
+ return { id: record.id, deleted: true };
+ });
+ const published = updated.concat(deleted);
+ // Handle conflicts, if any
+ const resolved = _this4._handleConflicts(transaction, conflicts, strategy);
+ return { published, resolved };
+ });
+
+ syncResultObject.add("published", published);
+
+ if (resolved.length > 0) {
+ syncResultObject.reset("conflicts").reset("resolved").add("resolved", resolved);
+ }
+ return syncResultObject;
+ })();
+ }
+
+ /**
+ * Handles synchronization conflicts according to specified strategy.
+ *
+   * @param {Object} transaction The transaction handler.
+   * @param {Array} conflicts The conflicts to resolve.
+   * @param {String} strategy The {@link Collection.strategy}.
+   * @return {Array} The resolved records.
+ */
+ _handleConflicts(transaction, conflicts, strategy) {
+ if (strategy === Collection.strategy.MANUAL) {
+ return [];
+ }
+ return conflicts.map(conflict => {
+ const resolution = strategy === Collection.strategy.CLIENT_WINS ? conflict.local : conflict.remote;
+ const updated = this._resolveRaw(conflict, resolution);
+ transaction.update(updated);
+ return updated;
+ });
+ }
+
+ /**
+ * Execute a bunch of operations in a transaction.
+ *
+ * This transaction should be atomic -- either all of its operations
+ * will succeed, or none will.
+ *
+ * The argument to this function is itself a function which will be
+ * called with a {@link CollectionTransaction}. Collection methods
+ * are available on this transaction, but instead of returning
+ * promises, they are synchronous. execute() returns a Promise whose
+ * value will be the return value of the provided function.
+ *
+ * Most operations will require access to the record itself, which
+ * must be preloaded by passing its ID in the preloadIds option.
+ *
+ * Options:
+ * - {Array} preloadIds: list of IDs to fetch at the beginning of
+ * the transaction
+ *
+ * @return {Promise} Resolves with the result of the given function
+ * when the transaction commits.
+ */
+ execute(doOperations, { preloadIds = [] } = {}) {
+ for (let id of preloadIds) {
+ if (!this.idSchema.validate(id)) {
+ return _promise2.default.reject(Error(`Invalid Id: ${ id }`));
+ }
+ }
+
+ return this.db.execute(transaction => {
+ const txn = new CollectionTransaction(this, transaction);
+ const result = doOperations(txn);
+ txn.emitEvents();
+ return result;
+ }, { preload: preloadIds });
+ }
+
+ /**
+ * Resets the local records as if they were never synced; existing records are
+ * marked as newly created, deleted records are dropped.
+ *
+ * A next call to {@link Collection.sync} will thus republish the whole
+ * content of the local collection to the server.
+ *
+ * @return {Promise} Resolves with the number of processed records.
+ */
+ resetSyncStatus() {
+ var _this5 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ const unsynced = yield _this5.list({ filters: { _status: ["deleted", "synced"] }, order: "" }, { includeDeleted: true });
+ yield _this5.db.execute(function (transaction) {
+ unsynced.data.forEach(function (record) {
+ if (record._status === "deleted") {
+ // Garbage collect deleted records.
+ transaction.delete(record.id);
+ } else {
+ // Records that were synced become «created».
+ transaction.update((0, _extends3.default)({}, record, {
+ last_modified: undefined,
+ _status: "created"
+ }));
+ }
+ });
+ });
+ _this5._lastModified = null;
+ yield _this5.db.saveLastModified(null);
+ return unsynced.data.length;
+ })();
+ }
+
+ /**
+ * Returns an object containing two lists:
+ *
+ * - `toDelete`: unsynced deleted records we can safely delete;
+ * - `toSync`: local updates to send to the server.
+ *
+ * @return {Promise}
+ */
+ gatherLocalChanges() {
+ var _this6 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ const unsynced = yield _this6.list({ filters: { _status: ["created", "updated"] }, order: "" });
+ const deleted = yield _this6.list({ filters: { _status: "deleted" }, order: "" }, { includeDeleted: true });
+
+ const toSync = yield _promise2.default.all(unsynced.data.map(_this6._encodeRecord.bind(_this6, "remote")));
+ const toDelete = yield _promise2.default.all(deleted.data.map(_this6._encodeRecord.bind(_this6, "remote")));
+
+ return { toSync, toDelete };
+ })();
+ }
+
+ /**
+ * Fetch remote changes, import them to the local database, and handle
+ * conflicts according to `options.strategy`. Then, updates the passed
+ * {@link SyncResultObject} with import results.
+ *
+ * Options:
+ * - {String} strategy: The selected sync strategy.
+ *
+ * @param {KintoClient.Collection} client Kinto client Collection instance.
+ * @param {SyncResultObject} syncResultObject The sync result object.
+ * @param {Object} options
+ * @return {Promise}
+ */
+ pullChanges(client, syncResultObject, options = {}) {
+ var _this7 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ if (!syncResultObject.ok) {
+ return syncResultObject;
+ }
+
+ const since = _this7.lastModified ? _this7.lastModified : yield _this7.db.getLastModified();
+
+ options = (0, _extends3.default)({
+ strategy: Collection.strategy.MANUAL,
+ lastModified: since,
+ headers: {}
+ }, options);
+
+      // Optionally ignore some records when pulling changes
+      // (avoids redownloading our own changes during the last step of #sync()).
+ let filters;
+ if (options.exclude) {
+ // Limit the list of excluded records to the first 50 records in order
+        // to remain under the de-facto URL size limit (~2000 chars).
+ // http://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers/417184#417184
+ const exclude_id = options.exclude.slice(0, 50).map(function (r) {
+ return r.id;
+ }).join(",");
+ filters = { exclude_id };
+ }
+ // First fetch remote changes from the server
+ const { data, last_modified } = yield client.listRecords({
+ // Since should be ETag (see https://github.com/Kinto/kinto.js/issues/356)
+ since: options.lastModified ? `${ options.lastModified }` : undefined,
+ headers: options.headers,
+ filters
+ });
+ // last_modified is the ETag header value (string).
+      // For backward compatibility with early kinto.js versions,
+      // parse it to an integer.
+ const unquoted = last_modified ? parseInt(last_modified, 10) : undefined;
+
+ // Check if server was flushed.
+ // This is relevant for the Kinto demo server
+      // (and thus for many newcomers).
+ const localSynced = options.lastModified;
+ const serverChanged = unquoted > options.lastModified;
+ const emptyCollection = data.length === 0;
+ if (!options.exclude && localSynced && serverChanged && emptyCollection) {
+ throw Error("Server has been flushed.");
+ }
+
+ syncResultObject.lastModified = unquoted;
+
+ // Decode incoming changes.
+ const decodedChanges = yield _promise2.default.all(data.map(function (change) {
+ return _this7._decodeRecord("remote", change);
+ }));
+ // Hook receives decoded records.
+ const payload = { lastModified: unquoted, changes: decodedChanges };
+ const afterHooks = yield _this7.applyHook("incoming-changes", payload);
+
+ // No change, nothing to import.
+ if (afterHooks.changes.length > 0) {
+ // Reflect these changes locally
+ yield _this7.importChanges(syncResultObject, afterHooks.changes, options.strategy);
+ }
+ return syncResultObject;
+ })();
+ }
+
+ applyHook(hookName, payload) {
+ if (typeof this.hooks[hookName] == "undefined") {
+ return _promise2.default.resolve(payload);
+ }
+ return (0, _utils.waterfall)(this.hooks[hookName].map(hook => {
+ return record => {
+ const result = hook(payload, this);
+ const resultThenable = result && typeof result.then === "function";
+ const resultChanges = result && result.hasOwnProperty("changes");
+ if (!(resultThenable || resultChanges)) {
+ throw new Error(`Invalid return value for hook: ${ (0, _stringify2.default)(result) } has no 'then()' or 'changes' properties`);
+ }
+ return result;
+ };
+ }), payload);
+ }
+
+ /**
+ * Publish local changes to the remote server and updates the passed
+ * {@link SyncResultObject} with publication results.
+ *
+ * @param {KintoClient.Collection} client Kinto client Collection instance.
+ * @param {SyncResultObject} syncResultObject The sync result object.
+ * @param {Object} changes The change object.
+ * @param {Array} changes.toDelete The list of records to delete.
+ * @param {Array} changes.toSync The list of records to create/update.
+ * @param {Object} options The options object.
+ * @return {Promise}
+ */
+ pushChanges(client, { toDelete = [], toSync }, syncResultObject, options = {}) {
+ var _this8 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ if (!syncResultObject.ok) {
+ return syncResultObject;
+ }
+      const safe = !options.strategy || options.strategy !== Collection.strategy.CLIENT_WINS;
+
+      // Perform a batch request with all changes.
+ const synced = yield client.batch(function (batch) {
+ toDelete.forEach(function (r) {
+          // Locally deleted records that were never published should not be pushed.
+ if (r.last_modified) {
+ batch.deleteRecord(r);
+ }
+ });
+ toSync.forEach(function (r) {
+ // Clean local fields (like _status) before sending to server.
+ const published = _this8.cleanLocalFields(r);
+ if (r._status === "created") {
+ batch.createRecord(published);
+ } else {
+ batch.updateRecord(published);
+ }
+ });
+ }, { headers: options.headers, safe, aggregate: true });
+
+ // Store outgoing errors into sync result object
+ syncResultObject.add("errors", synced.errors.map(function (e) {
+ return (0, _extends3.default)({}, e, { type: "outgoing" });
+ }));
+
+ // Store outgoing conflicts into sync result object
+ const conflicts = [];
+ for (let { type, local, remote } of synced.conflicts) {
+ // Note: we ensure that local data are actually available, as they may
+ // be missing in the case of a published deletion.
+ const safeLocal = local && local.data || { id: remote.id };
+ const realLocal = yield _this8._decodeRecord("remote", safeLocal);
+ const realRemote = yield _this8._decodeRecord("remote", remote);
+ const conflict = { type, local: realLocal, remote: realRemote };
+ conflicts.push(conflict);
+ }
+ syncResultObject.add("conflicts", conflicts);
+
+ // Records that must be deleted are either deletions that were pushed
+ // to server (published) or deleted records that were never pushed (skipped).
+ const missingRemotely = synced.skipped.map(function (r) {
+ return (0, _extends3.default)({}, r, { deleted: true });
+ });
+
+ // For created and updated records, the last_modified coming from server
+ // will be stored locally.
+ // Reflect publication results locally using the response from
+ // the batch request.
+ const published = synced.published.map(function (c) {
+ return c.data;
+ });
+ const toApplyLocally = published.concat(missingRemotely);
+
+ // Apply the decode transformers, if any
+ const decoded = yield _promise2.default.all(toApplyLocally.map(function (record) {
+ return _this8._decodeRecord("remote", record);
+ }));
+
+ // We have to update the local records with the responses of the server
+ // (eg. last_modified values etc.).
+ if (decoded.length > 0 || conflicts.length > 0) {
+ yield _this8._applyPushedResults(syncResultObject, decoded, conflicts, options.strategy);
+ }
+
+ return syncResultObject;
+ })();
+ }
+
+ /**
+ * Return a copy of the specified record without the local fields.
+ *
+ * @param {Object} record A record with potential local fields.
+ * @return {Object}
+ */
+ cleanLocalFields(record) {
+ const localKeys = RECORD_FIELDS_TO_CLEAN.concat(this.localFields);
+ return (0, _utils.omitKeys)(record, localKeys);
+ }
+
+ /**
+   * Resolves a conflict, updating the local record according to the proposed
+   * resolution and keeping the remote record's `last_modified` value as a
+   * reference for further batch sending.
+ *
+ * @param {Object} conflict The conflict object.
+ * @param {Object} resolution The proposed record.
+ * @return {Promise}
+ */
+ resolve(conflict, resolution) {
+ return this.db.execute(transaction => {
+ const updated = this._resolveRaw(conflict, resolution);
+ transaction.update(updated);
+ return { data: updated, permissions: {} };
+ });
+ }
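+
+  // Illustrative resolution of conflicts reported by a MANUAL sync (names assumed):
+  //   Promise.all(result.conflicts.map(c => articles.resolve(c, c.local)));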
+
+ /**
+ * @private
+ */
+ _resolveRaw(conflict, resolution) {
+ const resolved = (0, _extends3.default)({}, resolution, {
+ // Ensure local record has the latest authoritative timestamp
+ last_modified: conflict.remote.last_modified
+ });
+ // If the resolution object is strictly equal to the
+ // remote record, then we can mark it as synced locally.
+ // Otherwise, mark it as updated (so that the resolution is pushed).
+ const synced = (0, _utils.deepEqual)(resolved, conflict.remote);
+ return markStatus(resolved, synced ? "synced" : "updated");
+ }
+
+ /**
+ * Synchronize remote and local data. The promise will resolve with a
+   * {@link SyncResultObject}, but will reject:
+ *
+ * - if the server is currently backed off;
+   * - if the server has been detected as flushed.
+ *
+ * Options:
+ * - {Object} headers: HTTP headers to attach to outgoing requests.
+ * - {Collection.strategy} strategy: See {@link Collection.strategy}.
+ * - {Boolean} ignoreBackoff: Force synchronization even if server is currently
+ * backed off.
+   * - {String} bucket: The remote bucket id to use (default: null)
+   * - {String} collection: The remote collection id to use (default: null)
+ * - {String} remote The remote Kinto server endpoint to use (default: null).
+ *
+ * @param {Object} options Options.
+ * @return {Promise}
+ * @throws {Error} If an invalid remote option is passed.
+ */
+ sync(options = {
+ strategy: Collection.strategy.MANUAL,
+ headers: {},
+ ignoreBackoff: false,
+ bucket: null,
+ collection: null,
+ remote: null
+ }) {
+ var _this9 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ const previousRemote = _this9.api.remote;
+ if (options.remote) {
+ // Note: setting the remote ensures it's valid, throws when invalid.
+ _this9.api.remote = options.remote;
+ }
+ if (!options.ignoreBackoff && _this9.api.backoff > 0) {
+ const seconds = Math.ceil(_this9.api.backoff / 1000);
+ return _promise2.default.reject(new Error(`Server is asking clients to back off; retry in ${ seconds }s or use the ignoreBackoff option.`));
+ }
+
+ const client = _this9.api.bucket(options.bucket || _this9.bucket).collection(options.collection || _this9.name);
+
+ const result = new SyncResultObject();
+ try {
+ // Fetch last changes from the server.
+ yield _this9.pullChanges(client, result, options);
+ const { lastModified } = result;
+
+ // Fetch local changes
+ const { toDelete, toSync } = yield _this9.gatherLocalChanges();
+
+ // Publish local changes and pull local resolutions
+ yield _this9.pushChanges(client, { toDelete, toSync }, result, options);
+
+ // Publish local resolution of push conflicts to server (on CLIENT_WINS)
+ const resolvedUnsynced = result.resolved.filter(function (r) {
+ return r._status !== "synced";
+ });
+ if (resolvedUnsynced.length > 0) {
+ const resolvedEncoded = yield _promise2.default.all(resolvedUnsynced.map(_this9._encodeRecord.bind(_this9, "remote")));
+ yield _this9.pushChanges(client, { toSync: resolvedEncoded }, result, options);
+ }
+        // Perform a last pull to catch changes that occurred after the last pull,
+        // while local changes were pushed. Do not do it if nothing was pushed.
+ if (result.published.length > 0) {
+ // Avoid redownloading our own changes during the last pull.
+ const pullOpts = (0, _extends3.default)({}, options, { lastModified, exclude: result.published });
+ yield _this9.pullChanges(client, result, pullOpts);
+ }
+
+        // Don't persist lastModified value if any conflict or error occurred
+ if (result.ok) {
+          // No conflict occurred, persist collection's lastModified value
+ _this9._lastModified = yield _this9.db.saveLastModified(result.lastModified);
+ }
+ } finally {
+ // Ensure API default remote is reverted if a custom one's been used
+ _this9.api.remote = previousRemote;
+ }
+ return result;
+ })();
+ }
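+
+  // Illustrative end-to-end usage, assuming an `articles` collection:
+  //   articles.sync({strategy: Collection.strategy.MANUAL})
+  //     .then(result => {
+  //       if (!result.ok) {
+  //         // Inspect result.conflicts and resolve them via collection.resolve().
+  //       }
+  //     })
+  //     .catch(err => console.error(err));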
+
+ /**
+ * Load a list of records already synced with the remote server.
+ *
+   * Local records that are unsynced, or whose timestamp is missing or newer
+   * than that of the records being loaded, will be ignored.
+ *
+ * @param {Array} records The previously exported list of records to load.
+ * @return {Promise} with the effectively imported records.
+ */
+ loadDump(records) {
+ var _this10 = this;
+
+ return (0, _asyncToGenerator3.default)(function* () {
+ if (!Array.isArray(records)) {
+ throw new Error("Records is not an array.");
+ }
+
+ for (let record of records) {
+ if (!record.hasOwnProperty("id") || !_this10.idSchema.validate(record.id)) {
+ throw new Error("Record has invalid ID: " + (0, _stringify2.default)(record));
+ }
+
+ if (!record.last_modified) {
+ throw new Error("Record has no last_modified value: " + (0, _stringify2.default)(record));
+ }
+ }
+
+      // Fetch all existing records from the local database,
+      // and skip those that are newer or not marked as synced.
+
+ // XXX filter by status / ids in records
+
+ const { data } = yield _this10.list({}, { includeDeleted: true });
+ const existingById = data.reduce(function (acc, record) {
+ acc[record.id] = record;
+ return acc;
+ }, {});
+
+ const newRecords = records.filter(function (record) {
+ const localRecord = existingById[record.id];
+ const shouldKeep =
+ // No local record with this id.
+ localRecord === undefined ||
+ // Or local record is synced
+ localRecord._status === "synced" &&
+ // And was synced from server
+ localRecord.last_modified !== undefined &&
+ // And is older than imported one.
+ record.last_modified > localRecord.last_modified;
+ return shouldKeep;
+ });
+
+ return yield _this10.db.loadDump(newRecords.map(markSynced));
+ })();
+ }
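+
+  // Sketch of a typical loadDump() call (hypothetical record; per the checks
+  // above each entry must carry an id matching the idSchema and a
+  // last_modified value):
+  //
+  //   const imported = await articles.loadDump([
+  //     { id: "8d9df9bd-aa11-4bcd-9abc-111111111111",
+  //       last_modified: 1457896541, title: "hello" },
+  //   ]);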
+}
+
+exports.default = Collection; /**
+ * A Collection-oriented wrapper for an adapter's transaction.
+ *
+ * This defines the high-level functions available on a collection.
+ * The collection itself offers functions of the same name; these each
+ * perform just one operation in their own transaction.
+ */
+
+class CollectionTransaction {
+ constructor(collection, adapterTransaction) {
+ this.collection = collection;
+ this.adapterTransaction = adapterTransaction;
+
+ this._events = [];
+ }
+
+ _queueEvent(action, payload) {
+ this._events.push({ action, payload });
+ }
+
+ /**
+   * Emit queued events; to be called once all transaction operations have
+   * been executed successfully.
+ */
+ emitEvents() {
+ for (let { action, payload } of this._events) {
+ this.collection.events.emit(action, payload);
+ }
+ if (this._events.length > 0) {
+ const targets = this._events.map(({ action, payload }) => (0, _extends3.default)({ action }, payload));
+ this.collection.events.emit("change", { targets });
+ }
+ this._events = [];
+ }
+
+ /**
+ * Retrieve a record by its id from the local database, or
+ * undefined if none exists.
+ *
+ * This will also return virtually deleted records.
+ *
+ * @param {String} id
+ * @return {Object}
+ */
+ getAny(id) {
+ const record = this.adapterTransaction.get(id);
+ return { data: record, permissions: {} };
+ }
+
+ /**
+ * Retrieve a record by its id from the local database.
+ *
+ * Options:
+ * - {Boolean} includeDeleted: Include virtually deleted records.
+ *
+ * @param {String} id
+ * @param {Object} options
+ * @return {Object}
+ */
+ get(id, options = { includeDeleted: false }) {
+ const res = this.getAny(id);
+ if (!res.data || !options.includeDeleted && res.data._status === "deleted") {
+ throw new Error(`Record with id=${ id } not found.`);
+ }
+
+ return res;
+ }
+
+ /**
+ * Deletes a record from the local database.
+ *
+ * Options:
+   * - {Boolean} virtual: When set to `true`, doesn't actually delete the record,
+   *   but updates its `_status` attribute to `deleted` instead (default: true)
+ *
+ * @param {String} id The record's Id.
+ * @param {Object} options The options object.
+ * @return {Object}
+ */
+ delete(id, options = { virtual: true }) {
+ // Ensure the record actually exists.
+ const existing = this.adapterTransaction.get(id);
+ const alreadyDeleted = existing && existing._status == "deleted";
+ if (!existing || alreadyDeleted && options.virtual) {
+ throw new Error(`Record with id=${ id } not found.`);
+ }
+ // Virtual updates status.
+ if (options.virtual) {
+ this.adapterTransaction.update(markDeleted(existing));
+ } else {
+ // Delete for real.
+ this.adapterTransaction.delete(id);
+ }
+ this._queueEvent("delete", { data: existing });
+ return { data: existing, permissions: {} };
+ }
+
+ /**
+ * Deletes a record from the local database, if any exists.
+ * Otherwise, do nothing.
+ *
+ * @param {String} id The record's Id.
+ * @return {Object}
+ */
+ deleteAny(id) {
+ const existing = this.adapterTransaction.get(id);
+ if (existing) {
+ this.adapterTransaction.update(markDeleted(existing));
+ this._queueEvent("delete", { data: existing });
+ }
+ return { data: (0, _extends3.default)({ id }, existing), deleted: !!existing, permissions: {} };
+ }
+
+ /**
+ * Adds a record to the local database, asserting that none
+ * already exist with this ID.
+ *
+ * @param {Object} record, which must contain an ID
+ * @return {Object}
+ */
+ create(record) {
+ if (typeof record !== "object") {
+ throw new Error("Record is not an object.");
+ }
+ if (!record.hasOwnProperty("id")) {
+ throw new Error("Cannot create a record missing id");
+ }
+ if (!this.collection.idSchema.validate(record.id)) {
+ throw new Error(`Invalid Id: ${ record.id }`);
+ }
+
+ this.adapterTransaction.create(record);
+ this._queueEvent("create", { data: record });
+ return { data: record, permissions: {} };
+ }
+
+ /**
+   * Updates a record in the local database.
+ *
+ * Options:
+ * - {Boolean} synced: Sets record status to "synced" (default: false)
+ * - {Boolean} patch: Extends the existing record instead of overwriting it
+ * (default: false)
+ *
+ * @param {Object} record
+ * @param {Object} options
+ * @return {Object}
+ */
+ update(record, options = { synced: false, patch: false }) {
+ if (typeof record !== "object") {
+ throw new Error("Record is not an object.");
+ }
+ if (!record.hasOwnProperty("id")) {
+ throw new Error("Cannot update a record missing id.");
+ }
+ if (!this.collection.idSchema.validate(record.id)) {
+ throw new Error(`Invalid Id: ${ record.id }`);
+ }
+
+ const oldRecord = this.adapterTransaction.get(record.id);
+ if (!oldRecord) {
+ throw new Error(`Record with id=${ record.id } not found.`);
+ }
+ const newRecord = options.patch ? (0, _extends3.default)({}, oldRecord, record) : record;
+ const updated = this._updateRaw(oldRecord, newRecord, options);
+ this.adapterTransaction.update(updated);
+ this._queueEvent("update", { data: updated, oldRecord });
+ return { data: updated, oldRecord, permissions: {} };
+ }
+
+ /**
+ * Lower-level primitive for updating a record while respecting
+ * _status and last_modified.
+ *
+ * @param {Object} oldRecord: the record retrieved from the DB
+ * @param {Object} newRecord: the record to replace it with
+ * @return {Object}
+ */
+ _updateRaw(oldRecord, newRecord, { synced = false } = {}) {
+ const updated = (0, _extends3.default)({}, newRecord);
+    // Make sure to never lose the existing timestamp.
+ if (oldRecord && oldRecord.last_modified && !updated.last_modified) {
+ updated.last_modified = oldRecord.last_modified;
+ }
+ // If only local fields have changed, then keep record as synced.
+ // If status is created, keep record as created.
+ // If status is deleted, mark as updated.
+ const isIdentical = oldRecord && recordsEqual(oldRecord, updated, this.localFields);
+ const keepSynced = isIdentical && oldRecord._status == "synced";
+ const neverSynced = !oldRecord || oldRecord && oldRecord._status == "created";
+ const newStatus = keepSynced || synced ? "synced" : neverSynced ? "created" : "updated";
+ return markStatus(updated, newStatus);
+ }
+
+ /**
+ * Upsert a record into the local database.
+ *
+ * This record must have an ID.
+ *
+ * If a record with this ID already exists, it will be replaced.
+ * Otherwise, this record will be inserted.
+ *
+ * @param {Object} record
+ * @return {Object}
+ */
+ upsert(record) {
+ if (typeof record !== "object") {
+ throw new Error("Record is not an object.");
+ }
+ if (!record.hasOwnProperty("id")) {
+ throw new Error("Cannot update a record missing id.");
+ }
+ if (!this.collection.idSchema.validate(record.id)) {
+ throw new Error(`Invalid Id: ${ record.id }`);
+ }
+ let oldRecord = this.adapterTransaction.get(record.id);
+ const updated = this._updateRaw(oldRecord, record);
+ this.adapterTransaction.update(updated);
+ // Don't return deleted records -- pretend they are gone
+ if (oldRecord && oldRecord._status == "deleted") {
+ oldRecord = undefined;
+ }
+ if (oldRecord) {
+ this._queueEvent("update", { data: updated, oldRecord });
+ } else {
+ this._queueEvent("create", { data: updated });
+ }
+ return { data: updated, oldRecord, permissions: {} };
+ }
+}
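+
+// Sketch of how a CollectionTransaction is typically driven, assuming the
+// Collection#execute() helper defined earlier in this file and a hypothetical
+// "articles" collection. Operations share one adapter transaction, and the
+// queued events are emitted only after the whole transaction succeeds.
+//
+//   articles.execute(txn => {
+//     txn.deleteAny(oldId);
+//     return txn.upsert(newRecord);
+//   }, { preloadIds: [oldId, newRecord.id] });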
+exports.CollectionTransaction = CollectionTransaction;
+
+},{"./adapters/IDB":84,"./adapters/base":85,"./utils":87,"babel-runtime/core-js/json/stringify":3,"babel-runtime/core-js/object/assign":4,"babel-runtime/core-js/promise":6,"babel-runtime/helpers/asyncToGenerator":7,"babel-runtime/helpers/extends":8,"uuid":9}],87:[function(require,module,exports){
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.RE_UUID = undefined;
+
+var _promise = require("babel-runtime/core-js/promise");
+
+var _promise2 = _interopRequireDefault(_promise);
+
+var _keys = require("babel-runtime/core-js/object/keys");
+
+var _keys2 = _interopRequireDefault(_keys);
+
+exports.sortObjects = sortObjects;
+exports.filterObject = filterObject;
+exports.filterObjects = filterObjects;
+exports.isUUID = isUUID;
+exports.waterfall = waterfall;
+exports.deepEqual = deepEqual;
+exports.omitKeys = omitKeys;
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const RE_UUID = exports.RE_UUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
+
+/**
+ * Checks if a value is undefined.
+ * @param {Any} value
+ * @return {Boolean}
+ */
+function _isUndefined(value) {
+ return typeof value === "undefined";
+}
+
+/**
+ * Sorts records in a list according to a given ordering.
+ *
+ * @param {String} order The ordering, eg. `-last_modified`.
+ * @param {Array} list The collection to order.
+ * @return {Array}
+ */
+function sortObjects(order, list) {
+ const hasDash = order[0] === "-";
+ const field = hasDash ? order.slice(1) : order;
+ const direction = hasDash ? -1 : 1;
+ return list.slice().sort((a, b) => {
+ if (a[field] && _isUndefined(b[field])) {
+ return direction;
+ }
+ if (b[field] && _isUndefined(a[field])) {
+ return -direction;
+ }
+ if (_isUndefined(a[field]) && _isUndefined(b[field])) {
+ return 0;
+ }
+ return a[field] > b[field] ? direction : -direction;
+ });
+}
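+
+// For instance (sketch), ordering by most recent change first:
+//
+//   sortObjects("-last_modified", [{ last_modified: 1 }, { last_modified: 2 }]);
+//   // -> [{ last_modified: 2 }, { last_modified: 1 }]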
+
+/**
+ * Test if a single object matches all given filters.
+ *
+ * @param {Object} filters The filters object.
+ * @param {Object} entry The object to filter.
+ * @return {Boolean}
+ */
+function filterObject(filters, entry) {
+ return (0, _keys2.default)(filters).every(filter => {
+ const value = filters[filter];
+ if (Array.isArray(value)) {
+ return value.some(candidate => candidate === entry[filter]);
+ }
+ return entry[filter] === value;
+ });
+}
+
+/**
+ * Filters records in a list matching all given filters.
+ *
+ * @param {Object} filters The filters object.
+ * @param {Array} list The collection to filter.
+ * @return {Array}
+ */
+function filterObjects(filters, list) {
+ return list.filter(entry => {
+ return filterObject(filters, entry);
+ });
+}
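+
+// Sketch: a filter value may be a single value (strict equality) or an array
+// of acceptable values ("records" being any array of objects):
+//
+//   filterObjects({ status: ["draft", "published"] }, records);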
+
+/**
+ * Checks if a string is a UUID.
+ *
+ * @param {String} uuid The uuid to validate.
+ * @return {Boolean}
+ */
+function isUUID(uuid) {
+ return RE_UUID.test(uuid);
+}
+
+/**
+ * Runs a list of functions sequentially; the functions can be sync or async
+ * (async functions must return a promise).
+ *
+ * @param {Array} fns The list of functions.
+ * @param {Any} init The initial value.
+ * @return {Promise}
+ */
+function waterfall(fns, init) {
+ if (!fns.length) {
+ return _promise2.default.resolve(init);
+ }
+ return fns.reduce((promise, nextFn) => {
+ return promise.then(nextFn);
+ }, _promise2.default.resolve(init));
+}
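+
+// Minimal sketch: each function receives the previous result; sync and async
+// steps can be mixed as long as async steps return a promise.
+//
+//   waterfall([x => x + 1, x => Promise.resolve(x * 2)], 3)
+//     .then(result => { /* result === 8 */ });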
+
+/**
+ * Simple deep object comparison function. This only supports comparison of
+ * serializable JavaScript objects.
+ *
+ * @param {Object} a The source object.
+ * @param {Object} b The compared object.
+ * @return {Boolean}
+ */
+function deepEqual(a, b) {
+ if (a === b) {
+ return true;
+ }
+ if (typeof a !== typeof b) {
+ return false;
+ }
+ if (!(a && typeof a == "object") || !(b && typeof b == "object")) {
+ return false;
+ }
+ if ((0, _keys2.default)(a).length !== (0, _keys2.default)(b).length) {
+ return false;
+ }
+ for (let k in a) {
+ if (!deepEqual(a[k], b[k])) {
+ return false;
+ }
+ }
+ return true;
+}
+
+/**
+ * Return an object without the specified keys.
+ *
+ * @param {Object} obj The original object.
+ * @param {Array} keys The list of keys to exclude.
+ * @return {Object} A copy without the specified keys.
+ */
+function omitKeys(obj, keys = []) {
+ return (0, _keys2.default)(obj).reduce((acc, key) => {
+ if (keys.indexOf(key) === -1) {
+ acc[key] = obj[key];
+ }
+ return acc;
+ }, {});
+}
+
+},{"babel-runtime/core-js/object/keys":5,"babel-runtime/core-js/promise":6}]},{},[2])(2)
+}); \ No newline at end of file
diff --git a/services/common/logmanager.js b/services/common/logmanager.js
new file mode 100644
index 000000000..17e47f9e3
--- /dev/null
+++ b/services/common/logmanager.js
@@ -0,0 +1,331 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"use strict;"
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "Services",
+ "resource://gre/modules/Services.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "FileUtils",
+ "resource://gre/modules/FileUtils.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "Log",
+ "resource://gre/modules/Log.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "OS",
+ "resource://gre/modules/osfile.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "CommonUtils",
+ "resource://services-common/utils.js");
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
+
+this.EXPORTED_SYMBOLS = [
+ "LogManager",
+];
+
+const DEFAULT_MAX_ERROR_AGE = 20 * 24 * 60 * 60; // 20 days
+
+// "shared" logs (ie, where the same log name is used by multiple LogManager
+// instances) are a fact of life here - eg, FirefoxAccounts logs are used by
+// both Sync and Reading List.
+// However, different instances have different pref branches, so we need to
+// handle the case where one pref branch says "Debug" and the other says "Error".
+// So we (a) keep singleton console and dump appenders, and (b) keep track
+// of the minimum (ie, most verbose) level and use that.
+// This avoids (a) the most recent setter winning (as that is indeterminate)
+// and (b) multiple dump/console appenders being added to the same log multiple
+// times, which would cause messages to appear twice.
+
+// Singletons used by each instance.
+var formatter;
+var dumpAppender;
+var consoleAppender;
+
+// A set of all preference roots used by all instances.
+var allBranches = new Set();
+
+// A storage appender that is flushable to a file on disk. Policies for
+// when to flush, to what file, log rotation etc are up to the consumer
+// (although it does maintain a .sawError property to help the consumer decide
+// based on its policies)
+function FlushableStorageAppender(formatter) {
+ Log.StorageStreamAppender.call(this, formatter);
+ this.sawError = false;
+}
+
+FlushableStorageAppender.prototype = {
+ __proto__: Log.StorageStreamAppender.prototype,
+
+ append(message) {
+ if (message.level >= Log.Level.Error) {
+ this.sawError = true;
+ }
+ Log.StorageStreamAppender.prototype.append.call(this, message);
+ },
+
+ reset() {
+ Log.StorageStreamAppender.prototype.reset.call(this);
+ this.sawError = false;
+ },
+
+ // Flush the current stream to a file. Somewhat counter-intuitively, you
+ // must pass a log which will be written to with details of the operation.
+ flushToFile: Task.async(function* (subdirArray, filename, log) {
+ let inStream = this.getInputStream();
+ this.reset();
+ if (!inStream) {
+ log.debug("Failed to flush log to a file - no input stream");
+ return;
+ }
+ log.debug("Flushing file log");
+ log.trace("Beginning stream copy to " + filename + ": " + Date.now());
+ try {
+ yield this._copyStreamToFile(inStream, subdirArray, filename, log);
+ log.trace("onCopyComplete", Date.now());
+ } catch (ex) {
+ log.error("Failed to copy log stream to file", ex);
+ }
+ }),
+
+ /**
+ * Copy an input stream to the named file, doing everything off the main
+ * thread.
+ * subDirArray is an array of path components, relative to the profile
+ * directory, where the file will be created.
+ * outputFileName is the filename to create.
+ * Returns a promise that is resolved on completion or rejected with an error.
+ */
+ _copyStreamToFile: Task.async(function* (inputStream, subdirArray, outputFileName, log) {
+ // The log data could be large, so we don't want to pass it all in a single
+ // message, so use BUFFER_SIZE chunks.
+ const BUFFER_SIZE = 8192;
+
+ // get a binary stream
+ let binaryStream = Cc["@mozilla.org/binaryinputstream;1"].createInstance(Ci.nsIBinaryInputStream);
+ binaryStream.setInputStream(inputStream);
+
+ let outputDirectory = OS.Path.join(OS.Constants.Path.profileDir, ...subdirArray);
+ yield OS.File.makeDir(outputDirectory, { ignoreExisting: true, from: OS.Constants.Path.profileDir });
+ let fullOutputFileName = OS.Path.join(outputDirectory, outputFileName);
+ let output = yield OS.File.open(fullOutputFileName, { write: true} );
+ try {
+ while (true) {
+ let available = binaryStream.available();
+ if (!available) {
+ break;
+ }
+ let chunk = binaryStream.readByteArray(Math.min(available, BUFFER_SIZE));
+ yield output.write(new Uint8Array(chunk));
+ }
+ } finally {
+ try {
+ binaryStream.close(); // inputStream is closed by the binaryStream
+ yield output.close();
+ } catch (ex) {
+ log.error("Failed to close the input stream", ex);
+ }
+ }
+ log.trace("finished copy to", fullOutputFileName);
+ }),
+}
+
+// The public LogManager object.
+function LogManager(prefRoot, logNames, logFilePrefix) {
+ this._prefObservers = [];
+ this.init(prefRoot, logNames, logFilePrefix);
+}
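+
+// Illustrative sketch (hypothetical pref root, log name and file prefix):
+// one LogManager per pref branch; appender levels then track that branch's
+// log.appender.* prefs.
+//
+//   let manager = new LogManager("services.example.", ["Example.Engine"], "example");
+//   // after an operation completes:
+//   //   manager.resetFileLog().then(reason => { ... });
+//   // and on shutdown:
+//   //   manager.finalize();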
+
+LogManager.prototype = {
+ _cleaningUpFileLogs: false,
+
+ init(prefRoot, logNames, logFilePrefix) {
+ if (prefRoot instanceof Preferences) {
+ this._prefs = prefRoot;
+ } else {
+ this._prefs = new Preferences(prefRoot);
+ }
+
+ this.logFilePrefix = logFilePrefix;
+ if (!formatter) {
+ // Create a formatter and various appenders to attach to the logs.
+ formatter = new Log.BasicFormatter();
+ consoleAppender = new Log.ConsoleAppender(formatter);
+ dumpAppender = new Log.DumpAppender(formatter);
+ }
+
+ allBranches.add(this._prefs._branchStr);
+ // We create a preference observer for all our prefs so they are magically
+ // reflected if the pref changes after creation.
+ let setupAppender = (appender, prefName, defaultLevel, findSmallest = false) => {
+ let observer = newVal => {
+ let level = Log.Level[newVal] || defaultLevel;
+ if (findSmallest) {
+ // As some of our appenders have global impact (ie, there is only one
+ // place 'dump' goes to), we need to find the smallest value from all
+ // prefs controlling this appender.
+ // For example, if consumerA has dump=Debug then consumerB sets
+ // dump=Error, we need to keep dump=Debug so consumerA is respected.
+ for (let branch of allBranches) {
+ let lookPrefBranch = new Preferences(branch);
+ let lookVal = Log.Level[lookPrefBranch.get(prefName)];
+ if (lookVal && lookVal < level) {
+ level = lookVal;
+ }
+ }
+ }
+ appender.level = level;
+ }
+ this._prefs.observe(prefName, observer, this);
+ this._prefObservers.push([prefName, observer]);
+ // and call the observer now with the current pref value.
+ observer(this._prefs.get(prefName));
+ return observer;
+ }
+
+ this._observeConsolePref = setupAppender(consoleAppender, "log.appender.console", Log.Level.Fatal, true);
+ this._observeDumpPref = setupAppender(dumpAppender, "log.appender.dump", Log.Level.Error, true);
+
+ // The file appender doesn't get the special singleton behaviour.
+ let fapp = this._fileAppender = new FlushableStorageAppender(formatter);
+ // the stream gets a default of Debug as the user must go out of their way
+ // to see the stuff spewed to it.
+ this._observeStreamPref = setupAppender(fapp, "log.appender.file.level", Log.Level.Debug);
+
+ // now attach the appenders to all our logs.
+ for (let logName of logNames) {
+ let log = Log.repository.getLogger(logName);
+ for (let appender of [fapp, dumpAppender, consoleAppender]) {
+ log.addAppender(appender);
+ }
+ }
+ // and use the first specified log as a "root" for our log.
+ this._log = Log.repository.getLogger(logNames[0] + ".LogManager");
+ },
+
+ /**
+ * Cleanup this instance
+ */
+ finalize() {
+ for (let [name, pref] of this._prefObservers) {
+ this._prefs.ignore(name, pref, this);
+ }
+ this._prefObservers = [];
+ try {
+ allBranches.delete(this._prefs._branchStr);
+ } catch (e) {}
+ this._prefs = null;
+ },
+
+ get _logFileSubDirectoryEntries() {
+ // At this point we don't allow a custom directory for the logs, nor allow
+ // it to be outside the profile directory.
+    // This returns an array of the relative directory entries below the
+ // profile dir, and is the directory about:sync-log uses.
+ return ["weave", "logs"];
+ },
+
+ get sawError() {
+ return this._fileAppender.sawError;
+ },
+
+ // Result values for resetFileLog.
+ SUCCESS_LOG_WRITTEN: "success-log-written",
+ ERROR_LOG_WRITTEN: "error-log-written",
+
+ /**
+ * Possibly generate a log file for all accumulated log messages and refresh
+ * the input & output streams.
+ * Whether a "success" or "error" log is written is determined based on
+ * whether an "Error" log entry was written to any of the logs.
+ * Returns a promise that resolves on completion with either null (for no
+ * file written or on error), SUCCESS_LOG_WRITTEN if a "success" log was
+ * written, or ERROR_LOG_WRITTEN if an "error" log was written.
+ */
+ resetFileLog: Task.async(function* () {
+ try {
+ let flushToFile;
+ let reasonPrefix;
+ let reason;
+ if (this._fileAppender.sawError) {
+ reason = this.ERROR_LOG_WRITTEN;
+ flushToFile = this._prefs.get("log.appender.file.logOnError", true);
+ reasonPrefix = "error";
+ } else {
+ reason = this.SUCCESS_LOG_WRITTEN;
+ flushToFile = this._prefs.get("log.appender.file.logOnSuccess", false);
+ reasonPrefix = "success";
+ }
+
+ // might as well avoid creating an input stream if we aren't going to use it.
+ if (!flushToFile) {
+ this._fileAppender.reset();
+ return null;
+ }
+
+ // We have reasonPrefix at the start of the filename so all "error"
+ // logs are grouped in about:sync-log.
+ let filename = reasonPrefix + "-" + this.logFilePrefix + "-" + Date.now() + ".txt";
+ yield this._fileAppender.flushToFile(this._logFileSubDirectoryEntries, filename, this._log);
+
+ // It's not completely clear to markh why we only do log cleanups
+ // for errors, but for now the Sync semantics have been copied...
+ // (one theory is that only cleaning up on error makes it less
+ // likely old error logs would be removed, but that's not true if
+ // there are occasional errors - let's address this later!)
+ if (reason == this.ERROR_LOG_WRITTEN && !this._cleaningUpFileLogs) {
+ this._log.trace("Scheduling cleanup.");
+ // Note we don't return/yield or otherwise wait on this promise - it
+ // continues in the background
+ this.cleanupLogs().catch(err => {
+ this._log.error("Failed to cleanup logs", err);
+ });
+ }
+ return reason;
+ } catch (ex) {
+ this._log.error("Failed to resetFileLog", ex);
+ return null;
+ }
+ }),
+
+ /**
+ * Finds all logs older than maxErrorAge and deletes them using async I/O.
+ */
+ cleanupLogs: Task.async(function* () {
+ this._cleaningUpFileLogs = true;
+ let logDir = FileUtils.getDir("ProfD", this._logFileSubDirectoryEntries);
+ let iterator = new OS.File.DirectoryIterator(logDir.path);
+ let maxAge = this._prefs.get("log.appender.file.maxErrorAge", DEFAULT_MAX_ERROR_AGE);
+ let threshold = Date.now() - 1000 * maxAge;
+
+ this._log.debug("Log cleanup threshold time: " + threshold);
+ yield iterator.forEach(Task.async(function* (entry) {
+ // Note that we don't check this.logFilePrefix is in the name - we cleanup
+ // all files in this directory regardless of that prefix so old logfiles
+ // for prefixes no longer in use are still cleaned up. See bug 1279145.
+ if (!entry.name.startsWith("error-") &&
+ !entry.name.startsWith("success-")) {
+ return;
+ }
+ try {
+ // need to call .stat() as the enumerator doesn't give that to us on *nix.
+ let info = yield OS.File.stat(entry.path);
+ if (info.lastModificationDate.getTime() >= threshold) {
+ return;
+ }
+ this._log.trace(" > Cleanup removing " + entry.name +
+ " (" + info.lastModificationDate.getTime() + ")");
+ yield OS.File.remove(entry.path);
+ this._log.trace("Deleted " + entry.name);
+ } catch (ex) {
+ this._log.debug("Encountered error trying to clean up old log file "
+ + entry.name, ex);
+ }
+ }.bind(this)));
+ iterator.close();
+ this._cleaningUpFileLogs = false;
+ this._log.debug("Done deleting files.");
+ // This notification is used only for tests.
+ Services.obs.notifyObservers(null, "services-tests:common:log-manager:cleanup-logs", null);
+ }),
+}
diff --git a/services/common/modules-testing/logging.js b/services/common/modules-testing/logging.js
new file mode 100644
index 000000000..3ff2c396c
--- /dev/null
+++ b/services/common/modules-testing/logging.js
@@ -0,0 +1,54 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "getTestLogger",
+ "initTestLogging",
+];
+
+var {utils: Cu} = Components;
+
+Cu.import("resource://gre/modules/Log.jsm");
+
+this.initTestLogging = function initTestLogging(level) {
+ function LogStats() {
+ this.errorsLogged = 0;
+ }
+ LogStats.prototype = {
+ format: function format(message) {
+ if (message.level == Log.Level.Error) {
+ this.errorsLogged += 1;
+ }
+
+ return message.time + "\t" + message.loggerName + "\t" + message.levelDesc + "\t" +
+ this.formatText(message) + "\n";
+ }
+ };
+ LogStats.prototype.__proto__ = new Log.BasicFormatter();
+
+ let log = Log.repository.rootLogger;
+ let logStats = new LogStats();
+ let appender = new Log.DumpAppender(logStats);
+
+ if (typeof(level) == "undefined") {
+ level = "Debug";
+ }
+ getTestLogger().level = Log.Level[level];
+ Log.repository.getLogger("Services").level = Log.Level[level];
+
+ log.level = Log.Level.Trace;
+ appender.level = Log.Level.Trace;
+ // Overwrite any other appenders (e.g. from previous incarnations)
+ log.ownAppenders = [appender];
+ log.updateAppenders();
+
+ return logStats;
+}
+
+this.getTestLogger = function getTestLogger(component) {
+ return Log.repository.getLogger("Testing");
+}
+
diff --git a/services/common/modules-testing/storageserver.js b/services/common/modules-testing/storageserver.js
new file mode 100644
index 000000000..650ac307f
--- /dev/null
+++ b/services/common/modules-testing/storageserver.js
@@ -0,0 +1,1677 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * This file contains an implementation of the Storage Server in JavaScript.
+ *
+ * The server should not be used for any production purposes.
+ */
+
+var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
+
+this.EXPORTED_SYMBOLS = [
+ "ServerBSO",
+ "StorageServerCallback",
+ "StorageServerCollection",
+ "StorageServer",
+ "storageServerForUsers",
+];
+
+Cu.import("resource://testing-common/httpd.js");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/utils.js");
+
+const STORAGE_HTTP_LOGGER = "Services.Common.Test.Server";
+const STORAGE_API_VERSION = "2.0";
+
+// Use the same method that record.js does, which mirrors the server.
+function new_timestamp() {
+ return Math.round(Date.now());
+}
+
+function isInteger(s) {
+ let re = /^[0-9]+$/;
+ return re.test(s);
+}
+
+function writeHttpBody(response, body) {
+ if (!body) {
+ return;
+ }
+
+ response.bodyOutputStream.write(body, body.length);
+}
+
+function sendMozSvcError(request, response, code) {
+ response.setStatusLine(request.httpVersion, 400, "Bad Request");
+ response.setHeader("Content-Type", "text/plain", false);
+ response.bodyOutputStream.write(code, code.length);
+}
+
+/**
+ * Represent a BSO on the server.
+ *
+ * A BSO is constructed from an ID, content, and a modified time.
+ *
+ * @param id
+ * (string) ID of the BSO being created.
+ * @param payload
+ *        (string|object) Payload for the BSO. Should ideally be a string. If
+ * an object is passed, it will be fed into JSON.stringify and that
+ * output will be set as the payload.
+ * @param modified
+ * (number) Milliseconds since UNIX epoch that the BSO was last
+ * modified. If not defined or null, the current time will be used.
+ */
+this.ServerBSO = function ServerBSO(id, payload, modified) {
+ if (!id) {
+ throw new Error("No ID for ServerBSO!");
+ }
+
+ if (!id.match(/^[a-zA-Z0-9_-]{1,64}$/)) {
+ throw new Error("BSO ID is invalid: " + id);
+ }
+
+ this._log = Log.repository.getLogger(STORAGE_HTTP_LOGGER);
+
+ this.id = id;
+ if (!payload) {
+ return;
+ }
+
+ CommonUtils.ensureMillisecondsTimestamp(modified);
+
+ if (typeof payload == "object") {
+ payload = JSON.stringify(payload);
+ }
+
+ this.payload = payload;
+ this.modified = modified || new_timestamp();
+}
+ServerBSO.prototype = {
+ FIELDS: [
+ "id",
+ "modified",
+ "payload",
+ "ttl",
+ "sortindex",
+ ],
+
+ toJSON: function toJSON() {
+ let obj = {};
+
+ for (let key of this.FIELDS) {
+ if (this[key] !== undefined) {
+ obj[key] = this[key];
+ }
+ }
+
+ return obj;
+ },
+
+ delete: function delete_() {
+ this.deleted = true;
+
+ delete this.payload;
+ delete this.modified;
+ },
+
+ /**
+ * Handler for GET requests for this BSO.
+ */
+ getHandler: function getHandler(request, response) {
+ let code = 200;
+ let status = "OK";
+ let body;
+
+ function sendResponse() {
+ response.setStatusLine(request.httpVersion, code, status);
+ writeHttpBody(response, body);
+ }
+
+ if (request.hasHeader("x-if-modified-since")) {
+ let headerModified = parseInt(request.getHeader("x-if-modified-since"),
+ 10);
+ CommonUtils.ensureMillisecondsTimestamp(headerModified);
+
+ if (headerModified >= this.modified) {
+ code = 304;
+ status = "Not Modified";
+
+ sendResponse();
+ return;
+ }
+ } else if (request.hasHeader("x-if-unmodified-since")) {
+ let requestModified = parseInt(request.getHeader("x-if-unmodified-since"),
+ 10);
+ let serverModified = this.modified;
+
+ if (serverModified > requestModified) {
+ code = 412;
+ status = "Precondition Failed";
+ sendResponse();
+ return;
+ }
+ }
+
+ if (!this.deleted) {
+ body = JSON.stringify(this.toJSON());
+ response.setHeader("Content-Type", "application/json", false);
+ response.setHeader("X-Last-Modified", "" + this.modified, false);
+ } else {
+ code = 404;
+ status = "Not Found";
+ }
+
+ sendResponse();
+ },
+
+ /**
+ * Handler for PUT requests for this BSO.
+ */
+ putHandler: function putHandler(request, response) {
+ if (request.hasHeader("Content-Type")) {
+ let ct = request.getHeader("Content-Type");
+ if (ct != "application/json") {
+ throw HTTP_415;
+ }
+ }
+
+ let input = CommonUtils.readBytesFromInputStream(request.bodyInputStream);
+ let parsed;
+ try {
+ parsed = JSON.parse(input);
+ } catch (ex) {
+ return sendMozSvcError(request, response, "8");
+ }
+
+ if (typeof(parsed) != "object") {
+ return sendMozSvcError(request, response, "8");
+ }
+
+ // Don't update if a conditional request fails preconditions.
+ if (request.hasHeader("x-if-unmodified-since")) {
+ let reqModified = parseInt(request.getHeader("x-if-unmodified-since"));
+
+ if (reqModified < this.modified) {
+ response.setStatusLine(request.httpVersion, 412, "Precondition Failed");
+ return;
+ }
+ }
+
+ let code, status;
+ if (this.payload) {
+ code = 204;
+ status = "No Content";
+ } else {
+ code = 201;
+ status = "Created";
+ }
+
+ // Alert when we see unrecognized fields.
+ for (let [key, value] of Object.entries(parsed)) {
+ switch (key) {
+ case "payload":
+ if (typeof(value) != "string") {
+ sendMozSvcError(request, response, "8");
+ return true;
+ }
+
+ this.payload = value;
+ break;
+
+ case "ttl":
+ if (!isInteger(value)) {
+ sendMozSvcError(request, response, "8");
+ return true;
+ }
+ this.ttl = parseInt(value, 10);
+ break;
+
+ case "sortindex":
+ if (!isInteger(value) || value.length > 9) {
+ sendMozSvcError(request, response, "8");
+ return true;
+ }
+ this.sortindex = parseInt(value, 10);
+ break;
+
+ case "id":
+ break;
+
+ default:
+ this._log.warn("Unexpected field in BSO record: " + key);
+ sendMozSvcError(request, response, "8");
+ return true;
+ }
+ }
+
+ this.modified = request.timestamp;
+ this.deleted = false;
+ response.setHeader("X-Last-Modified", "" + this.modified, false);
+
+ response.setStatusLine(request.httpVersion, code, status);
+ },
+};
+
+/**
+ * Represent a collection on the server.
+ *
+ * The '_bsos' attribute is a mapping of id -> ServerBSO objects.
+ *
+ * Note that if you want these records to be accessible individually,
+ * you need to register their handlers with the server separately, or use a
+ * containing HTTP server that will do so on your behalf.
+ *
+ * @param bsos
+ * An object mapping BSO IDs to ServerBSOs.
+ * @param acceptNew
+ * If true, POSTs to this collection URI will result in new BSOs being
+ * created and wired in on the fly.
+ * @param timestamp
+ * An optional timestamp value to initialize the modified time of the
+ * collection. This should be in the format returned by new_timestamp().
+ */
+this.StorageServerCollection =
+ function StorageServerCollection(bsos, acceptNew, timestamp=new_timestamp()) {
+ this._bsos = bsos || {};
+ this.acceptNew = acceptNew || false;
+
+ /*
+ * Track modified timestamp.
+ * We can't just use the timestamps of contained BSOs: an empty collection
+ * has a modified time.
+ */
+ CommonUtils.ensureMillisecondsTimestamp(timestamp);
+ this._timestamp = timestamp;
+
+ this._log = Log.repository.getLogger(STORAGE_HTTP_LOGGER);
+}
+StorageServerCollection.prototype = {
+ BATCH_MAX_COUNT: 100, // # of records.
+ BATCH_MAX_SIZE: 1024 * 1024, // # bytes.
+
+ _timestamp: null,
+
+ get timestamp() {
+ return this._timestamp;
+ },
+
+ set timestamp(timestamp) {
+ CommonUtils.ensureMillisecondsTimestamp(timestamp);
+ this._timestamp = timestamp;
+ },
+
+ get totalPayloadSize() {
+ let size = 0;
+ for (let bso of this.bsos()) {
+ size += bso.payload.length;
+ }
+
+ return size;
+ },
+
+ /**
+ * Convenience accessor for our BSO keys.
+ * Excludes deleted items, of course.
+ *
+ * @param filter
+ * A predicate function (applied to the ID and BSO) which dictates
+ * whether to include the BSO's ID in the output.
+ *
+ * @return an array of IDs.
+ */
+ keys: function keys(filter) {
+ let ids = [];
+ for (let [id, bso] of Object.entries(this._bsos)) {
+ if (!bso.deleted && (!filter || filter(id, bso))) {
+ ids.push(id);
+ }
+ }
+ return ids;
+ },
+
+ /**
+ * Convenience method to get an array of BSOs.
+ * Optionally provide a filter function.
+ *
+ * @param filter
+ * A predicate function, applied to the BSO, which dictates whether to
+ * include the BSO in the output.
+ *
+ * @return an array of ServerBSOs.
+ */
+ bsos: function bsos(filter) {
+ let os = [];
+ for (let [id, bso] of Object.entries(this._bsos)) {
+ if (!bso.deleted) {
+ os.push(bso);
+ }
+ }
+
+ if (!filter) {
+ return os;
+ }
+
+ return os.filter(filter);
+ },
+
+ /**
+ * Obtain a BSO by ID.
+ */
+ bso: function bso(id) {
+ return this._bsos[id];
+ },
+
+ /**
+ * Obtain the payload of a specific BSO.
+ *
+ * Raises if the specified BSO does not exist.
+ */
+ payload: function payload(id) {
+ return this.bso(id).payload;
+ },
+
+ /**
+ * Insert the provided BSO under its ID.
+ *
+ * @return the provided BSO.
+ */
+ insertBSO: function insertBSO(bso) {
+ return this._bsos[bso.id] = bso;
+ },
+
+ /**
+ * Insert the provided payload as part of a new ServerBSO with the provided
+ * ID.
+ *
+ * @param id
+ * The GUID for the BSO.
+ * @param payload
+ * The payload, as provided to the ServerBSO constructor.
+ * @param modified
+ * An optional modified time for the ServerBSO. If not specified, the
+ * current time will be used.
+ *
+ * @return the inserted BSO.
+ */
+ insert: function insert(id, payload, modified) {
+ return this.insertBSO(new ServerBSO(id, payload, modified));
+ },
+
+ /**
+ * Removes an object entirely from the collection.
+ *
+ * @param id
+ * (string) ID to remove.
+ */
+ remove: function remove(id) {
+ delete this._bsos[id];
+ },
+
+ _inResultSet: function _inResultSet(bso, options) {
+ if (!bso.payload) {
+ return false;
+ }
+
+ if (options.ids) {
+ if (options.ids.indexOf(bso.id) == -1) {
+ return false;
+ }
+ }
+
+ if (options.newer) {
+ if (bso.modified <= options.newer) {
+ return false;
+ }
+ }
+
+ if (options.older) {
+ if (bso.modified >= options.older) {
+ return false;
+ }
+ }
+
+ return true;
+ },
+
+ count: function count(options) {
+ options = options || {};
+ let c = 0;
+ for (let [id, bso] of Object.entries(this._bsos)) {
+ if (bso.modified && this._inResultSet(bso, options)) {
+ c++;
+ }
+ }
+ return c;
+ },
+
+ get: function get(options) {
+ let data = [];
+ for (let id in this._bsos) {
+ let bso = this._bsos[id];
+ if (!bso.modified) {
+ continue;
+ }
+
+ if (!this._inResultSet(bso, options)) {
+ continue;
+ }
+
+ data.push(bso);
+ }
+
+ if (options.sort) {
+ if (options.sort == "oldest") {
+ data.sort(function sortOldest(a, b) {
+ if (a.modified == b.modified) {
+ return 0;
+ }
+
+ return a.modified < b.modified ? -1 : 1;
+ });
+ } else if (options.sort == "newest") {
+ data.sort(function sortNewest(a, b) {
+ if (a.modified == b.modified) {
+ return 0;
+ }
+
+ return a.modified > b.modified ? -1 : 1;
+ });
+ } else if (options.sort == "index") {
+ data.sort(function sortIndex(a, b) {
+ if (a.sortindex == b.sortindex) {
+ return 0;
+ }
+
+ if (a.sortindex !== undefined && b.sortindex == undefined) {
+ return 1;
+ }
+
+ if (a.sortindex === undefined && b.sortindex !== undefined) {
+ return -1;
+ }
+
+ return a.sortindex > b.sortindex ? -1 : 1;
+ });
+ }
+ }
+
+ if (options.limit) {
+ data = data.slice(0, options.limit);
+ }
+
+ return data;
+ },
+
+ post: function post(input, timestamp) {
+ let success = [];
+ let failed = {};
+ let count = 0;
+ let size = 0;
+
+    // This counts records for which we have an existing ServerBSO registered
+    // as successful, and all other records as failed.
+ for (let record of input) {
+ count += 1;
+ if (count > this.BATCH_MAX_COUNT) {
+ failed[record.id] = "Max record count exceeded.";
+ continue;
+ }
+
+ if (typeof(record.payload) != "string") {
+ failed[record.id] = "Payload is not a string!";
+ continue;
+ }
+
+ size += record.payload.length;
+ if (size > this.BATCH_MAX_SIZE) {
+ failed[record.id] = "Payload max size exceeded!";
+ continue;
+ }
+
+ if (record.sortindex) {
+ if (!isInteger(record.sortindex)) {
+ failed[record.id] = "sortindex is not an integer.";
+ continue;
+ }
+
+ if (record.sortindex.length > 9) {
+ failed[record.id] = "sortindex is too long.";
+ continue;
+ }
+ }
+
+ if ("ttl" in record) {
+ if (!isInteger(record.ttl)) {
+ failed[record.id] = "ttl is not an integer.";
+ continue;
+ }
+ }
+
+ try {
+ let bso = this.bso(record.id);
+ if (!bso && this.acceptNew) {
+ this._log.debug("Creating BSO " + JSON.stringify(record.id) +
+ " on the fly.");
+ bso = new ServerBSO(record.id);
+ this.insertBSO(bso);
+ }
+ if (bso) {
+ bso.payload = record.payload;
+ bso.modified = timestamp;
+ bso.deleted = false;
+ success.push(record.id);
+
+ if (record.sortindex) {
+ bso.sortindex = parseInt(record.sortindex, 10);
+ }
+
+ } else {
+ failed[record.id] = "no bso configured";
+ }
+ } catch (ex) {
+ this._log.info("Exception when processing BSO", ex);
+ failed[record.id] = "Exception when processing.";
+ }
+ }
+ return {success: success, failed: failed};
+ },
+
+ delete: function delete_(options) {
+ options = options || {};
+
+ // Protocol 2.0 only allows the "ids" query string argument.
+ let keys = Object.keys(options).filter(function(k) {
+ return k != "ids";
+ });
+ if (keys.length) {
+ this._log.warn("Invalid query string parameter to collection delete: " +
+ keys.join(", "));
+ throw new Error("Malformed client request.");
+ }
+
+ if (options.ids && options.ids.length > this.BATCH_MAX_COUNT) {
+ throw HTTP_400;
+ }
+
+ let deleted = [];
+ for (let [id, bso] of Object.entries(this._bsos)) {
+ if (this._inResultSet(bso, options)) {
+ this._log.debug("Deleting " + JSON.stringify(bso));
+ deleted.push(bso.id);
+ bso.delete();
+ }
+ }
+ return deleted;
+ },
+
+ parseOptions: function parseOptions(request) {
+ let options = {};
+
+ for (let chunk of request.queryString.split("&")) {
+ if (!chunk) {
+ continue;
+ }
+ chunk = chunk.split("=");
+ let key = decodeURIComponent(chunk[0]);
+ if (chunk.length == 1) {
+ options[key] = "";
+ } else {
+ options[key] = decodeURIComponent(chunk[1]);
+ }
+ }
+
+ if (options.ids) {
+ options.ids = options.ids.split(",");
+ }
+
+ if (options.newer) {
+ if (!isInteger(options.newer)) {
+ throw HTTP_400;
+ }
+
+ CommonUtils.ensureMillisecondsTimestamp(options.newer);
+ options.newer = parseInt(options.newer, 10);
+ }
+
+ if (options.older) {
+ if (!isInteger(options.older)) {
+ throw HTTP_400;
+ }
+
+ CommonUtils.ensureMillisecondsTimestamp(options.older);
+ options.older = parseInt(options.older, 10);
+ }
+
+ if (options.limit) {
+ if (!isInteger(options.limit)) {
+ throw HTTP_400;
+ }
+
+ options.limit = parseInt(options.limit, 10);
+ }
+
+ return options;
+ },
+
+ getHandler: function getHandler(request, response) {
+ let options = this.parseOptions(request);
+ let data = this.get(options);
+
+ if (request.hasHeader("x-if-modified-since")) {
+ let requestModified = parseInt(request.getHeader("x-if-modified-since"),
+ 10);
+ let newestBSO = 0;
+ for (let bso of data) {
+ if (bso.modified > newestBSO) {
+ newestBSO = bso.modified;
+ }
+ }
+
+ if (requestModified >= newestBSO) {
+ response.setHeader("X-Last-Modified", "" + newestBSO);
+ response.setStatusLine(request.httpVersion, 304, "Not Modified");
+ return;
+ }
+ } else if (request.hasHeader("x-if-unmodified-since")) {
+ let requestModified = parseInt(request.getHeader("x-if-unmodified-since"),
+ 10);
+ let serverModified = this.timestamp;
+
+ if (serverModified > requestModified) {
+ response.setHeader("X-Last-Modified", "" + serverModified);
+ response.setStatusLine(request.httpVersion, 412, "Precondition Failed");
+ return;
+ }
+ }
+
+ if (options.full) {
+ data = data.map(function map(bso) {
+ return bso.toJSON();
+ });
+ } else {
+ data = data.map(function map(bso) {
+ return bso.id;
+ });
+ }
+
+ // application/json is default media type.
+ let newlines = false;
+ if (request.hasHeader("accept")) {
+ let accept = request.getHeader("accept");
+ if (accept == "application/newlines") {
+ newlines = true;
+ } else if (accept != "application/json") {
+ throw HTTP_406;
+ }
+ }
+
+ let body;
+ if (newlines) {
+ response.setHeader("Content-Type", "application/newlines", false);
+ let normalized = data.map(function map(d) {
+ return JSON.stringify(d);
+ });
+
+ body = normalized.join("\n") + "\n";
+ } else {
+ response.setHeader("Content-Type", "application/json", false);
+ body = JSON.stringify({items: data});
+ }
+
+ this._log.info("Records: " + data.length);
+ response.setHeader("X-Num-Records", "" + data.length, false);
+ response.setHeader("X-Last-Modified", "" + this.timestamp, false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ },
+
+ postHandler: function postHandler(request, response) {
+ let options = this.parseOptions(request);
+
+ if (!request.hasHeader("content-type")) {
+ this._log.info("No Content-Type request header!");
+ throw HTTP_400;
+ }
+
+ let inputStream = request.bodyInputStream;
+ let inputBody = CommonUtils.readBytesFromInputStream(inputStream);
+ let input = [];
+
+ let inputMediaType = request.getHeader("content-type");
+ if (inputMediaType == "application/json") {
+ try {
+ input = JSON.parse(inputBody);
+ } catch (ex) {
+ this._log.info("JSON parse error on input body!");
+ throw HTTP_400;
+ }
+
+ if (!Array.isArray(input)) {
+ this._log.info("Input JSON type not an array!");
+ return sendMozSvcError(request, response, "8");
+ }
+ } else if (inputMediaType == "application/newlines") {
+ for (let line of inputBody.split("\n")) {
+ let record;
+ try {
+ record = JSON.parse(line);
+ } catch (ex) {
+ this._log.info("JSON parse error on line!");
+ return sendMozSvcError(request, response, "8");
+ }
+
+ input.push(record);
+ }
+ } else {
+ this._log.info("Unknown media type: " + inputMediaType);
+ throw HTTP_415;
+ }
+
+ if (this._ensureUnmodifiedSince(request, response)) {
+ return;
+ }
+
+ let res = this.post(input, request.timestamp);
+ let body = JSON.stringify(res);
+ response.setHeader("Content-Type", "application/json", false);
+ this.timestamp = request.timestamp;
+ response.setHeader("X-Last-Modified", "" + this.timestamp, false);
+
+ response.setStatusLine(request.httpVersion, "200", "OK");
+ response.bodyOutputStream.write(body, body.length);
+ },
+
+ deleteHandler: function deleteHandler(request, response) {
+ this._log.debug("Invoking StorageServerCollection.DELETE.");
+
+ let options = this.parseOptions(request);
+
+ if (this._ensureUnmodifiedSince(request, response)) {
+ return;
+ }
+
+ let deleted = this.delete(options);
+ response.deleted = deleted;
+ this.timestamp = request.timestamp;
+
+ response.setStatusLine(request.httpVersion, 204, "No Content");
+ },
+
+ handler: function handler() {
+ let self = this;
+
+ return function(request, response) {
+ switch(request.method) {
+ case "GET":
+ return self.getHandler(request, response);
+
+ case "POST":
+ return self.postHandler(request, response);
+
+ case "DELETE":
+ return self.deleteHandler(request, response);
+
+ }
+
+ request.setHeader("Allow", "GET,POST,DELETE");
+ response.setStatusLine(request.httpVersion, 405, "Method Not Allowed");
+ };
+ },
+
+ _ensureUnmodifiedSince: function _ensureUnmodifiedSince(request, response) {
+ if (!request.hasHeader("x-if-unmodified-since")) {
+ return false;
+ }
+
+ let requestModified = parseInt(request.getHeader("x-if-unmodified-since"),
+ 10);
+ let serverModified = this.timestamp;
+
+ this._log.debug("Request modified time: " + requestModified +
+ "; Server modified time: " + serverModified);
+ if (serverModified <= requestModified) {
+ return false;
+ }
+
+ this._log.info("Conditional request rejected because client time older " +
+ "than collection timestamp.");
+ response.setStatusLine(request.httpVersion, 412, "Precondition Failed");
+ return true;
+ },
+};
+
+
+//===========================================================================//
+// httpd.js-based Storage server. //
+//===========================================================================//
+
+/**
+ * In general, the preferred way of using StorageServer is to directly
+ * introspect it. Callbacks are available for operations which are hard to
+ * verify through introspection, such as deletions.
+ *
+ * One of the goals of this server is to provide enough hooks for test code to
+ * find out what it needs without monkeypatching. Use this object as your
+ * prototype, and override as appropriate.
+ */
+this.StorageServerCallback = {
+ onCollectionDeleted: function onCollectionDeleted(user, collection) {},
+ onItemDeleted: function onItemDeleted(user, collection, bsoID) {},
+
+ /**
+ * Called at the top of every request.
+ *
+ * Allows the test to inspect the request. Hooks should be careful not to
+ * modify or change state of the request or they may impact future processing.
+ */
+ onRequest: function onRequest(request) {},
+};
+
+/**
+ * Construct a new test Storage server. Takes a callback object (e.g.,
+ * StorageServerCallback) as input.
+ */
+this.StorageServer = function StorageServer(callback) {
+ this.callback = callback || {__proto__: StorageServerCallback};
+ this.server = new HttpServer();
+ this.started = false;
+ this.users = {};
+ this.requestCount = 0;
+ this._log = Log.repository.getLogger(STORAGE_HTTP_LOGGER);
+
+ // Install our own default handler. This allows us to mess around with the
+ // whole URL space.
+ let handler = this.server._handler;
+ handler._handleDefault = this.handleDefault.bind(this, handler);
+}
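+
+// Typical test usage sketch (hypothetical numeric user id and contents; see
+// registerUser() below for the numeric-username requirement):
+//
+//   let server = new StorageServer();
+//   server.registerUser("123", "password");
+//   server.createContents("123", { meta: {}, crypto: {} });
+//   server.startSynchronous();
+//   // ... issue requests against "http://localhost:" + server.port ...
+//   // server.stop(callback);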
+StorageServer.prototype = {
+ DEFAULT_QUOTA: 1024 * 1024, // # bytes.
+
+ server: null, // HttpServer.
+ users: null, // Map of username => {collections, password}.
+
+ /**
+ * If true, the server will allow any arbitrary user to be used.
+ *
+ * No authentication will be performed. Whatever user is detected from the
+ * URL or auth headers will be created (if needed) and used.
+ */
+ allowAllUsers: false,
+
+ /**
+ * Start the StorageServer's underlying HTTP server.
+ *
+ * @param port
+ * The numeric port on which to start. A falsy value implies to
+ * select any available port.
+ * @param cb
+ * A callback function (of no arguments) which is invoked after
+ * startup.
+ */
+ start: function start(port, cb) {
+ if (this.started) {
+ this._log.warn("Warning: server already started on " + this.port);
+ return;
+ }
+ if (!port) {
+ port = -1;
+ }
+ this.port = port;
+
+ try {
+ this.server.start(this.port);
+ this.port = this.server.identity.primaryPort;
+ this.started = true;
+ if (cb) {
+ cb();
+ }
+ } catch (ex) {
+ _("==========================================");
+ _("Got exception starting Storage HTTP server on port " + this.port);
+ _("Error: " + Log.exceptionStr(ex));
+ _("Is there a process already listening on port " + this.port + "?");
+ _("==========================================");
+ do_throw(ex);
+ }
+ },
+
+ /**
+ * Start the server synchronously.
+ *
+ * @param port
+ * The numeric port on which to start. The default is to choose
+ * any available port.
+ */
+ startSynchronous: function startSynchronous(port=-1) {
+ let cb = Async.makeSpinningCallback();
+ this.start(port, cb);
+ cb.wait();
+ },
+
+ /**
+ * Stop the StorageServer's HTTP server.
+ *
+ * @param cb
+ * A callback function. Invoked after the server has been stopped.
+ *
+ */
+ stop: function stop(cb) {
+ if (!this.started) {
+ this._log.warn("StorageServer: Warning: server not running. Can't stop " +
+ "me now!");
+ return;
+ }
+
+ this.server.stop(cb);
+ this.started = false;
+ },
+
+ serverTime: function serverTime() {
+ return new_timestamp();
+ },
+
+ /**
+ * Create a new user, complete with an empty set of collections.
+ *
+ * @param username
+ * The username to use. An Error will be thrown if a user by that name
+ * already exists.
+ * @param password
+ * A password string.
+ *
+ * @return a user object, as would be returned by server.user(username).
+ */
+ registerUser: function registerUser(username, password) {
+ if (username in this.users) {
+ throw new Error("User already exists.");
+ }
+
+ if (!isFinite(parseInt(username))) {
+ throw new Error("Usernames must be numeric: " + username);
+ }
+
+ this._log.info("Registering new user with server: " + username);
+ this.users[username] = {
+ password: password,
+ collections: {},
+ quota: this.DEFAULT_QUOTA,
+ };
+ return this.user(username);
+ },
+
+ userExists: function userExists(username) {
+ return username in this.users;
+ },
+
+ getCollection: function getCollection(username, collection) {
+ return this.users[username].collections[collection];
+ },
+
+ _insertCollection: function _insertCollection(collections, collection, bsos) {
+ let coll = new StorageServerCollection(bsos, true);
+ coll.collectionHandler = coll.handler();
+ collections[collection] = coll;
+ return coll;
+ },
+
+ createCollection: function createCollection(username, collection, bsos) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let collections = this.users[username].collections;
+ if (collection in collections) {
+ throw new Error("Collection already exists.");
+ }
+ return this._insertCollection(collections, collection, bsos);
+ },
+
+ deleteCollection: function deleteCollection(username, collection) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ delete this.users[username].collections[collection];
+ },
+
+ /**
+ * Accept a map like the following:
+ * {
+ * meta: {global: {version: 1, ...}},
+ * crypto: {"keys": {}, foo: {bar: 2}},
+ * bookmarks: {}
+ * }
+ * to cause collections and BSOs to be created.
+ * If a collection already exists, no error is raised.
+ * If a BSO already exists, it will be updated to the new contents.
+ */
+ createContents: function createContents(username, collections) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let userCollections = this.users[username].collections;
+ for (let [id, contents] of Object.entries(collections)) {
+ let coll = userCollections[id] ||
+ this._insertCollection(userCollections, id);
+ for (let [bsoID, payload] of Object.entries(contents)) {
+ coll.insert(bsoID, payload);
+ }
+ }
+ },
+
+ /**
+ * Insert a BSO in an existing collection.
+ */
+ insertBSO: function insertBSO(username, collection, bso) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let userCollections = this.users[username].collections;
+ if (!(collection in userCollections)) {
+ throw new Error("Unknown collection.");
+ }
+ userCollections[collection].insertBSO(bso);
+ return bso;
+ },
+
+ /**
+ * Delete all of the collections for the named user.
+ *
+ * @param username
+ * The name of the affected user.
+ */
+ deleteCollections: function deleteCollections(username) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+ let userCollections = this.users[username].collections;
+ for (let name in userCollections) {
+ let coll = userCollections[name];
+ this._log.trace("Bulk deleting " + name + " for " + username + "...");
+ coll.delete({});
+ }
+ this.users[username].collections = {};
+ },
+
+ getQuota: function getQuota(username) {
+ if (!(username in this.users)) {
+ throw new Error("Unknown user.");
+ }
+
+ return this.users[username].quota;
+ },
+
+ /**
+ * Obtain the newest timestamp of all collections for a user.
+ */
+ newestCollectionTimestamp: function newestCollectionTimestamp(username) {
+ let collections = this.users[username].collections;
+ let newest = 0;
+ for (let name in collections) {
+ let collection = collections[name];
+ if (collection.timestamp > newest) {
+ newest = collection.timestamp;
+ }
+ }
+
+ return newest;
+ },
+
+ /**
+ * Compute the object that is returned for an info/collections request.
+ */
+ infoCollections: function infoCollections(username) {
+ let responseObject = {};
+ let colls = this.users[username].collections;
+ for (let coll in colls) {
+ responseObject[coll] = colls[coll].timestamp;
+ }
+ this._log.trace("StorageServer: info/collections returning " +
+ JSON.stringify(responseObject));
+ return responseObject;
+ },
+
+ infoCounts: function infoCounts(username) {
+ let data = {};
+ let collections = this.users[username].collections;
+ for (let [k, v] of Object.entries(collections)) {
+ let count = v.count();
+ if (!count) {
+ continue;
+ }
+
+ data[k] = count;
+ }
+
+ return data;
+ },
+
+ infoUsage: function infoUsage(username) {
+ let data = {};
+ let collections = this.users[username].collections;
+ for (let [k, v] of Object.entries(collections)) {
+ data[k] = v.totalPayloadSize;
+ }
+
+ return data;
+ },
+
+ infoQuota: function infoQuota(username) {
+ let total = 0;
+ let usage = this.infoUsage(username);
+ for (let key in usage) {
+ let value = usage[key];
+ total += value;
+ }
+
+ return {
+ quota: this.getQuota(username),
+ usage: total
+ };
+ },
+
+ /**
+ * Simple accessor to allow collective binding and abbreviation of a bunch of
+ * methods. Yay!
+ * Use like this:
+ *
+ * let u = server.user("john");
+ * u.collection("bookmarks").bso("abcdefg").payload; // Etc.
+ *
+ * @return a proxy for the user data stored in this server.
+ */
+ user: function user(username) {
+ let collection = this.getCollection.bind(this, username);
+ let createCollection = this.createCollection.bind(this, username);
+ let createContents = this.createContents.bind(this, username);
+ let modified = function (collectionName) {
+ return collection(collectionName).timestamp;
+ }
+ let deleteCollections = this.deleteCollections.bind(this, username);
+ let quota = this.getQuota.bind(this, username);
+ return {
+ collection: collection,
+ createCollection: createCollection,
+ createContents: createContents,
+ deleteCollections: deleteCollections,
+ modified: modified,
+ quota: quota,
+ };
+ },
+
+ _pruneExpired: function _pruneExpired() {
+ let now = Date.now();
+
+ for (let username in this.users) {
+ let user = this.users[username];
+ for (let name in user.collections) {
+ let collection = user.collections[name];
+ for (let bso of collection.bsos()) {
+ // ttl === 0 is a special case, so we can't simply test !ttl.
+ if (typeof(bso.ttl) != "number") {
+ continue;
+ }
+
+ let ttlDate = bso.modified + (bso.ttl * 1000);
+ if (ttlDate < now) {
+ this._log.info("Deleting BSO because TTL expired: " + bso.id);
+ bso.delete();
+ }
+ }
+ }
+ }
+ },
+
+ /*
+ * Regular expressions for splitting up Storage request paths.
+ * Storage URLs are of the form:
+ * /$apipath/$version/$userid/$further
+ * where $further is usually:
+ * storage/$collection/$bso
+ * or
+ * storage/$collection
+ * or
+ * info/$op
+ *
+ * We assume for the sake of simplicity that $apipath is empty.
+ *
+ * N.B., we don't follow any kind of username spec here, because as far as I
+ * can tell there isn't one. See Bug 689671. Instead we follow the Python
+ * server code.
+ *
+ * Path: [all, version, userid, first, rest]
+ * Storage: [all, collection?, id?]
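+ *
+ * For example, "/2.0/123456/storage/bookmarks/record1" splits into
+ * version "2.0", userid "123456", first "storage", and rest
+ * "bookmarks/record1"; storageRE then splits that rest into the
+ * collection "bookmarks" and the BSO id "record1".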
+ */
+ pathRE: /^\/([0-9]+(?:\.[0-9]+)?)(?:\/([0-9]+)\/([^\/]+)(?:\/(.+))?)?$/,
+ storageRE: /^([-_a-zA-Z0-9]+)(?:\/([-_a-zA-Z0-9]+)\/?)?$/,
+
+ defaultHeaders: {},
+
+ /**
+ * HTTP response utility: sets the status line, the supplied headers (or
+ * defaultHeaders when none are given), an optional X-Timestamp header,
+ * and writes the body, if any.
+ */
+ respond: function respond(req, resp, code, status, body, headers, timestamp) {
+ this._log.info("Response: " + code + " " + status);
+ resp.setStatusLine(req.httpVersion, code, status);
+ if (!headers) {
+ headers = this.defaultHeaders;
+ }
+ for (let header in headers) {
+ let value = headers[header];
+ resp.setHeader(header, value, false);
+ }
+
+ if (timestamp) {
+ resp.setHeader("X-Timestamp", "" + timestamp, false);
+ }
+
+ if (body) {
+ resp.bodyOutputStream.write(body, body.length);
+ }
+ },
+
+ /**
+ * This is invoked by the HttpServer. `this` is bound to the StorageServer;
+ * `handler` is the HttpServer's handler.
+ *
+ * TODO: need to use the correct Storage API response codes and errors here.
+ */
+ handleDefault: function handleDefault(handler, req, resp) {
+ this.requestCount++;
+ let timestamp = new_timestamp();
+ try {
+ this._handleDefault(handler, req, resp, timestamp);
+ } catch (e) {
+ if (e instanceof HttpError) {
+ this.respond(req, resp, e.code, e.description, "", {}, timestamp);
+ } else {
+ this._log.warn("StorageServer: handleDefault caught an error", e);
+ throw e;
+ }
+ }
+ },
+
+ _handleDefault: function _handleDefault(handler, req, resp, timestamp) {
+ let path = req.path;
+ if (req.queryString.length) {
+ path += "?" + req.queryString;
+ }
+
+ this._log.debug("StorageServer: Handling request: " + req.method + " " +
+ path);
+
+ if (this.callback.onRequest) {
+ this.callback.onRequest(req);
+ }
+
+ // Prune expired records for all users at top of request. This is the
+ // easiest way to process TTLs since all requests go through here.
+ this._pruneExpired();
+
+ req.timestamp = timestamp;
+ resp.setHeader("X-Timestamp", "" + timestamp, false);
+
+ let parts = this.pathRE.exec(req.path);
+ if (!parts) {
+ this._log.debug("StorageServer: Unexpected request: bad URL " + req.path);
+ throw HTTP_404;
+ }
+
+ let [all, version, userPath, first, rest] = parts;
+ if (version != STORAGE_API_VERSION) {
+ this._log.debug("StorageServer: Unknown version.");
+ throw HTTP_404;
+ }
+
+ let username;
+
+ // By default, the server requires users to be authenticated. When a
+ // request arrives, the user must have been previously configured and
+ // the request must have authentication. In "allow all users" mode, we
+ // take the username from the URL, create the user on the fly, and don't
+ // perform any authentication.
+ if (!this.allowAllUsers) {
+ // Enforce authentication.
+ if (!req.hasHeader("authorization")) {
+ this.respond(req, resp, 401, "Authorization Required", "{}", {
+ "WWW-Authenticate": 'Basic realm="secret"'
+ });
+ return;
+ }
+
+ let ensureUserExists = function ensureUserExists(username) {
+ if (this.userExists(username)) {
+ return;
+ }
+
+ this._log.info("StorageServer: Unknown user: " + username);
+ throw HTTP_401;
+ }.bind(this);
+
+ let auth = req.getHeader("authorization");
+ this._log.debug("Authorization: " + auth);
+
+ if (auth.indexOf("Basic ") == 0) {
+ let decoded = CommonUtils.safeAtoB(auth.substr(6));
+ this._log.debug("Decoded Basic Auth: " + decoded);
+ let [user, password] = decoded.split(":", 2);
+
+ if (!password) {
+ this._log.debug("Malformed HTTP Basic Authorization header: " + auth);
+ throw HTTP_400;
+ }
+
+ this._log.debug("Got HTTP Basic auth for user: " + user);
+ ensureUserExists(user);
+ username = user;
+
+ if (this.users[user].password != password) {
+ this._log.debug("StorageServer: Provided password is not correct.");
+ throw HTTP_401;
+ }
+ // TODO support token auth.
+ } else {
+ this._log.debug("Unsupported HTTP authorization type: " + auth);
+ throw HTTP_500;
+ }
+ // All users mode.
+ } else {
+ // Auto create user with dummy password.
+ if (!this.userExists(userPath)) {
+ this.registerUser(userPath, "DUMMY-PASSWORD-*&%#");
+ }
+
+ username = userPath;
+ }
+
+ // Hand off to the appropriate handler for this path component.
+ if (first in this.toplevelHandlers) {
+ let handler = this.toplevelHandlers[first];
+ try {
+ return handler.call(this, handler, req, resp, version, username, rest);
+ } catch (ex) {
+ this._log.warn("Got exception during request", ex);
+ throw ex;
+ }
+ }
+ this._log.debug("StorageServer: Unknown top-level " + first);
+ throw HTTP_404;
+ },
+
+ /**
+ * Collection of the handler methods we use for top-level path components.
+ */
+ toplevelHandlers: {
+ "storage": function handleStorage(handler, req, resp, version, username,
+ rest) {
+ let respond = this.respond.bind(this, req, resp);
+ if (!rest || !rest.length) {
+ this._log.debug("StorageServer: top-level storage " +
+ req.method + " request.");
+
+ if (req.method != "DELETE") {
+ respond(405, "Method Not Allowed", null, {"Allow": "DELETE"});
+ return;
+ }
+
+ this.user(username).deleteCollections();
+
+ respond(204, "No Content");
+ return;
+ }
+
+ let match = this.storageRE.exec(rest);
+ if (!match) {
+ this._log.warn("StorageServer: Unknown storage operation " + rest);
+ throw HTTP_404;
+ }
+ let [all, collection, bsoID] = match;
+ let coll = this.getCollection(username, collection);
+ let collectionExisted = !!coll;
+
+ switch (req.method) {
+ case "GET":
+ // Tried to GET on a collection that doesn't exist.
+ if (!coll) {
+ respond(404, "Not Found");
+ return;
+ }
+
+ // No BSO URL parameter goes to collection handler.
+ if (!bsoID) {
+ return coll.collectionHandler(req, resp);
+ }
+
+ // Handle non-existent BSO.
+ let bso = coll.bso(bsoID);
+ if (!bso) {
+ respond(404, "Not Found");
+ return;
+ }
+
+ // Proxy to BSO handler.
+ return bso.getHandler(req, resp);
+
+ case "DELETE":
+ // Collection doesn't exist.
+ if (!coll) {
+ respond(404, "Not Found");
+ return;
+ }
+
+ // Deleting a specific BSO.
+ if (bsoID) {
+ let bso = coll.bso(bsoID);
+
+ // BSO does not exist on the server. Nothing to do.
+ if (!bso) {
+ respond(404, "Not Found");
+ return;
+ }
+
+ if (req.hasHeader("x-if-unmodified-since")) {
+ let modified = parseInt(req.getHeader("x-if-unmodified-since"));
+ CommonUtils.ensureMillisecondsTimestamp(modified);
+
+ if (bso.modified > modified) {
+ respond(412, "Precondition Failed");
+ return;
+ }
+ }
+
+ bso.delete();
+ coll.timestamp = req.timestamp;
+ this.callback.onItemDeleted(username, collection, bsoID);
+ respond(204, "No Content");
+ return;
+ }
+
+ // Proxy to collection handler.
+ coll.collectionHandler(req, resp);
+
+ // Spot if this is a DELETE for some IDs, and don't blow away the
+ // whole collection!
+ //
+ // We already handled deleting the BSOs by invoking the deleted
+ // collection's handler. However, in the case of
+ //
+ // DELETE storage/foobar
+ //
+ // we also need to remove foobar from the collections map. This
+ // clause tries to differentiate the above request from
+ //
+ // DELETE storage/foobar?ids=foo,baz
+ //
+ // and do the right thing.
+ // TODO: less hacky method.
+ if (-1 == req.queryString.indexOf("ids=")) {
+ // When you delete the entire collection, we drop it.
+ this._log.debug("Deleting entire collection.");
+ delete this.users[username].collections[collection];
+ this.callback.onCollectionDeleted(username, collection);
+ }
+
+ // Notify of item deletion.
+ let deleted = resp.deleted || [];
+ for (let i = 0; i < deleted.length; ++i) {
+ this.callback.onItemDeleted(username, collection, deleted[i]);
+ }
+ return;
+
+ case "POST":
+ case "PUT":
+ // Auto-create collection if it doesn't exist.
+ if (!coll) {
+ coll = this.createCollection(username, collection);
+ }
+
+ try {
+ if (bsoID) {
+ let bso = coll.bso(bsoID);
+ if (!bso) {
+ this._log.trace("StorageServer: creating BSO " + collection +
+ "/" + bsoID);
+ try {
+ bso = coll.insert(bsoID);
+ } catch (ex) {
+ return sendMozSvcError(req, resp, "8");
+ }
+ }
+
+ bso.putHandler(req, resp);
+
+ coll.timestamp = req.timestamp;
+ return resp;
+ }
+
+ return coll.collectionHandler(req, resp);
+ } catch (ex) {
+ if (ex instanceof HttpError) {
+ if (!collectionExisted) {
+ this.deleteCollection(username, collection);
+ }
+ }
+
+ throw ex;
+ }
+
+ default:
+ throw new Error("Request method " + req.method + " not implemented.");
+ }
+ },
+
+ "info": function handleInfo(handler, req, resp, version, username, rest) {
+ switch (rest) {
+ case "collections":
+ return this.handleInfoCollections(req, resp, username);
+
+ case "collection_counts":
+ return this.handleInfoCounts(req, resp, username);
+
+ case "collection_usage":
+ return this.handleInfoUsage(req, resp, username);
+
+ case "quota":
+ return this.handleInfoQuota(req, resp, username);
+
+ default:
+ this._log.warn("StorageServer: Unknown info operation " + rest);
+ throw HTTP_404;
+ }
+ }
+ },
+
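+ /**
+ * Shared X-If-Modified-Since handling for the info handlers below.
+ *
+ * Returns true if a 304 Not Modified response was sent (in which case the
+ * caller must not write a body), and false otherwise.
+ */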
+ handleInfoConditional: function handleInfoConditional(request, response,
+ user) {
+ if (!request.hasHeader("x-if-modified-since")) {
+ return false;
+ }
+
+ let requestModified = request.getHeader("x-if-modified-since");
+ requestModified = parseInt(requestModified, 10);
+
+ let serverModified = this.newestCollectionTimestamp(user);
+
+ this._log.info("Server mtime: " + serverModified + "; Client modified: " +
+ requestModified);
+ if (serverModified > requestModified) {
+ return false;
+ }
+
+ this.respond(request, response, 304, "Not Modified", null, {
+ "X-Last-Modified": "" + serverModified
+ });
+
+ return true;
+ },
+
+ handleInfoCollections: function handleInfoCollections(request, response,
+ user) {
+ if (this.handleInfoConditional(request, response, user)) {
+ return;
+ }
+
+ let info = this.infoCollections(user);
+ let body = JSON.stringify(info);
+ this.respond(request, response, 200, "OK", body, {
+ "Content-Type": "application/json",
+ "X-Last-Modified": "" + this.newestCollectionTimestamp(user),
+ });
+ },
+
+ handleInfoCounts: function handleInfoCounts(request, response, user) {
+ if (this.handleInfoConditional(request, response, user)) {
+ return;
+ }
+
+ let counts = this.infoCounts(user);
+ let body = JSON.stringify(counts);
+
+ this.respond(request, response, 200, "OK", body, {
+ "Content-Type": "application/json",
+ "X-Last-Modified": "" + this.newestCollectionTimestamp(user),
+ });
+ },
+
+ handleInfoUsage: function handleInfoUsage(request, response, user) {
+ if (this.handleInfoConditional(request, response, user)) {
+ return;
+ }
+
+ let body = JSON.stringify(this.infoUsage(user));
+ this.respond(request, response, 200, "OK", body, {
+ "Content-Type": "application/json",
+ "X-Last-Modified": "" + this.newestCollectionTimestamp(user),
+ });
+ },
+
+ handleInfoQuota: function handleInfoQuota(request, response, user) {
+ if (this.handleInfoConditional(request, response, user)) {
+ return;
+ }
+
+ let body = JSON.stringify(this.infoQuota(user));
+ this.respond(request, response, 200, "OK", body, {
+ "Content-Type": "application/json",
+ "X-Last-Modified": "" + this.newestCollectionTimestamp(user),
+ });
+ },
+};
+
+/**
+ * Helper to create a storage server for a set of users.
+ *
+ * Each user is specified by a map of username to password.
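+ *
+ * For example (usernames and contents are illustrative):
+ *
+ * let server = storageServerForUsers({"john": "secret"}, {
+ * bookmarks: {},
+ * });
+ *
+ * The contents argument is passed to createContents() for every
+ * registered user.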
+ */
+this.storageServerForUsers =
+ function storageServerForUsers(users, contents, callback) {
+ let server = new StorageServer(callback);
+ for (let [user, pass] of Object.entries(users)) {
+ server.registerUser(user, pass);
+ server.createContents(user, contents);
+ }
+ server.start();
+ return server;
+}
diff --git a/services/common/modules-testing/utils.js b/services/common/modules-testing/utils.js
new file mode 100644
index 000000000..e909afc48
--- /dev/null
+++ b/services/common/modules-testing/utils.js
@@ -0,0 +1,42 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "TestingUtils",
+];
+
+this.TestingUtils = {
+ /**
+ * Perform a deep copy of an Array or Object.
+ *
+ * Unless noSort is true, object keys are copied in sorted order, which
+ * keeps copies deterministic for comparison in tests.
+ */
+ deepCopy: function deepCopy(thing, noSort) {
+ if (typeof(thing) != "object" || thing == null) {
+ return thing;
+ }
+
+ if (Array.isArray(thing)) {
+ let ret = [];
+ for (let element of thing) {
+ ret.push(this.deepCopy(element, noSort));
+ }
+
+ return ret;
+ }
+
+ let ret = {};
+ let props = Object.keys(thing);
+
+ if (!noSort) {
+ props = props.sort();
+ }
+
+ for (let prop of props) {
+ ret[prop] = this.deepCopy(thing[prop], noSort);
+ }
+
+ return ret;
+ },
+};
diff --git a/services/common/moz.build b/services/common/moz.build
new file mode 100644
index 000000000..c09e6bed0
--- /dev/null
+++ b/services/common/moz.build
@@ -0,0 +1,48 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files('**'):
+ BUG_COMPONENT = ('Mozilla Services', 'Firefox: Common')
+
+TEST_DIRS += ['tests']
+
+EXTRA_COMPONENTS += [
+ 'servicesComponents.manifest',
+]
+
+EXTRA_JS_MODULES['services-common'] += [
+ 'async.js',
+ 'blocklist-clients.js',
+ 'blocklist-updater.js',
+ 'kinto-http-client.js',
+ 'kinto-offline-client.js',
+ 'logmanager.js',
+ 'observers.js',
+ 'rest.js',
+ 'stringbundle.js',
+ 'utils.js',
+]
+
+if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'android':
+ EXTRA_JS_MODULES['services-common'] += [
+ 'hawkclient.js',
+ 'hawkrequest.js',
+ 'tokenserverclient.js',
+ ]
+
+ TESTING_JS_MODULES.services.common += [
+ 'modules-testing/storageserver.js',
+ ]
+
+TESTING_JS_MODULES.services.common += [
+ 'modules-testing/logging.js',
+ 'modules-testing/utils.js',
+]
+
+JS_PREFERENCE_FILES += [
+ 'services-common.js',
+]
+
diff --git a/services/common/observers.js b/services/common/observers.js
new file mode 100644
index 000000000..c0b771048
--- /dev/null
+++ b/services/common/observers.js
@@ -0,0 +1,150 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["Observers"];
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cr = Components.results;
+var Cu = Components.utils;
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+
+/**
+ * A service for adding, removing and notifying observers of notifications.
+ * Wraps the nsIObserverService interface.
+ *
+ * @version 0.2
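+ *
+ * Example (the topic name is illustrative):
+ *
+ * function onMyTopic(subject, data) {
+ * // React to the notification.
+ * }
+ * Observers.add("my-topic", onMyTopic);
+ * Observers.notify("my-topic", {foo: 1});
+ * Observers.remove("my-topic", onMyTopic);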
+ */
+this.Observers = {
+ /**
+ * Register the given callback as an observer of the given topic.
+ *
+ * @param topic {String}
+ * the topic to observe
+ *
+ * @param callback {Object}
+ * the callback; an Object that implements nsIObserver or a Function
+ * that gets called when the notification occurs
+ *
+ * @param thisObject {Object} [optional]
+ * the object to use as |this| when calling a Function callback
+ *
+ * @returns the observer
+ */
+ add: function(topic, callback, thisObject) {
+ let observer = new Observer(topic, callback, thisObject);
+ this._cache.push(observer);
+ this._service.addObserver(observer, topic, true);
+
+ return observer;
+ },
+
+ /**
+ * Unregister the given callback as an observer of the given topic.
+ *
+ * @param topic {String}
+ * the topic being observed
+ *
+ * @param callback {Object}
+ * the callback doing the observing
+ *
+ * @param thisObject {Object} [optional]
+ * the object being used as |this| when calling a Function callback
+ */
+ remove: function(topic, callback, thisObject) {
+ // This seems fairly inefficient, but I'm not sure how much better
+ // we can make it. We could index by topic, but we can't index by callback
+ // or thisObject, as far as I know, since the keys to JavaScript hashes
+ // (a.k.a. objects) can apparently only be primitive values.
+ let [observer] = this._cache.filter(v => v.topic == topic &&
+ v.callback == callback &&
+ v.thisObject == thisObject);
+ if (observer) {
+ this._service.removeObserver(observer, topic);
+ this._cache.splice(this._cache.indexOf(observer), 1);
+ }
+ },
+
+ /**
+ * Notify observers about something.
+ *
+ * @param topic {String}
+ * the topic to notify observers about
+ *
+ * @param subject {Object} [optional]
+ * some information about the topic; can be any JS object or primitive
+ *
+ * @param data {String} [optional] [deprecated]
+ * some more information about the topic; deprecated as the subject
+ * is sufficient to pass all needed information to the JS observers
+ * that this module targets; if you have multiple values to pass to
+ * the observer, wrap them in an object and pass them via the subject
+ * parameter (i.e.: { foo: 1, bar: "some string", baz: myObject })
+ */
+ notify: function(topic, subject, data) {
+ subject = (typeof subject == "undefined") ? null : new Subject(subject);
+ data = (typeof data == "undefined") ? null : data;
+ this._service.notifyObservers(subject, topic, data);
+ },
+
+ _service: Cc["@mozilla.org/observer-service;1"].
+ getService(Ci.nsIObserverService),
+
+ /**
+ * A cache of observers that have been added.
+ *
+ * We use this to remove observers when a caller calls |remove|.
+ *
+ * XXX This might result in reference cycles, causing memory leaks,
+ * if we hold a reference to an observer that holds a reference to us.
+ * Could we fix that by making this an independent top-level object
+ * rather than a property of this object?
+ */
+ _cache: []
+};
+
+
+function Observer(topic, callback, thisObject) {
+ this.topic = topic;
+ this.callback = callback;
+ this.thisObject = thisObject;
+}
+
+Observer.prototype = {
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsIObserver, Ci.nsISupportsWeakReference]),
+ observe: function(subject, topic, data) {
+ // Extract the wrapped object for subjects that are one of our wrappers
+ // around a JS object. This way we support both wrapped subjects created
+ // using this module and those that are real XPCOM components.
+ if (subject && typeof subject == "object" &&
+ ("wrappedJSObject" in subject) &&
+ ("observersModuleSubjectWrapper" in subject.wrappedJSObject))
+ subject = subject.wrappedJSObject.object;
+
+ if (typeof this.callback == "function") {
+ if (this.thisObject)
+ this.callback.call(this.thisObject, subject, data);
+ else
+ this.callback(subject, data);
+ }
+ else // typeof this.callback == "object" (nsIObserver)
+ this.callback.observe(subject, topic, data);
+ }
+}
+
+
+function Subject(object) {
+ // Double-wrap the object and set a property identifying the wrappedJSObject
+ // as one of our wrappers to distinguish between subjects that are one of our
+ // wrappers (which we should unwrap when notifying our observers) and those
+ // that are real JS XPCOM components (which we should pass through unaltered).
+ this.wrappedJSObject = { observersModuleSubjectWrapper: true, object: object };
+}
+
+Subject.prototype = {
+ QueryInterface: XPCOMUtils.generateQI([]),
+ getScriptableHelper: function() {},
+ getInterfaces: function() {}
+};
diff --git a/services/common/rest.js b/services/common/rest.js
new file mode 100644
index 000000000..5474dd947
--- /dev/null
+++ b/services/common/rest.js
@@ -0,0 +1,764 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+this.EXPORTED_SYMBOLS = [
+ "RESTRequest",
+ "RESTResponse",
+ "TokenAuthenticatedRESTRequest",
+];
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/NetUtil.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/utils.js");
+
+XPCOMUtils.defineLazyModuleGetter(this, "CryptoUtils",
+ "resource://services-crypto/utils.js");
+
+const Prefs = new Preferences("services.common.");
+
+/**
+ * Single use HTTP requests to RESTish resources.
+ *
+ * @param uri
+ * URI for the request. This can be an nsIURI object or a string
+ * that can be used to create one. An exception will be thrown if
+ * the string is not a valid URI.
+ *
+ * Examples:
+ *
+ * (1) Quick GET request:
+ *
+ * new RESTRequest("http://server/rest/resource").get(function (error) {
+ * if (error) {
+ * // Deal with a network error.
+ * processNetworkErrorCode(error.result);
+ * return;
+ * }
+ * if (!this.response.success) {
+ * // Bail out if we're not getting an HTTP 2xx code.
+ * processHTTPError(this.response.status);
+ * return;
+ * }
+ * processData(this.response.body);
+ * });
+ *
+ * (2) Quick PUT request (non-string data is automatically JSONified)
+ *
+ * new RESTRequest("http://server/rest/resource").put(data, function (error) {
+ * ...
+ * });
+ *
+ * (3) Streaming GET
+ *
+ * let request = new RESTRequest("http://server/rest/resource");
+ * request.setHeader("Accept", "application/newlines");
+ * request.onComplete = function (error) {
+ * if (error) {
+ * // Deal with a network error.
+ * processNetworkErrorCode(error.result);
+ * return;
+ * }
+ * callbackAfterRequestHasCompleted();
+ * };
+ * request.onProgress = function () {
+ * if (!this.response.success) {
+ * // Bail out if we're not getting an HTTP 2xx code.
+ * return;
+ * }
+ * // Process body data and reset it so we don't process the same data twice.
+ * processIncrementalData(this.response.body);
+ * this.response.body = "";
+ * };
+ * request.get();
+ */
+this.RESTRequest = function RESTRequest(uri) {
+ this.status = this.NOT_SENT;
+
+ // If we don't have an nsIURI object yet, make one. This will throw if
+ // 'uri' isn't a valid URI string.
+ if (!(uri instanceof Ci.nsIURI)) {
+ uri = Services.io.newURI(uri, null, null);
+ }
+ this.uri = uri;
+
+ this._headers = {};
+ this._log = Log.repository.getLogger(this._logName);
+ this._log.level =
+ Log.Level[Prefs.get("log.logger.rest.request")];
+}
+RESTRequest.prototype = {
+
+ _logName: "Services.Common.RESTRequest",
+
+ QueryInterface: XPCOMUtils.generateQI([
+ Ci.nsIBadCertListener2,
+ Ci.nsIInterfaceRequestor,
+ Ci.nsIChannelEventSink
+ ]),
+
+ /*** Public API: ***/
+
+ /**
+ * A constant boolean that indicates whether this object will automatically
+ * utf-8 encode request bodies passed as an object. Used for feature detection
+ * so, eg, loop can use the same source code for old and new Firefox versions.
+ */
+ willUTF8EncodeObjectRequests: true,
+
+ /**
+ * URI for the request (an nsIURI object).
+ */
+ uri: null,
+
+ /**
+ * HTTP method (e.g. "GET")
+ */
+ method: null,
+
+ /**
+ * RESTResponse object
+ */
+ response: null,
+
+ /**
+ * nsIRequest load flags. Don't do any caching by default. Don't send user
+ * cookies and such over the wire (Bug 644734).
+ */
+ loadFlags: Ci.nsIRequest.LOAD_BYPASS_CACHE | Ci.nsIRequest.INHIBIT_CACHING | Ci.nsIRequest.LOAD_ANONYMOUS,
+
+ /**
+ * nsIHttpChannel
+ */
+ channel: null,
+
+ /**
+ * Flag to indicate the status of the request.
+ *
+ * One of NOT_SENT, SENT, IN_PROGRESS, COMPLETED, ABORTED.
+ */
+ status: null,
+
+ NOT_SENT: 0,
+ SENT: 1,
+ IN_PROGRESS: 2,
+ COMPLETED: 4,
+ ABORTED: 8,
+
+ /**
+ * HTTP status text of response
+ */
+ statusText: null,
+
+ /**
+ * Request timeout (in seconds, though decimal values can be used for
+ * up to millisecond granularity.)
+ *
+ * 0 for no timeout.
+ */
+ timeout: null,
+
+ /**
+ * The encoding with which the response to this request must be treated.
+ * If a charset parameter is available in the HTTP Content-Type header for
+ * this response, that will always be used, and this value is ignored. We
+ * default to UTF-8 because it is a reasonable choice in practice.
+ */
+ charset: "utf-8",
+
+ /**
+ * Called when the request has been completed, including failures and
+ * timeouts.
+ *
+ * @param error
+ * Error that occurred while making the request, null if there
+ * was no error.
+ */
+ onComplete: function onComplete(error) {
+ },
+
+ /**
+ * Called whenever data is being received on the channel. If this throws an
+ * exception, the request is aborted and the exception is passed as the
+ * error to onComplete().
+ */
+ onProgress: function onProgress() {
+ },
+
+ /**
+ * Set a request header.
+ */
+ setHeader: function setHeader(name, value) {
+ this._headers[name.toLowerCase()] = value;
+ },
+
+ /**
+ * Perform an HTTP GET.
+ *
+ * @param onComplete
+ * Short-circuit way to set the 'onComplete' method. Optional.
+ * @param onProgress
+ * Short-circuit way to set the 'onProgress' method. Optional.
+ *
+ * @return the request object.
+ */
+ get: function get(onComplete, onProgress) {
+ return this.dispatch("GET", null, onComplete, onProgress);
+ },
+
+ /**
+ * Perform an HTTP PATCH.
+ *
+ * @param data
+ * Data to be used as the request body. If this isn't a string
+ * it will be JSONified automatically.
+ * @param onComplete
+ * Short-circuit way to set the 'onComplete' method. Optional.
+ * @param onProgress
+ * Short-circuit way to set the 'onProgress' method. Optional.
+ *
+ * @return the request object.
+ */
+ patch: function patch(data, onComplete, onProgress) {
+ return this.dispatch("PATCH", data, onComplete, onProgress);
+ },
+
+ /**
+ * Perform an HTTP PUT.
+ *
+ * @param data
+ * Data to be used as the request body. If this isn't a string
+ * it will be JSONified automatically.
+ * @param onComplete
+ * Short-circuit way to set the 'onComplete' method. Optional.
+ * @param onProgress
+ * Short-circuit way to set the 'onProgress' method. Optional.
+ *
+ * @return the request object.
+ */
+ put: function put(data, onComplete, onProgress) {
+ return this.dispatch("PUT", data, onComplete, onProgress);
+ },
+
+ /**
+ * Perform an HTTP POST.
+ *
+ * @param data
+ * Data to be used as the request body. If this isn't a string
+ * it will be JSONified automatically.
+ * @param onComplete
+ * Short-circuit way to set the 'onComplete' method. Optional.
+ * @param onProgress
+ * Short-circuit way to set the 'onProgress' method. Optional.
+ *
+ * @return the request object.
+ */
+ post: function post(data, onComplete, onProgress) {
+ return this.dispatch("POST", data, onComplete, onProgress);
+ },
+
+ /**
+ * Perform an HTTP DELETE.
+ *
+ * @param onComplete
+ * Short-circuit way to set the 'onComplete' method. Optional.
+ * @param onProgress
+ * Short-circuit way to set the 'onProgress' method. Optional.
+ *
+ * @return the request object.
+ */
+ delete: function delete_(onComplete, onProgress) {
+ return this.dispatch("DELETE", null, onComplete, onProgress);
+ },
+
+ /**
+ * Abort an active request.
+ */
+ abort: function abort() {
+ if (this.status != this.SENT && this.status != this.IN_PROGRESS) {
+ throw "Can only abort a request that has been sent.";
+ }
+
+ this.status = this.ABORTED;
+ this.channel.cancel(Cr.NS_BINDING_ABORTED);
+
+ if (this.timeoutTimer) {
+ // Clear the abort timer now that the channel is done.
+ this.timeoutTimer.clear();
+ }
+ },
+
+ /*** Implementation stuff ***/
+
+ dispatch: function dispatch(method, data, onComplete, onProgress) {
+ if (this.status != this.NOT_SENT) {
+ throw "Request has already been sent!";
+ }
+
+ this.method = method;
+ if (onComplete) {
+ this.onComplete = onComplete;
+ }
+ if (onProgress) {
+ this.onProgress = onProgress;
+ }
+
+ // Create and initialize HTTP channel.
+ let channel = NetUtil.newChannel({uri: this.uri, loadUsingSystemPrincipal: true})
+ .QueryInterface(Ci.nsIRequest)
+ .QueryInterface(Ci.nsIHttpChannel);
+ this.channel = channel;
+ channel.loadFlags |= this.loadFlags;
+ channel.notificationCallbacks = this;
+
+ this._log.debug(`${method} request to ${this.uri.spec}`);
+ // Set request headers.
+ let headers = this._headers;
+ for (let key in headers) {
+ if (key == 'authorization') {
+ this._log.trace("HTTP Header " + key + ": ***** (suppressed)");
+ } else {
+ this._log.trace("HTTP Header " + key + ": " + headers[key]);
+ }
+ channel.setRequestHeader(key, headers[key], false);
+ }
+
+ // Set HTTP request body.
+ if (method == "PUT" || method == "POST" || method == "PATCH") {
+ // Convert non-string bodies into JSON with utf-8 encoding. If a string
+ // is passed we assume they've already encoded it.
+ let contentType = headers["content-type"];
+ if (typeof data != "string") {
+ data = JSON.stringify(data);
+ if (!contentType) {
+ contentType = "application/json";
+ }
+ if (!contentType.includes("charset")) {
+ data = CommonUtils.encodeUTF8(data);
+ contentType += "; charset=utf-8";
+ } else {
+ // If someone handed us an object but also a custom content-type
+ // it's probably confused. We could go to even further lengths to
+ // respect it, but this shouldn't happen in practice.
+ Cu.reportError("rest.js found an object to JSON.stringify but also a " +
+ "content-type header with a charset specification. " +
+ "This probably isn't going to do what you expect");
+ }
+ }
+ if (!contentType) {
+ contentType = "text/plain";
+ }
+
+ this._log.debug(method + " Length: " + data.length);
+ if (this._log.level <= Log.Level.Trace) {
+ this._log.trace(method + " Body: " + data);
+ }
+
+ let stream = Cc["@mozilla.org/io/string-input-stream;1"]
+ .createInstance(Ci.nsIStringInputStream);
+ stream.setData(data, data.length);
+
+ channel.QueryInterface(Ci.nsIUploadChannel);
+ channel.setUploadStream(stream, contentType, data.length);
+ }
+ // We must set this after setting the upload stream, otherwise it
+ // will always be 'PUT'. Yeah, I know.
+ channel.requestMethod = method;
+
+ // Before opening the channel, set the charset that serves as a hint
+ // as to what the response might be encoded as.
+ channel.contentCharset = this.charset;
+
+ // Blast off!
+ try {
+ channel.asyncOpen2(this);
+ } catch (ex) {
+ // asyncOpen2 can throw in a bunch of cases -- e.g., a forbidden port.
+ this._log.warn("Caught an error in asyncOpen", ex);
+ CommonUtils.nextTick(onComplete.bind(this, ex));
+ }
+ this.status = this.SENT;
+ this.delayTimeout();
+ return this;
+ },
+
+ /**
+ * Create or push back the abort timer that kills this request.
+ */
+ delayTimeout: function delayTimeout() {
+ if (this.timeout) {
+ CommonUtils.namedTimer(this.abortTimeout, this.timeout * 1000, this,
+ "timeoutTimer");
+ }
+ },
+
+ /**
+ * Abort the request based on a timeout.
+ */
+ abortTimeout: function abortTimeout() {
+ this.abort();
+ let error = Components.Exception("Aborting due to channel inactivity.",
+ Cr.NS_ERROR_NET_TIMEOUT);
+ if (!this.onComplete) {
+ this._log.error("Unexpected error: onComplete not defined in " +
+ "abortTimeout.");
+ return;
+ }
+ this.onComplete(error);
+ },
+
+ /*** nsIStreamListener ***/
+
+ onStartRequest: function onStartRequest(channel) {
+ if (this.status == this.ABORTED) {
+ this._log.trace("Not proceeding with onStartRequest, request was aborted.");
+ return;
+ }
+
+ try {
+ channel.QueryInterface(Ci.nsIHttpChannel);
+ } catch (ex) {
+ this._log.error("Unexpected error: channel is not a nsIHttpChannel!");
+ this.status = this.ABORTED;
+ channel.cancel(Cr.NS_BINDING_ABORTED);
+ return;
+ }
+
+ this.status = this.IN_PROGRESS;
+
+ this._log.trace("onStartRequest: " + channel.requestMethod + " " +
+ channel.URI.spec);
+
+ // Create a response object and fill it with some data.
+ let response = this.response = new RESTResponse();
+ response.request = this;
+ response.body = "";
+
+ this.delayTimeout();
+ },
+
+ onStopRequest: function onStopRequest(channel, context, statusCode) {
+ if (this.timeoutTimer) {
+ // Clear the abort timer now that the channel is done.
+ this.timeoutTimer.clear();
+ }
+
+ // We don't want to do anything for a request that's already been aborted.
+ if (this.status == this.ABORTED) {
+ this._log.trace("Not proceeding with onStopRequest, request was aborted.");
+ return;
+ }
+
+ try {
+ channel.QueryInterface(Ci.nsIHttpChannel);
+ } catch (ex) {
+ this._log.error("Unexpected error: channel not nsIHttpChannel!");
+ this.status = this.ABORTED;
+ return;
+ }
+ this.status = this.COMPLETED;
+
+ let statusSuccess = Components.isSuccessCode(statusCode);
+ let uri = channel && channel.URI && channel.URI.spec || "<unknown>";
+ this._log.trace("Channel for " + channel.requestMethod + " " + uri +
+ " returned status code " + statusCode);
+
+ if (!this.onComplete) {
+ this._log.error("Unexpected error: onComplete not defined in " +
+ "onStopRequest.");
+ this.onProgress = null;
+ return;
+ }
+
+ // Throw the failure code and stop execution. Use Components.Exception()
+ // instead of Error() so the exception is QI-able and can be passed across
+ // XPCOM borders while preserving the status code.
+ if (!statusSuccess) {
+ let message = Components.Exception("", statusCode).name;
+ let error = Components.Exception(message, statusCode);
+ this._log.debug(this.method + " " + uri + " failed: " + statusCode + " - " + message);
+ this.onComplete(error);
+ this.onComplete = this.onProgress = null;
+ return;
+ }
+
+ this._log.debug(this.method + " " + uri + " " + this.response.status);
+
+ // Additionally give the full response body when Trace logging.
+ if (this._log.level <= Log.Level.Trace) {
+ this._log.trace(this.method + " body: " + this.response.body);
+ }
+
+ delete this._inputStream;
+
+ this.onComplete(null);
+ this.onComplete = this.onProgress = null;
+ },
+
+ onDataAvailable: function onDataAvailable(channel, cb, stream, off, count) {
+ // We get an nsIRequest, which doesn't have contentCharset.
+ try {
+ channel.QueryInterface(Ci.nsIHttpChannel);
+ } catch (ex) {
+ this._log.error("Unexpected error: channel not nsIHttpChannel!");
+ this.abort();
+
+ if (this.onComplete) {
+ this.onComplete(ex);
+ }
+
+ this.onComplete = this.onProgress = null;
+ return;
+ }
+
+ if (channel.contentCharset) {
+ this.response.charset = channel.contentCharset;
+
+ if (!this._converterStream) {
+ this._converterStream = Cc["@mozilla.org/intl/converter-input-stream;1"]
+ .createInstance(Ci.nsIConverterInputStream);
+ }
+
+ this._converterStream.init(stream, channel.contentCharset, 0,
+ this._converterStream.DEFAULT_REPLACEMENT_CHARACTER);
+
+ try {
+ let str = {};
+ let num = this._converterStream.readString(count, str);
+ if (num != 0) {
+ this.response.body += str.value;
+ }
+ } catch (ex) {
+ this._log.warn("Exception thrown reading " + count + " bytes from " +
+ "the channel", ex);
+ throw ex;
+ }
+ } else {
+ this.response.charset = null;
+
+ if (!this._inputStream) {
+ this._inputStream = Cc["@mozilla.org/scriptableinputstream;1"]
+ .createInstance(Ci.nsIScriptableInputStream);
+ }
+
+ this._inputStream.init(stream);
+
+ this.response.body += this._inputStream.read(count);
+ }
+
+ try {
+ this.onProgress();
+ } catch (ex) {
+ this._log.warn("Got exception calling onProgress handler, aborting " +
+ this.method + " " + channel.URI.spec, ex);
+ this.abort();
+
+ if (!this.onComplete) {
+ this._log.error("Unexpected error: onComplete not defined in " +
+ "onDataAvailable.");
+ this.onProgress = null;
+ return;
+ }
+
+ this.onComplete(ex);
+ this.onComplete = this.onProgress = null;
+ return;
+ }
+
+ this.delayTimeout();
+ },
+
+ /*** nsIInterfaceRequestor ***/
+
+ getInterface: function(aIID) {
+ return this.QueryInterface(aIID);
+ },
+
+ /*** nsIBadCertListener2 ***/
+
+ notifyCertProblem: function notifyCertProblem(socketInfo, sslStatus, targetHost) {
+ this._log.warn("Invalid HTTPS certificate encountered!");
+ // Suppress invalid HTTPS certificate warnings in the UI.
+ // (The request will still fail.)
+ return true;
+ },
+
+ /**
+ * Returns true if headers from the old channel should be
+ * copied to the new channel. Invoked when a channel redirect
+ * is in progress.
+ */
+ shouldCopyOnRedirect: function shouldCopyOnRedirect(oldChannel, newChannel, flags) {
+ let isInternal = !!(flags & Ci.nsIChannelEventSink.REDIRECT_INTERNAL);
+ let isSameURI = newChannel.URI.equals(oldChannel.URI);
+ this._log.debug("Channel redirect: " + oldChannel.URI.spec + ", " +
+ newChannel.URI.spec + ", internal = " + isInternal);
+ return isInternal && isSameURI;
+ },
+
+ /*** nsIChannelEventSink ***/
+ asyncOnChannelRedirect:
+ function asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) {
+
+ let oldSpec = (oldChannel && oldChannel.URI) ? oldChannel.URI.spec : "<undefined>";
+ let newSpec = (newChannel && newChannel.URI) ? newChannel.URI.spec : "<undefined>";
+ this._log.debug("Channel redirect: " + oldSpec + ", " + newSpec + ", " + flags);
+
+ try {
+ newChannel.QueryInterface(Ci.nsIHttpChannel);
+ } catch (ex) {
+ this._log.error("Unexpected error: channel not nsIHttpChannel!");
+ callback.onRedirectVerifyCallback(Cr.NS_ERROR_NO_INTERFACE);
+ return;
+ }
+
+ // For internal redirects, copy the headers that our caller set.
+ try {
+ if (this.shouldCopyOnRedirect(oldChannel, newChannel, flags)) {
+ this._log.trace("Copying headers for safe internal redirect.");
+ for (let key in this._headers) {
+ newChannel.setRequestHeader(key, this._headers[key], false);
+ }
+ }
+ } catch (ex) {
+ this._log.error("Error copying headers", ex);
+ }
+
+ this.channel = newChannel;
+
+ // We let all redirects proceed.
+ callback.onRedirectVerifyCallback(Cr.NS_OK);
+ }
+};
+
+/**
+ * Response object for a RESTRequest. This will be created automatically by
+ * the RESTRequest.
+ */
+this.RESTResponse = function RESTResponse() {
+ this._log = Log.repository.getLogger(this._logName);
+ this._log.level =
+ Log.Level[Prefs.get("log.logger.rest.response")];
+}
+RESTResponse.prototype = {
+
+ _logName: "Services.Common.RESTResponse",
+
+ /**
+ * Corresponding REST request
+ */
+ request: null,
+
+ /**
+ * HTTP status code
+ */
+ get status() {
+ let status;
+ try {
+ status = this.request.channel.responseStatus;
+ } catch (ex) {
+ this._log.debug("Caught exception fetching HTTP status code", ex);
+ return null;
+ }
+ Object.defineProperty(this, "status", {value: status});
+ return status;
+ },
+
+ /**
+ * HTTP status text
+ */
+ get statusText() {
+ let statusText;
+ try {
+ statusText = this.request.channel.responseStatusText;
+ } catch (ex) {
+ this._log.debug("Caught exception fetching HTTP status text", ex);
+ return null;
+ }
+ Object.defineProperty(this, "statusText", {value: statusText});
+ return statusText;
+ },
+
+ /**
+ * Boolean flag that indicates whether the HTTP status code is 2xx or not.
+ */
+ get success() {
+ let success;
+ try {
+ success = this.request.channel.requestSucceeded;
+ } catch (ex) {
+ this._log.debug("Caught exception fetching HTTP success flag", ex);
+ return null;
+ }
+ Object.defineProperty(this, "success", {value: success});
+ return success;
+ },
+
+ /**
+ * Object containing HTTP headers (keyed as lower case)
+ */
+ get headers() {
+ let headers = {};
+ try {
+ this._log.trace("Processing response headers.");
+ let channel = this.request.channel.QueryInterface(Ci.nsIHttpChannel);
+ channel.visitResponseHeaders(function (header, value) {
+ headers[header.toLowerCase()] = value;
+ });
+ } catch (ex) {
+ this._log.debug("Caught exception processing response headers", ex);
+ return null;
+ }
+
+ Object.defineProperty(this, "headers", {value: headers});
+ return headers;
+ },
+
+ /**
+ * HTTP body (string)
+ */
+ body: null
+
+};
+
+/**
+ * Single use MAC authenticated HTTP requests to RESTish resources.
+ *
+ * @param uri
+ * URI going to the RESTRequest constructor.
+ * @param authToken
+ * (Object) An auth token of the form {id: (string), key: (string)}
+ * from which the MAC Authentication header for this request will be
+ * derived. A token as obtained from
+ * TokenServerClient.getTokenFromBrowserIDAssertion is accepted.
+ * @param extra
+ * (Object) Optional extra parameters. Valid keys are: nonce_bytes, ts,
+ * nonce, and ext. See CryptoUtils.computeHTTPMACSHA1 for information on
+ * the purpose of these values.
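+ *
+ * Example (the URI and token values are illustrative):
+ *
+ * let request = new TokenAuthenticatedRESTRequest(
+ * "http://server/resource", {id: "tokenid", key: "tokenkey"});
+ * request.get(function (error) {
+ * // this.response is a RESTResponse, as with a plain RESTRequest.
+ * });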
+ */
+this.TokenAuthenticatedRESTRequest =
+ function TokenAuthenticatedRESTRequest(uri, authToken, extra) {
+ RESTRequest.call(this, uri);
+ this.authToken = authToken;
+ this.extra = extra || {};
+}
+TokenAuthenticatedRESTRequest.prototype = {
+ __proto__: RESTRequest.prototype,
+
+ dispatch: function dispatch(method, data, onComplete, onProgress) {
+ let sig = CryptoUtils.computeHTTPMACSHA1(
+ this.authToken.id, this.authToken.key, method, this.uri, this.extra
+ );
+
+ this.setHeader("Authorization", sig.getHeader());
+
+ return RESTRequest.prototype.dispatch.call(
+ this, method, data, onComplete, onProgress
+ );
+ },
+};
diff --git a/services/common/services-common.js b/services/common/services-common.js
new file mode 100644
index 000000000..bc37d4028
--- /dev/null
+++ b/services/common/services-common.js
@@ -0,0 +1,11 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// This file contains default preference values for components in
+// services-common.
+
+pref("services.common.log.logger.rest.request", "Debug");
+pref("services.common.log.logger.rest.response", "Debug");
+
+pref("services.common.log.logger.tokenserverclient", "Debug");
diff --git a/services/common/servicesComponents.manifest b/services/common/servicesComponents.manifest
new file mode 100644
index 000000000..fe2a52fab
--- /dev/null
+++ b/services/common/servicesComponents.manifest
@@ -0,0 +1,2 @@
+# Register resource aliases
+resource services-common resource://gre/modules/services-common/
diff --git a/services/common/stringbundle.js b/services/common/stringbundle.js
new file mode 100644
index 000000000..a07fa4831
--- /dev/null
+++ b/services/common/stringbundle.js
@@ -0,0 +1,203 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+this.EXPORTED_SYMBOLS = ["StringBundle"];
+
+var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu} = Components;
+
+/**
+ * A string bundle.
+ *
+ * This object presents two APIs: a deprecated one that is equivalent to the API
+ * for the stringbundle XBL binding, to make it easy to switch from that binding
+ * to this module, and a new one that is simpler and easier to use.
+ *
+ * The benefit of this module over the XBL binding is that it can also be used
+ * in JavaScript modules and components, not only in chrome JS.
+ *
+ * To use this module, import it, create a new instance of StringBundle,
+ * and then use the instance's |get| and |getAll| methods to retrieve strings
+ * (you can get both plain and formatted strings with |get|):
+ *
+ * let strings =
+ * new StringBundle("chrome://example/locale/strings.properties");
+ * let foo = strings.get("foo");
+ * let barFormatted = strings.get("bar", [arg1, arg2]);
+ * for (let string of strings.getAll())
+ * dump (string.key + " = " + string.value + "\n");
+ *
+ * @param url {String}
+ * the URL of the string bundle
+ */
+this.StringBundle = function StringBundle(url) {
+ this.url = url;
+}
+
+StringBundle.prototype = {
+ /**
+ * the locale associated with the application
+ * @type nsILocale
+ * @private
+ */
+ get _appLocale() {
+ try {
+ return Cc["@mozilla.org/intl/nslocaleservice;1"].
+ getService(Ci.nsILocaleService).
+ getApplicationLocale();
+ }
+ catch(ex) {
+ return null;
+ }
+ },
+
+ /**
+ * the wrapped nsIStringBundle
+ * @type nsIStringBundle
+ * @private
+ */
+ get _stringBundle() {
+ let stringBundle = Cc["@mozilla.org/intl/stringbundle;1"].
+ getService(Ci.nsIStringBundleService).
+ createBundle(this.url, this._appLocale);
+ this.__defineGetter__("_stringBundle", () => stringBundle);
+ return this._stringBundle;
+ },
+
+
+ // the new API
+
+ /**
+ * the URL of the string bundle
+ * @type String
+ */
+ _url: null,
+ get url() {
+ return this._url;
+ },
+ set url(newVal) {
+ this._url = newVal;
+ delete this._stringBundle;
+ },
+
+ /**
+ * Get a string from the bundle.
+ *
+ * @param key {String}
+ * the identifier of the string to get
+ * @param args {array} [optional]
+ * an array of arguments that replace occurrences of %S in the string
+ *
+ * @returns {String} the value of the string
+ */
+ get: function(key, args) {
+ if (args)
+ return this.stringBundle.formatStringFromName(key, args, args.length);
+ else
+ return this.stringBundle.GetStringFromName(key);
+ },
+
+ /**
+ * Get all the strings in the bundle.
+ *
+ * @returns {Array}
+ * an array of objects with key and value properties
+ */
+ getAll: function() {
+ let strings = [];
+
+ // FIXME: for performance, return an enumerable array that wraps the string
+ // bundle's nsISimpleEnumerator (does JavaScript already support this?).
+
+ let enumerator = this.stringBundle.getSimpleEnumeration();
+
+ while (enumerator.hasMoreElements()) {
+ // We could simply return the nsIPropertyElement objects, but I think
+ // it's better to return standard JS objects that behave as consumers
+ // expect JS objects to behave (e.g. you can modify them dynamically).
+ let string = enumerator.getNext().QueryInterface(Ci.nsIPropertyElement);
+ strings.push({ key: string.key, value: string.value });
+ }
+
+ return strings;
+ },
+
+
+ // the deprecated XBL binding-compatible API
+
+ /**
+ * the URL of the string bundle
+ * @deprecated because its name doesn't make sense outside of an XBL binding
+ * @type String
+ */
+ get src() {
+ return this.url;
+ },
+ set src(newVal) {
+ this.url = newVal;
+ },
+
+ /**
+ * the locale associated with the application
+ * @deprecated because it has never been used outside the XBL binding itself,
+ * and consumers should obtain it directly from the locale service anyway.
+ * @type nsILocale
+ */
+ get appLocale() {
+ return this._appLocale;
+ },
+
+ /**
+ * the wrapped nsIStringBundle
+ * @deprecated because this module should provide all necessary functionality
+ * @type nsIStringBundle
+ *
+ * If you do ever need to use this, let the authors of this module know why
+ * so they can surface functionality for your use case in the module itself
+ * and you don't have to access this underlying XPCOM component.
+ */
+ get stringBundle() {
+ return this._stringBundle;
+ },
+
+ /**
+ * Get a string from the bundle.
+ * @deprecated use |get| instead
+ *
+ * @param key {String}
+ * the identifier of the string to get
+ *
+ * @returns {String}
+ * the value of the string
+ */
+ getString: function(key) {
+ return this.get(key);
+ },
+
+ /**
+ * Get a formatted string from the bundle.
+ * @deprecated use |get| instead
+ *
+ * @param key {string}
+ * the identifier of the string to get
+ * @param args {array}
+ * an array of arguments that replace occurrences of %S in the string
+ *
+ * @returns {String}
+ * the formatted value of the string
+ */
+ getFormattedString: function(key, args) {
+ return this.get(key, args);
+ },
+
+ /**
+ * Get an enumeration of the strings in the bundle.
+ * @deprecated use |getAll| instead
+ *
+ * @returns {nsISimpleEnumerator}
+ * an enumeration of the strings in the bundle
+ */
+ get strings() {
+ return this.stringBundle.getSimpleEnumeration();
+ }
+}
diff --git a/services/common/tests/mach_commands.py b/services/common/tests/mach_commands.py
new file mode 100644
index 000000000..b57fa3aa2
--- /dev/null
+++ b/services/common/tests/mach_commands.py
@@ -0,0 +1,111 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, unicode_literals
+
+import mozpack.path as mozpath
+
+from mozbuild.base import (
+ MachCommandBase,
+)
+
+from mach.decorators import (
+ CommandArgument,
+ CommandProvider,
+ Command,
+)
+
+from mach.registrar import (
+ Registrar
+)
+
+from shutil import rmtree
+from subprocess import Popen
+from sys import argv
+from sys import exit
+from tempfile import mkdtemp
+
+
+
+DEFAULT_PORT = 8080
+DEFAULT_HOSTNAME = 'localhost'
+
+SRCDIR = mozpath.abspath(mozpath.dirname(__file__))
+
+STORAGE_SERVER_SCRIPT = mozpath.join(SRCDIR, 'run_storage_server.js')
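+
+# Typical invocation of the mach command registered below; the port and
+# address shown match the defaults above:
+#
+#   ./mach storage-server --port 8080 --address localhost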
+
+def SyncStorageCommand(func):
+ """Decorator that adds shared command arguments to services commands."""
+
+ port = CommandArgument('--port', metavar='PORT', type=int,
+ default=DEFAULT_PORT, help='Port to run server on.')
+ func = port(func)
+
+ address = CommandArgument('--address', metavar='ADDRESS',
+ default=DEFAULT_HOSTNAME,
+ help='Hostname to bind server to.')
+ func = address(func)
+
+ return func
+
+Registrar.register_category(name='services',
+ title='Services utilities',
+ description='Commands for services development.')
+
+@CommandProvider
+class SyncTestCommands(MachCommandBase):
+ def __init__(self, context):
+ MachCommandBase.__init__(self, context)
+
+ def run_server(self, js_file, hostname, port):
+ topsrcdir = self.topsrcdir
+ topobjdir = self.topobjdir
+
+ unit_test_dir = mozpath.join(SRCDIR, 'unit')
+
+ head_paths = [
+ 'head_global.js',
+ 'head_helpers.js',
+ 'head_http.js',
+ ]
+
+ head_paths = ['"%s"' % mozpath.join(unit_test_dir, path) for path in head_paths]
+
+ args = [
+ '%s/run-mozilla.sh' % self.bindir,
+ '%s/xpcshell' % self.bindir,
+ '-g', self.bindir,
+ '-a', self.bindir,
+ '-r', '%s/components/httpd.manifest' % self.bindir,
+ '-m',
+ '-s',
+ '-e', 'const _TESTING_MODULES_DIR = "%s/_tests/modules";' % topobjdir,
+ '-f', '%s/testing/xpcshell/head.js' % topsrcdir,
+ '-e', 'const _SERVER_ADDR = "%s";' % hostname,
+ '-e', 'const SERVER_PORT = "%s";' % port,
+ '-e', 'const INCLUDE_FILES = [%s];' % ', '.join(head_paths),
+ '-e', '_register_protocol_handlers();',
+ '-e', 'for (let name of INCLUDE_FILES) load(name);',
+ '-e', '_fakeIdleService.activate();',
+ '-f', js_file
+ ]
+
+ profile_dir = mkdtemp()
+ print 'Created profile directory: %s' % profile_dir
+
+ try:
+ env = {'XPCSHELL_TEST_PROFILE_DIR': profile_dir}
+ proc = Popen(args, env=env)
+
+ return proc.wait()
+
+ finally:
+ print 'Removing profile directory %s' % profile_dir
+ rmtree(profile_dir)
+
+ @Command('storage-server', category='services',
+ description='Run a storage server.')
+ @SyncStorageCommand
+ def run_storage_server(self, port=DEFAULT_PORT, address=DEFAULT_HOSTNAME):
+ exit(self.run_server(STORAGE_SERVER_SCRIPT, address, port))
diff --git a/services/common/tests/moz.build b/services/common/tests/moz.build
new file mode 100644
index 000000000..1c6c7e68c
--- /dev/null
+++ b/services/common/tests/moz.build
@@ -0,0 +1,11 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+XPCSHELL_TESTS_MANIFESTS += ['unit/xpcshell.ini']
+
+TEST_DIRS += [
+ 'unit'
+]
diff --git a/services/common/tests/run_storage_server.js b/services/common/tests/run_storage_server.js
new file mode 100644
index 000000000..a6a80f9d3
--- /dev/null
+++ b/services/common/tests/run_storage_server.js
@@ -0,0 +1,25 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * This file runs a Storage Service server.
+ *
+ * It is meant to be executed with an xpcshell.
+ *
+ * The Makefile in this directory contains a target to run it:
+ *
+ * $ make storage-server
+ */
+
+Cu.import("resource://testing-common/services/common/storageserver.js");
+
+initTestLogging();
+
+var server = new StorageServer();
+server.allowAllUsers = true;
+server.startSynchronous(SERVER_PORT);
+_("Storage server started on port " + SERVER_PORT);
+
+// Launch the thread manager.
+_do_main();
diff --git a/services/common/tests/unit/head_global.js b/services/common/tests/unit/head_global.js
new file mode 100644
index 000000000..4a829a82f
--- /dev/null
+++ b/services/common/tests/unit/head_global.js
@@ -0,0 +1,29 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+var {classes: Cc, interfaces: Ci, results: Cr, utils: Cu, manager: Cm} = Components;
+
+var gSyncProfile = do_get_profile();
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+
+Cu.import("resource://testing-common/AppInfo.jsm", this);
+updateAppInfo({
+ name: "XPCShell",
+ ID: "xpcshell@tests.mozilla.org",
+ version: "1",
+ platformVersion: "",
+});
+
+function addResourceAlias() {
+ Cu.import("resource://gre/modules/Services.jsm");
+ const handler = Services.io.getProtocolHandler("resource")
+ .QueryInterface(Ci.nsIResProtocolHandler);
+
+ let modules = ["common", "crypto"];
+ for (let module of modules) {
+ let uri = Services.io.newURI("resource://gre/modules/services-" + module + "/",
+ null, null);
+ handler.setSubstitution("services-" + module, uri);
+ }
+}
+addResourceAlias();
diff --git a/services/common/tests/unit/head_helpers.js b/services/common/tests/unit/head_helpers.js
new file mode 100644
index 000000000..b54045ec1
--- /dev/null
+++ b/services/common/tests/unit/head_helpers.js
@@ -0,0 +1,172 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://testing-common/httpd.js");
+Cu.import("resource://testing-common/services/common/logging.js");
+Cu.import("resource://testing-common/MockRegistrar.jsm");
+
+var btoa = Cu.import("resource://gre/modules/Log.jsm").btoa;
+var atob = Cu.import("resource://gre/modules/Log.jsm").atob;
+
+function do_check_empty(obj) {
+ do_check_attribute_count(obj, 0);
+}
+
+function do_check_attribute_count(obj, c) {
+ do_check_eq(c, Object.keys(obj).length);
+}
+
+function do_check_throws(aFunc, aResult, aStack) {
+ if (!aStack) {
+ try {
+ // We might not have a 'Components' object.
+ aStack = Components.stack.caller;
+ } catch (e) {}
+ }
+
+ try {
+ aFunc();
+ } catch (e) {
+ do_check_eq(e.result, aResult, aStack);
+ return;
+ }
+ do_throw("Expected result " + aResult + ", none thrown.", aStack);
+}
+
+
+/**
+ * Test whether the specified function throws an exception with the
+ * expected message.
+ *
+ * @param aFunc
+ * Function to be tested.
+ * @param aResult
+ * Message of the expected exception.
+ */
+function do_check_throws_message(aFunc, aResult) {
+ try {
+ aFunc();
+ } catch (e) {
+ do_check_eq(e.message, aResult);
+ return;
+ }
+ do_throw("Expected an error, none thrown.");
+}
+
+/**
+ * Print some debug message to the console. All arguments will be printed,
+ * separated by spaces.
+ *
+ * @param [arg0, arg1, arg2, ...]
+ * Any number of arguments to print out
+ * @usage _("Hello World") -> prints "Hello World"
+ * @usage _(1, 2, 3) -> prints "1 2 3"
+ */
+var _ = function(some, debug, text, to) {
+ print(Array.slice(arguments).join(" "));
+};
+
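+/**
+ * Create and start an HttpServer for the given map of path handlers.
+ *
+ * Example (the path and handler are illustrative):
+ *
+ * let server = httpd_setup({"/resource": httpd_handler(200, "OK", "body")});
+ * // Issue requests against server.baseURI + "/resource", then:
+ * server.stop(do_test_finished);
+ */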
+function httpd_setup (handlers, port=-1) {
+ let server = new HttpServer();
+ for (let path in handlers) {
+ server.registerPathHandler(path, handlers[path]);
+ }
+ try {
+ server.start(port);
+ } catch (ex) {
+ _("==========================================");
+ _("Got exception starting HTTP server on port " + port);
+ _("Error: " + Log.exceptionStr(ex));
+ _("Is there a process already listening on port " + port + "?");
+ _("==========================================");
+ do_throw(ex);
+ }
+
+ // Set the base URI for convenience.
+ let i = server.identity;
+ server.baseURI = i.primaryScheme + "://" + i.primaryHost + ":" + i.primaryPort;
+
+ return server;
+}
+
+function httpd_handler(statusCode, status, body) {
+ return function handler(request, response) {
+ _("Processing request");
+ // Allow test functions to inspect the request.
+ request.body = readBytesFromInputStream(request.bodyInputStream);
+ handler.request = request;
+
+ response.setStatusLine(request.httpVersion, statusCode, status);
+ if (body) {
+ response.bodyOutputStream.write(body, body.length);
+ }
+ };
+}
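+
+// Illustrative sketch (the path and body here are hypothetical): combine the
+// two helpers above to serve a canned response and inspect the request later.
+//   let handler = httpd_handler(200, "OK", "pong");
+//   let server = httpd_setup({"/ping": handler});
+//   // ...issue a request against server.baseURI + "/ping"...
+//   // handler.request.body now holds the raw request body.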
+
+/*
+ * Read a string of bytes from an nsIInputStream. If 'count' is omitted,
+ * all available input is read.
+ */
+function readBytesFromInputStream(inputStream, count) {
+ return CommonUtils.readBytesFromInputStream(inputStream, count);
+}
+
+/*
+ * Ensure exceptions from inside callbacks leads to test failures.
+ */
+function ensureThrows(func) {
+ return function() {
+ try {
+ func.apply(this, arguments);
+ } catch (ex) {
+ do_throw(ex);
+ }
+ };
+}
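+
+// ensureThrows is typically used to wrap callbacks handed to async APIs, e.g.
+//   someAsyncApi(ensureThrows(function(result) { do_check_true(result); }));
+// so an exception inside the callback fails the test instead of being lost.
+// (someAsyncApi is a hypothetical placeholder, not a real API.)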
+
+/**
+ * Proxy auth helpers.
+ */
+
+/**
+ * Fake a PAC to prompt a channel replacement.
+ */
+var PACSystemSettings = {
+ QueryInterface: XPCOMUtils.generateQI([Ci.nsISystemProxySettings]),
+
+ // Replace this URI for each test to avoid caching. We want to ensure that
+ // each test gets a completely fresh setup.
+ mainThreadOnly: true,
+ PACURI: null,
+ getProxyForURI: function getProxyForURI(aURI) {
+ throw Cr.NS_ERROR_NOT_IMPLEMENTED;
+ }
+};
+
+var fakePACCID;
+function installFakePAC() {
+ _("Installing fake PAC.");
+ fakePACCID = MockRegistrar.register("@mozilla.org/system-proxy-settings;1",
+ PACSystemSettings);
+}
+
+function uninstallFakePAC() {
+ _("Uninstalling fake PAC.");
+ MockRegistrar.unregister(fakePACCID);
+}
+
+// Many tests do service.startOver() and don't expect the provider type to
+// change (whereas by default, a startOver will do exactly that, so FxA is
+// subsequently used). Tests that know how to deal with the Firefox Accounts
+// identity hack things up themselves to ensure that still works.
+function ensureStartOverKeepsIdentity() {
+ Cu.import("resource://gre/modules/Services.jsm");
+ Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", true);
+ do_register_cleanup(function() {
+ Services.prefs.clearUserPref("services.sync-testing.startOverKeepIdentity");
+ });
+}
+ensureStartOverKeepsIdentity();
diff --git a/services/common/tests/unit/head_http.js b/services/common/tests/unit/head_http.js
new file mode 100644
index 000000000..f590e86cb
--- /dev/null
+++ b/services/common/tests/unit/head_http.js
@@ -0,0 +1,29 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+
+function basic_auth_header(user, password) {
+ return "Basic " + btoa(user + ":" + CommonUtils.encodeUTF8(password));
+}
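+
+// For example, basic_auth_header("guest", "guest") yields
+// "Basic Z3Vlc3Q6Z3Vlc3Q=" (the base64 encoding of "guest:guest").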
+
+function basic_auth_matches(req, user, password) {
+ if (!req.hasHeader("Authorization")) {
+ return false;
+ }
+
+ let expected = basic_auth_header(user, CommonUtils.encodeUTF8(password));
+ return req.getHeader("Authorization") == expected;
+}
+
+function httpd_basic_auth_handler(body, metadata, response) {
+ if (basic_auth_matches(metadata, "guest", "guest")) {
+ response.setStatusLine(metadata.httpVersion, 200, "OK, authorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ } else {
+ body = "This path exists and is protected - failed";
+ response.setStatusLine(metadata.httpVersion, 401, "Unauthorized");
+ response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
+ }
+ response.bodyOutputStream.write(body, body.length);
+}
diff --git a/services/common/tests/unit/moz.build b/services/common/tests/unit/moz.build
new file mode 100644
index 000000000..a110d66e2
--- /dev/null
+++ b/services/common/tests/unit/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+TEST_DIRS += [
+ 'test_blocklist_signatures'
+]
diff --git a/services/common/tests/unit/test_async_chain.js b/services/common/tests/unit/test_async_chain.js
new file mode 100644
index 000000000..c3abef296
--- /dev/null
+++ b/services/common/tests/unit/test_async_chain.js
@@ -0,0 +1,30 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/async.js");
+
+function run_test() {
+ _("Chain a few async methods, making sure the 'this' object is correct.");
+
+ let methods = {
+ save: function(x, callback) {
+ this.x = x;
+ callback(x);
+ },
+ addX: function(x, callback) {
+ callback(x + this.x);
+ },
+ double: function(x, callback) {
+ callback(x * 2);
+ },
+ neg: function(x, callback) {
+ callback(-x);
+ }
+ };
+ methods.chain = Async.chain;
+
+ // ((1 + 1 + 1) * (-1) + 1) * 2 + 1 = -3
+ methods.chain(methods.save, methods.addX, methods.addX, methods.neg,
+ methods.addX, methods.double, methods.addX, methods.save)(1);
+ do_check_eq(methods.x, -3);
+}
diff --git a/services/common/tests/unit/test_async_querySpinningly.js b/services/common/tests/unit/test_async_querySpinningly.js
new file mode 100644
index 000000000..8c63fe33c
--- /dev/null
+++ b/services/common/tests/unit/test_async_querySpinningly.js
@@ -0,0 +1,103 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-common/utils.js");
+
+_("Make sure querySpinningly will synchronously fetch rows for a query asyncly");
+
+const SQLITE_CONSTRAINT_VIOLATION = 19; // http://www.sqlite.org/c3ref/c_abort.html
+
+var Svc = {};
+XPCOMUtils.defineLazyServiceGetter(Svc, "Form",
+ "@mozilla.org/satchel/form-history;1",
+ "nsIFormHistory2");
+
+function querySpinningly(query, names) {
+ let q = Svc.Form.DBConnection.createStatement(query);
+ let r = Async.querySpinningly(q, names);
+ q.finalize();
+ return r;
+}
+
+function run_test() {
+ initTestLogging("Trace");
+
+ _("Make sure the call is async and allows other events to process");
+ let isAsync = false;
+ CommonUtils.nextTick(function() { isAsync = true; });
+ do_check_false(isAsync);
+
+ _("Empty out the formhistory table");
+ let r0 = querySpinningly("DELETE FROM moz_formhistory");
+ do_check_eq(r0, null);
+
+ _("Make sure there's nothing there");
+ let r1 = querySpinningly("SELECT 1 FROM moz_formhistory");
+ do_check_eq(r1, null);
+
+ _("Insert a row");
+ let r2 = querySpinningly("INSERT INTO moz_formhistory (fieldname, value) VALUES ('foo', 'bar')");
+ do_check_eq(r2, null);
+
+ _("Request a known value for the one row");
+ let r3 = querySpinningly("SELECT 42 num FROM moz_formhistory", ["num"]);
+ do_check_eq(r3.length, 1);
+ do_check_eq(r3[0].num, 42);
+
+ _("Get multiple columns");
+ let r4 = querySpinningly("SELECT fieldname, value FROM moz_formhistory", ["fieldname", "value"]);
+ do_check_eq(r4.length, 1);
+ do_check_eq(r4[0].fieldname, "foo");
+ do_check_eq(r4[0].value, "bar");
+
+ _("Get multiple columns with a different order");
+ let r5 = querySpinningly("SELECT fieldname, value FROM moz_formhistory", ["value", "fieldname"]);
+ do_check_eq(r5.length, 1);
+ do_check_eq(r5[0].fieldname, "foo");
+ do_check_eq(r5[0].value, "bar");
+
+ _("Add multiple entries (sqlite doesn't support multiple VALUES)");
+ let r6 = querySpinningly("INSERT INTO moz_formhistory (fieldname, value) SELECT 'foo', 'baz' UNION SELECT 'more', 'values'");
+ do_check_eq(r6, null);
+
+ _("Get multiple rows");
+ let r7 = querySpinningly("SELECT fieldname, value FROM moz_formhistory WHERE fieldname = 'foo'", ["fieldname", "value"]);
+ do_check_eq(r7.length, 2);
+ do_check_eq(r7[0].fieldname, "foo");
+ do_check_eq(r7[1].fieldname, "foo");
+
+ _("Make sure updates work");
+ let r8 = querySpinningly("UPDATE moz_formhistory SET value = 'updated' WHERE fieldname = 'more'");
+ do_check_eq(r8, null);
+
+ _("Get the updated");
+ let r9 = querySpinningly("SELECT value, fieldname FROM moz_formhistory WHERE fieldname = 'more'", ["fieldname", "value"]);
+ do_check_eq(r9.length, 1);
+ do_check_eq(r9[0].fieldname, "more");
+ do_check_eq(r9[0].value, "updated");
+
+ _("Grabbing fewer fields than queried is fine");
+ let r10 = querySpinningly("SELECT value, fieldname FROM moz_formhistory", ["fieldname"]);
+ do_check_eq(r10.length, 3);
+
+ _("Generate an execution error");
+ let query = "INSERT INTO moz_formhistory (fieldname, value) VALUES ('one', NULL)";
+ let stmt = Svc.Form.DBConnection.createStatement(query);
+  let r11, except;
+ try {
+ r11 = Async.querySpinningly(stmt);
+ } catch(e) {
+ except = e;
+ }
+  stmt.finalize();
+ do_check_true(!!except);
+ do_check_eq(except.result, SQLITE_CONSTRAINT_VIOLATION);
+
+ _("Cleaning up");
+ querySpinningly("DELETE FROM moz_formhistory");
+
+ _("Make sure the timeout got to run before this function ends");
+ do_check_true(isAsync);
+}
diff --git a/services/common/tests/unit/test_blocklist_certificates.js b/services/common/tests/unit/test_blocklist_certificates.js
new file mode 100644
index 000000000..e85970321
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_certificates.js
@@ -0,0 +1,224 @@
+const { Constructor: CC } = Components;
+
+Cu.import("resource://testing-common/httpd.js");
+
+const { OneCRLBlocklistClient } = Cu.import("resource://services-common/blocklist-clients.js");
+const { loadKinto } = Cu.import("resource://services-common/kinto-offline-client.js");
+
+const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream", "setInputStream");
+
+let server;
+
+// set up what we need to make storage adapters
+const Kinto = loadKinto();
+const FirefoxAdapter = Kinto.adapters.FirefoxAdapter;
+const kintoFilename = "kinto.sqlite";
+
+let kintoClient;
+
+function do_get_kinto_collection(collectionName) {
+ if (!kintoClient) {
+ let config = {
+ // Set the remote to be some server that will cause test failure when
+ // hit since we should never hit the server directly, only via maybeSync()
+ remote: "https://firefox.settings.services.mozilla.com/v1/",
+ // Set up the adapter and bucket as normal
+ adapter: FirefoxAdapter,
+ bucket: "blocklists"
+ };
+ kintoClient = new Kinto(config);
+ }
+ return kintoClient.collection(collectionName);
+}
+
+// Some simple tests to demonstrate that the logic inside maybeSync works
+// correctly and that simple kinto operations are working as expected. There
+// are more tests for core Kinto.js (and its storage adapter) in the
+// xpcshell tests under /services/common
+add_task(function* test_something(){
+ const configPath = "/v1/";
+ const recordsPath = "/v1/buckets/blocklists/collections/certificates/records";
+
+ Services.prefs.setCharPref("services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v1`);
+
+ // register a handler
+ function handleResponse (request, response) {
+ try {
+ const sample = getSampleResponse(request, server.identity.primaryPort);
+ if (!sample) {
+ do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
+ }
+
+ response.setStatusLine(null, sample.status.status,
+ sample.status.statusText);
+ // send the headers
+ for (let headerLine of sample.sampleHeaders) {
+ let headerElements = headerLine.split(':');
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+ response.setHeader("Date", (new Date()).toUTCString());
+
+ response.write(sample.responseBody);
+ } catch (e) {
+ do_print(e);
+ }
+ }
+ server.registerPathHandler(configPath, handleResponse);
+ server.registerPathHandler(recordsPath, handleResponse);
+
+ // Test an empty db populates
+ let result = yield OneCRLBlocklistClient.maybeSync(2000, Date.now());
+
+ // Open the collection, verify it's been populated:
+ // Our test data has a single record; it should be in the local collection
+ let collection = do_get_kinto_collection("certificates");
+ yield collection.db.open();
+ let list = yield collection.list();
+ do_check_eq(list.data.length, 1);
+ yield collection.db.close();
+
+ // Test the db is updated when we call again with a later lastModified value
+ result = yield OneCRLBlocklistClient.maybeSync(4000, Date.now());
+
+  // Open the collection, verify it's been updated:
+  // The sync adds two more records; all three should now be in the local collection
+ collection = do_get_kinto_collection("certificates");
+ yield collection.db.open();
+ list = yield collection.list();
+ do_check_eq(list.data.length, 3);
+ yield collection.db.close();
+
+ // Try to maybeSync with the current lastModified value - no connection
+ // should be attempted.
+ // Clear the kinto base pref so any connections will cause a test failure
+ Services.prefs.clearUserPref("services.settings.server");
+ yield OneCRLBlocklistClient.maybeSync(4000, Date.now());
+
+ // Try again with a lastModified value at some point in the past
+ yield OneCRLBlocklistClient.maybeSync(3000, Date.now());
+
+ // Check the OneCRL check time pref is modified, even if the collection
+ // hasn't changed
+ Services.prefs.setIntPref("services.blocklist.onecrl.checked", 0);
+ yield OneCRLBlocklistClient.maybeSync(3000, Date.now());
+ let newValue = Services.prefs.getIntPref("services.blocklist.onecrl.checked");
+ do_check_neq(newValue, 0);
+
+ // Check that a sync completes even when there's bad data in the
+ // collection. This will throw on fail, so just calling maybeSync is an
+  // acceptable test.
+ Services.prefs.setCharPref("services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v1`);
+ yield OneCRLBlocklistClient.maybeSync(5000, Date.now());
+});
+
+function run_test() {
+ // Ensure that signature verification is disabled to prevent interference
+ // with basic certificate sync tests
+ Services.prefs.setBoolPref("services.blocklist.signing.enforced", false);
+
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ run_next_test();
+
+ do_register_cleanup(function() {
+ server.stop(() => { });
+ });
+}
+
+// get a response for a given request from sample data
+function getSampleResponse(req, port) {
+ const responses = {
+ "OPTIONS": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Headers: Content-Length,Expires,Backoff,Retry-After,Last-Modified,Total-Records,ETag,Pragma,Cache-Control,authorization,content-type,if-none-match,Alert,Next-Page",
+ "Access-Control-Allow-Methods: GET,HEAD,OPTIONS,POST,DELETE,OPTIONS",
+ "Access-Control-Allow-Origin: *",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": "null"
+ },
+ "GET:/v1/?": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"settings":{"batch_max_requests":25}, "url":`http://localhost:${port}/v1/`, "documentation":"https://kinto.readthedocs.org/", "version":"1.5.1", "commit":"cbc6f58", "hello":"kinto"})
+ },
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"3000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "issuerName": "MEQxCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwx0aGF3dGUsIEluYy4xHjAcBgNVBAMTFXRoYXd0ZSBFViBTU0wgQ0EgLSBHMw==",
+ "serialNumber":"CrTHPEE6AZSfI3jysin2bA==",
+ "id":"78cf8900-fdea-4ce5-f8fb-b78710617718",
+ "last_modified":3000
+ }]})
+ },
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=3000": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"4000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "issuerName":"MFkxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xKjAoBgNVBAMTIVN0YWF0IGRlciBOZWRlcmxhbmRlbiBPdmVyaGVpZCBDQQ",
+ "serialNumber":"ATFpsA==",
+ "id":"dabafde9-df4a-ddba-2548-748da04cc02c",
+ "last_modified":4000
+ },{
+ "subject":"MCIxIDAeBgNVBAMMF0Fub3RoZXIgVGVzdCBFbmQtZW50aXR5",
+ "pubKeyHash":"VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8=",
+ "id":"dabafde9-df4a-ddba-2548-748da04cc02d",
+ "last_modified":4000
+ }]})
+ },
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=4000": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"5000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "issuerName":"not a base64 encoded issuer",
+ "serialNumber":"not a base64 encoded serial",
+ "id":"dabafde9-df4a-ddba-2548-748da04cc02e",
+ "last_modified":5000
+ },{
+ "subject":"not a base64 encoded subject",
+ "pubKeyHash":"not a base64 encoded pubKeyHash",
+ "id":"dabafde9-df4a-ddba-2548-748da04cc02f",
+ "last_modified":5000
+ },{
+ "subject":"MCIxIDAeBgNVBAMMF0Fub3RoZXIgVGVzdCBFbmQtZW50aXR5",
+ "pubKeyHash":"VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8=",
+ "id":"dabafde9-df4a-ddba-2548-748da04cc02g",
+ "last_modified":5000
+ }]})
+ }
+ };
+ return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+ responses[req.method];
+
+}
diff --git a/services/common/tests/unit/test_blocklist_clients.js b/services/common/tests/unit/test_blocklist_clients.js
new file mode 100644
index 000000000..121fac926
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_clients.js
@@ -0,0 +1,412 @@
+const { Constructor: CC } = Components;
+
+const KEY_PROFILEDIR = "ProfD";
+
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://testing-common/httpd.js");
+Cu.import("resource://gre/modules/Timer.jsm");
+const { FileUtils } = Cu.import("resource://gre/modules/FileUtils.jsm");
+const { OS } = Cu.import("resource://gre/modules/osfile.jsm");
+
+const { loadKinto } = Cu.import("resource://services-common/kinto-offline-client.js");
+const BlocklistClients = Cu.import("resource://services-common/blocklist-clients.js");
+
+const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream", "setInputStream");
+
+const gBlocklistClients = [
+ {client: BlocklistClients.AddonBlocklistClient, filename: BlocklistClients.FILENAME_ADDONS_JSON, testData: ["i808","i720", "i539"]},
+ {client: BlocklistClients.PluginBlocklistClient, filename: BlocklistClients.FILENAME_PLUGINS_JSON, testData: ["p1044","p32","p28"]},
+ {client: BlocklistClients.GfxBlocklistClient, filename: BlocklistClients.FILENAME_GFX_JSON, testData: ["g204","g200","g36"]},
+];
+
+
+let server;
+let kintoClient;
+
+function kintoCollection(collectionName) {
+ if (!kintoClient) {
+ const Kinto = loadKinto();
+ const FirefoxAdapter = Kinto.adapters.FirefoxAdapter;
+ const config = {
+ // Set the remote to be some server that will cause test failure when
+ // hit since we should never hit the server directly, only via maybeSync()
+ remote: "https://firefox.settings.services.mozilla.com/v1/",
+ adapter: FirefoxAdapter,
+ bucket: "blocklists"
+ };
+ kintoClient = new Kinto(config);
+ }
+ return kintoClient.collection(collectionName);
+}
+
+function* readJSON(filepath) {
+ const binaryData = yield OS.File.read(filepath);
+ const textData = (new TextDecoder()).decode(binaryData);
+ return Promise.resolve(JSON.parse(textData));
+}
+
+function* clear_state() {
+ for (let {client} of gBlocklistClients) {
+ // Remove last server times.
+ Services.prefs.clearUserPref(client.lastCheckTimePref);
+
+ // Clear local DB.
+ const collection = kintoCollection(client.collectionName);
+ try {
+ yield collection.db.open();
+ yield collection.clear();
+ } finally {
+ yield collection.db.close();
+ }
+ }
+
+ // Remove profile data.
+ for (let {filename} of gBlocklistClients) {
+ const blocklist = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
+ if (blocklist.exists()) {
+ blocklist.remove(true);
+ }
+ }
+}
+
+
+function run_test() {
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ // Point the blocklist clients to use this local HTTP server.
+ Services.prefs.setCharPref("services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v1`);
+
+ // Setup server fake responses.
+ function handleResponse(request, response) {
+ try {
+ const sample = getSampleResponse(request, server.identity.primaryPort);
+ if (!sample) {
+ do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
+ }
+
+ response.setStatusLine(null, sample.status.status,
+ sample.status.statusText);
+ // send the headers
+ for (let headerLine of sample.sampleHeaders) {
+ let headerElements = headerLine.split(':');
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+ response.setHeader("Date", (new Date()).toUTCString());
+
+ response.write(sample.responseBody);
+ response.finish();
+ } catch (e) {
+ do_print(e);
+ }
+ }
+ const configPath = "/v1/";
+ const addonsRecordsPath = "/v1/buckets/blocklists/collections/addons/records";
+ const gfxRecordsPath = "/v1/buckets/blocklists/collections/gfx/records";
+ const pluginsRecordsPath = "/v1/buckets/blocklists/collections/plugins/records";
+ server.registerPathHandler(configPath, handleResponse);
+ server.registerPathHandler(addonsRecordsPath, handleResponse);
+ server.registerPathHandler(gfxRecordsPath, handleResponse);
+ server.registerPathHandler(pluginsRecordsPath, handleResponse);
+
+
+ run_next_test();
+
+ do_register_cleanup(function() {
+ server.stop(() => { });
+ });
+}
+
+add_task(function* test_records_obtained_from_server_are_stored_in_db(){
+ for (let {client} of gBlocklistClients) {
+ // Test an empty db populates
+ let result = yield client.maybeSync(2000, Date.now());
+
+ // Open the collection, verify it's been populated:
+ // Our test data has a single record; it should be in the local collection
+ let collection = kintoCollection(client.collectionName);
+ yield collection.db.open();
+ let list = yield collection.list();
+ equal(list.data.length, 1);
+ yield collection.db.close();
+ }
+});
+add_task(clear_state);
+
+add_task(function* test_list_is_written_to_file_in_profile(){
+ for (let {client, filename, testData} of gBlocklistClients) {
+ const profFile = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
+ strictEqual(profFile.exists(), false);
+
+ let result = yield client.maybeSync(2000, Date.now());
+
+ strictEqual(profFile.exists(), true);
+ const content = yield readJSON(profFile.path);
+ equal(content.data[0].blockID, testData[testData.length - 1]);
+ }
+});
+add_task(clear_state);
+
+add_task(function* test_current_server_time_is_saved_in_pref(){
+ for (let {client} of gBlocklistClients) {
+ const before = Services.prefs.getIntPref(client.lastCheckTimePref);
+ const serverTime = Date.now();
+ yield client.maybeSync(2000, serverTime);
+ const after = Services.prefs.getIntPref(client.lastCheckTimePref);
+ equal(after, Math.round(serverTime / 1000));
+ }
+});
+add_task(clear_state);
+
+add_task(function* test_update_json_file_when_addons_has_changes(){
+ for (let {client, filename, testData} of gBlocklistClients) {
+ yield client.maybeSync(2000, Date.now() - 1000);
+ const before = Services.prefs.getIntPref(client.lastCheckTimePref);
+ const profFile = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
+ const fileLastModified = profFile.lastModifiedTime = profFile.lastModifiedTime - 1000;
+ const serverTime = Date.now();
+
+ yield client.maybeSync(3001, serverTime);
+
+ // File was updated.
+ notEqual(fileLastModified, profFile.lastModifiedTime);
+ const content = yield readJSON(profFile.path);
+ deepEqual(content.data.map((r) => r.blockID), testData);
+ // Server time was updated.
+ const after = Services.prefs.getIntPref(client.lastCheckTimePref);
+ equal(after, Math.round(serverTime / 1000));
+ }
+});
+add_task(clear_state);
+
+add_task(function* test_sends_reload_message_when_blocklist_has_changes(){
+ for (let {client, filename} of gBlocklistClients) {
+ let received = yield new Promise((resolve, reject) => {
+ Services.ppmm.addMessageListener("Blocklist:reload-from-disk", {
+ receiveMessage(aMsg) { resolve(aMsg) }
+ });
+
+ client.maybeSync(2000, Date.now() - 1000);
+ });
+
+ equal(received.data.filename, filename);
+ }
+});
+add_task(clear_state);
+
+add_task(function* test_do_nothing_when_blocklist_is_up_to_date(){
+ for (let {client, filename} of gBlocklistClients) {
+ yield client.maybeSync(2000, Date.now() - 1000);
+ const before = Services.prefs.getIntPref(client.lastCheckTimePref);
+ const profFile = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
+ const fileLastModified = profFile.lastModifiedTime = profFile.lastModifiedTime - 1000;
+ const serverTime = Date.now();
+
+ yield client.maybeSync(3000, serverTime);
+
+ // File was not updated.
+ equal(fileLastModified, profFile.lastModifiedTime);
+ // Server time was updated.
+ const after = Services.prefs.getIntPref(client.lastCheckTimePref);
+ equal(after, Math.round(serverTime / 1000));
+ }
+});
+add_task(clear_state);
+
+
+
+// get a response for a given request from sample data
+function getSampleResponse(req, port) {
+ const responses = {
+ "OPTIONS": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Headers: Content-Length,Expires,Backoff,Retry-After,Last-Modified,Total-Records,ETag,Pragma,Cache-Control,authorization,content-type,if-none-match,Alert,Next-Page",
+ "Access-Control-Allow-Methods: GET,HEAD,OPTIONS,POST,DELETE,OPTIONS",
+ "Access-Control-Allow-Origin: *",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": "null"
+ },
+ "GET:/v1/?": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"settings":{"batch_max_requests":25}, "url":`http://localhost:${port}/v1/`, "documentation":"https://kinto.readthedocs.org/", "version":"1.5.1", "commit":"cbc6f58", "hello":"kinto"})
+ },
+ "GET:/v1/buckets/blocklists/collections/addons/records?_sort=-last_modified": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"3000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "prefs": [],
+ "blockID": "i539",
+ "last_modified": 3000,
+ "versionRange": [{
+ "targetApplication": [],
+ "maxVersion": "*",
+ "minVersion": "0",
+ "severity": "1"
+ }],
+ "guid": "ScorpionSaver@jetpack",
+ "id": "9d500963-d80e-3a91-6e74-66f3811b99cc"
+ }]})
+ },
+ "GET:/v1/buckets/blocklists/collections/plugins/records?_sort=-last_modified": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"3000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "matchFilename": "NPFFAddOn.dll",
+ "blockID": "p28",
+ "id": "7b1e0b3c-e390-a817-11b6-a6887f65f56e",
+ "last_modified": 3000,
+ "versionRange": []
+ }]})
+ },
+ "GET:/v1/buckets/blocklists/collections/gfx/records?_sort=-last_modified": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"3000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "driverVersionComparator": "LESS_THAN_OR_EQUAL",
+ "driverVersion": "8.17.12.5896",
+ "vendor": "0x10de",
+ "blockID": "g36",
+ "feature": "DIRECT3D_9_LAYERS",
+ "devices": ["0x0a6c"],
+ "featureStatus": "BLOCKED_DRIVER_VERSION",
+ "last_modified": 3000,
+ "os": "WINNT 6.1",
+ "id": "3f947f16-37c2-4e96-d356-78b26363729b"
+ }]})
+ },
+ "GET:/v1/buckets/blocklists/collections/addons/records?_sort=-last_modified&_since=3000": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"4000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "prefs": [],
+ "blockID": "i808",
+ "last_modified": 4000,
+ "versionRange": [{
+ "targetApplication": [],
+ "maxVersion": "*",
+ "minVersion": "0",
+ "severity": "3"
+ }],
+ "guid": "{c96d1ae6-c4cf-4984-b110-f5f561b33b5a}",
+ "id": "9ccfac91-e463-c30c-f0bd-14143794a8dd"
+ }, {
+ "prefs": ["browser.startup.homepage"],
+ "blockID": "i720",
+ "last_modified": 3500,
+ "versionRange": [{
+ "targetApplication": [],
+ "maxVersion": "*",
+ "minVersion": "0",
+ "severity": "1"
+ }],
+ "guid": "FXqG@xeeR.net",
+ "id": "cf9b3129-a97e-dbd7-9525-a8575ac03c25"
+ }]})
+ },
+ "GET:/v1/buckets/blocklists/collections/plugins/records?_sort=-last_modified&_since=3000": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"4000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "infoURL": "https://get.adobe.com/flashplayer/",
+ "blockID": "p1044",
+ "matchFilename": "libflashplayer\\.so",
+ "last_modified": 4000,
+ "versionRange": [{
+ "targetApplication": [],
+ "minVersion": "11.2.202.509",
+ "maxVersion": "11.2.202.539",
+ "severity": "0",
+ "vulnerabilityStatus": "1"
+ }],
+ "os": "Linux",
+ "id": "aabad965-e556-ffe7-4191-074f5dee3df3"
+ }, {
+ "matchFilename": "npViewpoint.dll",
+ "blockID": "p32",
+ "id": "1f48af42-c508-b8ef-b8d5-609d48e4f6c9",
+ "last_modified": 3500,
+ "versionRange": [{
+ "targetApplication": [{
+ "minVersion": "3.0",
+ "guid": "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
+ "maxVersion": "*"
+ }]
+ }]
+ }]})
+ },
+ "GET:/v1/buckets/blocklists/collections/gfx/records?_sort=-last_modified&_since=3000": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"4000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{
+ "vendor": "0x8086",
+ "blockID": "g204",
+ "feature": "WEBGL_MSAA",
+ "devices": [],
+ "id": "c96bca82-e6bd-044d-14c4-9c1d67e9283a",
+ "last_modified": 4000,
+ "os": "Darwin 10",
+ "featureStatus": "BLOCKED_DEVICE"
+ }, {
+ "vendor": "0x10de",
+ "blockID": "g200",
+ "feature": "WEBGL_MSAA",
+ "devices": [],
+ "id": "c3a15ba9-e0e2-421f-e399-c995e5b8d14e",
+ "last_modified": 3500,
+ "os": "Darwin 11",
+ "featureStatus": "BLOCKED_DEVICE"
+ }]})
+ }
+ };
+ return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+ responses[req.method];
+
+}
diff --git a/services/common/tests/unit/test_blocklist_signatures.js b/services/common/tests/unit/test_blocklist_signatures.js
new file mode 100644
index 000000000..b2ee1019a
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_signatures.js
@@ -0,0 +1,510 @@
+"use strict";
+
+Cu.import("resource://services-common/blocklist-updater.js");
+Cu.import("resource://testing-common/httpd.js");
+
+const { loadKinto } = Cu.import("resource://services-common/kinto-offline-client.js");
+const { NetUtil } = Cu.import("resource://gre/modules/NetUtil.jsm", {});
+const { OneCRLBlocklistClient } = Cu.import("resource://services-common/blocklist-clients.js");
+
+let server;
+
+const PREF_BLOCKLIST_BUCKET = "services.blocklist.bucket";
+const PREF_BLOCKLIST_ENFORCE_SIGNING = "services.blocklist.signing.enforced";
+const PREF_BLOCKLIST_ONECRL_COLLECTION = "services.blocklist.onecrl.collection";
+const PREF_SETTINGS_SERVER = "services.settings.server";
+const PREF_SIGNATURE_ROOT = "security.content.signature.root_hash";
+
+
+const CERT_DIR = "test_blocklist_signatures/";
+const CHAIN_FILES =
+ ["collection_signing_ee.pem",
+ "collection_signing_int.pem",
+ "collection_signing_root.pem"];
+
+function getFileData(file) {
+ const stream = Cc["@mozilla.org/network/file-input-stream;1"]
+ .createInstance(Ci.nsIFileInputStream);
+ stream.init(file, -1, 0, 0);
+ const data = NetUtil.readInputStreamToString(stream, stream.available());
+ stream.close();
+ return data;
+}
+
+function setRoot() {
+ const filename = CERT_DIR + CHAIN_FILES[0];
+
+ const certFile = do_get_file(filename, false);
+ const b64cert = getFileData(certFile)
+ .replace(/-----BEGIN CERTIFICATE-----/, "")
+ .replace(/-----END CERTIFICATE-----/, "")
+ .replace(/[\r\n]/g, "");
+ const certdb = Cc["@mozilla.org/security/x509certdb;1"]
+ .getService(Ci.nsIX509CertDB);
+ const cert = certdb.constructX509FromBase64(b64cert);
+ Services.prefs.setCharPref(PREF_SIGNATURE_ROOT, cert.sha256Fingerprint);
+}
+
+function getCertChain() {
+ const chain = [];
+ for (let file of CHAIN_FILES) {
+ chain.push(getFileData(do_get_file(CERT_DIR + file)));
+ }
+ return chain.join("\n");
+}
+
+function* checkRecordCount(count) {
+ // open the collection manually
+ const base = Services.prefs.getCharPref(PREF_SETTINGS_SERVER);
+ const bucket = Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET);
+ const collectionName =
+ Services.prefs.getCharPref(PREF_BLOCKLIST_ONECRL_COLLECTION);
+
+ const Kinto = loadKinto();
+
+ const FirefoxAdapter = Kinto.adapters.FirefoxAdapter;
+
+ const config = {
+ remote: base,
+ bucket: bucket,
+ adapter: FirefoxAdapter,
+ };
+
+ const db = new Kinto(config);
+ const collection = db.collection(collectionName);
+
+ yield collection.db.open();
+
+ // Check we have the expected number of records
+ let records = yield collection.list();
+ do_check_eq(count, records.data.length);
+
+ // Close the collection so the test can exit cleanly
+ yield collection.db.close();
+}
+
+// Check that syncs verify content signatures for the OneCRL collection, and
+// that the local collection is reset when signature verification fails
+add_task(function* test_check_signatures(){
+ const port = server.identity.primaryPort;
+
+ // a response to give the client when the cert chain is expected
+ function makeMetaResponseBody(lastModified, signature) {
+ return {
+ data: {
+ id: "certificates",
+ last_modified: lastModified,
+ signature: {
+ x5u: `http://localhost:${port}/test_blocklist_signatures/test_cert_chain.pem`,
+ public_key: "fake",
+ "content-signature": `x5u=http://localhost:${port}/test_blocklist_signatures/test_cert_chain.pem;p384ecdsa=${signature}`,
+ signature_encoding: "rs_base64url",
+ signature: signature,
+ hash_algorithm: "sha384",
+ ref: "1yryrnmzou5rf31ou80znpnq8n"
+ }
+ }
+ };
+ }
+
+ function makeMetaResponse(eTag, body, comment) {
+ return {
+ comment: comment,
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ `ETag: \"${eTag}\"`
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify(body)
+ };
+ }
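+
+  // Note: during a sync the client fetches the x5u URL in the meta response
+  // above to obtain the certificate chain (served as RESPONSE_CERT_CHAIN
+  // below) and verifies the p384ecdsa signature over the collection records
+  // against that chain.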
+
+ function registerHandlers(responses){
+ function handleResponse (serverTimeMillis, request, response) {
+ const key = `${request.method}:${request.path}?${request.queryString}`;
+ const available = responses[key];
+ const sampled = available.length > 1 ? available.shift() : available[0];
+
+ if (!sampled) {
+ do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
+ }
+
+ response.setStatusLine(null, sampled.status.status,
+ sampled.status.statusText);
+ // send the headers
+ for (let headerLine of sampled.sampleHeaders) {
+ let headerElements = headerLine.split(':');
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+
+ // set the server date
+ response.setHeader("Date", (new Date(serverTimeMillis)).toUTCString());
+
+ response.write(sampled.responseBody);
+ }
+
+ for (let key of Object.keys(responses)) {
+ const keyParts = key.split(":");
+ const method = keyParts[0];
+ const valueParts = keyParts[1].split("?");
+ const path = valueParts[0];
+
+ server.registerPathHandler(path, handleResponse.bind(null, 2000));
+ }
+ }
+
+ // First, perform a signature verification with known data and signature
+ // to ensure things are working correctly
+ let verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
+ .createInstance(Ci.nsIContentSignatureVerifier);
+
+ const emptyData = '[]';
+ const emptySignature = "p384ecdsa=zbugm2FDitsHwk5-IWsas1PpWwY29f0Fg5ZHeqD8fzep7AVl2vfcaHA7LdmCZ28qZLOioGKvco3qT117Q4-HlqFTJM7COHzxGyU2MMJ0ZTnhJrPOC1fP3cVQjU1PTWi9";
+ const name = "onecrl.content-signature.mozilla.org";
+ ok(verifier.verifyContentSignature(emptyData, emptySignature,
+ getCertChain(), name));
+
+ verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
+ .createInstance(Ci.nsIContentSignatureVerifier);
+
+ const collectionData = '[{"details":{"bug":"https://bugzilla.mozilla.org/show_bug.cgi?id=1155145","created":"2016-01-18T14:43:37Z","name":"GlobalSign certs","who":".","why":"."},"enabled":true,"id":"97fbf7c4-3ef2-f54f-0029-1ba6540c63ea","issuerName":"MHExKDAmBgNVBAMTH0dsb2JhbFNpZ24gUm9vdFNpZ24gUGFydG5lcnMgQ0ExHTAbBgNVBAsTFFJvb3RTaWduIFBhcnRuZXJzIENBMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMQswCQYDVQQGEwJCRQ==","last_modified":2000,"serialNumber":"BAAAAAABA/A35EU="},{"details":{"bug":"https://bugzilla.mozilla.org/show_bug.cgi?id=1155145","created":"2016-01-18T14:48:11Z","name":"GlobalSign certs","who":".","why":"."},"enabled":true,"id":"e3bd531e-1ee4-7407-27ce-6fdc9cecbbdc","issuerName":"MIGBMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTElMCMGA1UECxMcUHJpbWFyeSBPYmplY3QgUHVibGlzaGluZyBDQTEwMC4GA1UEAxMnR2xvYmFsU2lnbiBQcmltYXJ5IE9iamVjdCBQdWJsaXNoaW5nIENB","last_modified":3000,"serialNumber":"BAAAAAABI54PryQ="}]';
+ const collectionSignature = "p384ecdsa=f4pA2tYM5jQgWY6YUmhUwQiBLj6QO5sHLD_5MqLePz95qv-7cNCuQoZnPQwxoptDtW8hcWH3kLb0quR7SB-r82gkpR9POVofsnWJRA-ETb0BcIz6VvI3pDT49ZLlNg3p";
+
+ ok(verifier.verifyContentSignature(collectionData, collectionSignature, getCertChain(), name));
+
+ // set up prefs so the kinto updater talks to the test server
+ Services.prefs.setCharPref(PREF_SETTINGS_SERVER,
+ `http://localhost:${server.identity.primaryPort}/v1`);
+
+ // Set up some data we need for our test
+ let startTime = Date.now();
+
+ // These are records we'll use in the test collections
+ const RECORD1 = {
+ details: {
+ bug: "https://bugzilla.mozilla.org/show_bug.cgi?id=1155145",
+ created: "2016-01-18T14:43:37Z",
+ name: "GlobalSign certs",
+ who: ".",
+ why: "."
+ },
+ enabled: true,
+ id: "97fbf7c4-3ef2-f54f-0029-1ba6540c63ea",
+ issuerName: "MHExKDAmBgNVBAMTH0dsb2JhbFNpZ24gUm9vdFNpZ24gUGFydG5lcnMgQ0ExHTAbBgNVBAsTFFJvb3RTaWduIFBhcnRuZXJzIENBMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMQswCQYDVQQGEwJCRQ==",
+ last_modified: 2000,
+ serialNumber: "BAAAAAABA/A35EU="
+ };
+
+ const RECORD2 = {
+ details: {
+ bug: "https://bugzilla.mozilla.org/show_bug.cgi?id=1155145",
+ created: "2016-01-18T14:48:11Z",
+ name: "GlobalSign certs",
+ who: ".",
+ why: "."
+ },
+ enabled: true,
+ id: "e3bd531e-1ee4-7407-27ce-6fdc9cecbbdc",
+ issuerName: "MIGBMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTElMCMGA1UECxMcUHJpbWFyeSBPYmplY3QgUHVibGlzaGluZyBDQTEwMC4GA1UEAxMnR2xvYmFsU2lnbiBQcmltYXJ5IE9iamVjdCBQdWJsaXNoaW5nIENB",
+ last_modified: 3000,
+ serialNumber: "BAAAAAABI54PryQ="
+ };
+
+ const RECORD3 = {
+ details: {
+ bug: "https://bugzilla.mozilla.org/show_bug.cgi?id=1155145",
+ created: "2016-01-18T14:48:11Z",
+ name: "GlobalSign certs",
+ who: ".",
+ why: "."
+ },
+ enabled: true,
+ id: "c7c49b69-a4ab-418e-92a9-e1961459aa7f",
+ issuerName: "MIGBMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTElMCMGA1UECxMcUHJpbWFyeSBPYmplY3QgUHVibGlzaGluZyBDQTEwMC4GA1UEAxMnR2xvYmFsU2lnbiBQcmltYXJ5IE9iamVjdCBQdWJsaXNoaW5nIENB",
+ last_modified: 4000,
+ serialNumber: "BAAAAAABI54PryQ="
+ };
+
+ const RECORD1_DELETION = {
+ deleted: true,
+ enabled: true,
+ id: "97fbf7c4-3ef2-f54f-0029-1ba6540c63ea",
+ last_modified: 3500,
+ };
+
+ // Check that a signature on an empty collection is OK
+ // We need to set up paths on the HTTP server to return specific data from
+ // specific paths for each test. Here we prepare data for each response.
+
+  // A cert chain response (this is the cert chain that contains the signing
+ // cert, the root and any intermediates in between). This is used in each
+ // sync.
+ const RESPONSE_CERT_CHAIN = {
+ comment: "RESPONSE_CERT_CHAIN",
+ sampleHeaders: [
+ "Content-Type: text/plain; charset=UTF-8"
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: getCertChain()
+ };
+
+ // A server settings response. This is used in each sync.
+ const RESPONSE_SERVER_SETTINGS = {
+ comment: "RESPONSE_SERVER_SETTINGS",
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify({"settings":{"batch_max_requests":25}, "url":`http://localhost:${port}/v1/`, "documentation":"https://kinto.readthedocs.org/", "version":"1.5.1", "commit":"cbc6f58", "hello":"kinto"})
+ };
+
+ // This is the initial, empty state of the collection. This is only used
+ // for the first sync.
+ const RESPONSE_EMPTY_INITIAL = {
+ comment: "RESPONSE_EMPTY_INITIAL",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ "ETag: \"1000\""
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify({"data": []})
+ };
+
+ const RESPONSE_BODY_META_EMPTY_SIG = makeMetaResponseBody(1000,
+ "vxuAg5rDCB-1pul4a91vqSBQRXJG_j7WOYUTswxRSMltdYmbhLRH8R8brQ9YKuNDF56F-w6pn4HWxb076qgKPwgcEBtUeZAO_RtaHXRkRUUgVzAr86yQL4-aJTbv3D6u");
+
+ // The collection metadata containing the signature for the empty
+ // collection.
+ const RESPONSE_META_EMPTY_SIG =
+ makeMetaResponse(1000, RESPONSE_BODY_META_EMPTY_SIG,
+ "RESPONSE_META_EMPTY_SIG");
+
+ // Here, we map request method and path to the available responses
+ const emptyCollectionResponses = {
+ "GET:/test_blocklist_signatures/test_cert_chain.pem?":[RESPONSE_CERT_CHAIN],
+ "GET:/v1/?": [RESPONSE_SERVER_SETTINGS],
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified":
+ [RESPONSE_EMPTY_INITIAL],
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_EMPTY_SIG]
+ };
+
+ // .. and use this map to register handlers for each path
+ registerHandlers(emptyCollectionResponses);
+
+ // With all of this set up, we attempt a sync. This will resolve if all is
+ // well and throw if something goes wrong.
+ yield OneCRLBlocklistClient.maybeSync(1000, startTime);
+
+ // Check that some additions (2 records) to the collection have a valid
+ // signature.
+
+ // This response adds two entries (RECORD1 and RECORD2) to the collection
+ const RESPONSE_TWO_ADDED = {
+ comment: "RESPONSE_TWO_ADDED",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ "ETag: \"3000\""
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify({"data": [RECORD2, RECORD1]})
+ };
+
+ const RESPONSE_BODY_META_TWO_ITEMS_SIG = makeMetaResponseBody(3000,
+ "dwhJeypadNIyzGj3QdI0KMRTPnHhFPF_j73mNrsPAHKMW46S2Ftf4BzsPMvPMB8h0TjDus13wo_R4l432DHe7tYyMIWXY0PBeMcoe5BREhFIxMxTsh9eGVXBD1e3UwRy");
+
+  // A signature response for the collection containing RECORD1 and RECORD2
+ const RESPONSE_META_TWO_ITEMS_SIG =
+ makeMetaResponse(3000, RESPONSE_BODY_META_TWO_ITEMS_SIG,
+ "RESPONSE_META_TWO_ITEMS_SIG");
+
+ const twoItemsResponses = {
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=1000":
+ [RESPONSE_TWO_ADDED],
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_TWO_ITEMS_SIG]
+ };
+ registerHandlers(twoItemsResponses);
+ yield OneCRLBlocklistClient.maybeSync(3000, startTime);
+
+ // Check the collection with one addition and one removal has a valid
+ // signature
+
+ // Remove RECORD1, add RECORD3
+ const RESPONSE_ONE_ADDED_ONE_REMOVED = {
+ comment: "RESPONSE_ONE_ADDED_ONE_REMOVED ",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ "ETag: \"4000\""
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify({"data": [RECORD3, RECORD1_DELETION]})
+ };
+
+ const RESPONSE_BODY_META_THREE_ITEMS_SIG = makeMetaResponseBody(4000,
+ "MIEmNghKnkz12UodAAIc3q_Y4a3IJJ7GhHF4JYNYmm8avAGyPM9fYU7NzVo94pzjotG7vmtiYuHyIX2rTHTbT587w0LdRWxipgFd_PC1mHiwUyjFYNqBBG-kifYk7kEw");
+
+ // signature response for the collection containing RECORD2 and RECORD3
+ const RESPONSE_META_THREE_ITEMS_SIG =
+ makeMetaResponse(4000, RESPONSE_BODY_META_THREE_ITEMS_SIG,
+ "RESPONSE_META_THREE_ITEMS_SIG");
+
+ const oneAddedOneRemovedResponses = {
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=3000":
+ [RESPONSE_ONE_ADDED_ONE_REMOVED],
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_THREE_ITEMS_SIG]
+ };
+ registerHandlers(oneAddedOneRemovedResponses);
+ yield OneCRLBlocklistClient.maybeSync(4000, startTime);
+
+ // Check the signature is still valid with no operation (no changes)
+
+ // Leave the collection unchanged
+ const RESPONSE_EMPTY_NO_UPDATE = {
+ comment: "RESPONSE_EMPTY_NO_UPDATE ",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ "ETag: \"4000\""
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify({"data": []})
+ };
+
+ const noOpResponses = {
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=4000":
+ [RESPONSE_EMPTY_NO_UPDATE],
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_THREE_ITEMS_SIG]
+ };
+ registerHandlers(noOpResponses);
+ yield OneCRLBlocklistClient.maybeSync(4100, startTime);
+
+ // Check the collection is reset when the signature is invalid
+
+ // Prepare a (deliberately) bad signature to check the collection state is
+ // reset if something is inconsistent
+ const RESPONSE_COMPLETE_INITIAL = {
+ comment: "RESPONSE_COMPLETE_INITIAL ",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ "ETag: \"4000\""
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify({"data": [RECORD2, RECORD3]})
+ };
+
+ const RESPONSE_COMPLETE_INITIAL_SORTED_BY_ID = {
+ comment: "RESPONSE_COMPLETE_INITIAL ",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ "ETag: \"4000\""
+ ],
+ status: {status: 200, statusText: "OK"},
+ responseBody: JSON.stringify({"data": [RECORD3, RECORD2]})
+ };
+
+ const RESPONSE_BODY_META_BAD_SIG = makeMetaResponseBody(4000,
+ "aW52YWxpZCBzaWduYXR1cmUK");
+
+ const RESPONSE_META_BAD_SIG =
+ makeMetaResponse(4000, RESPONSE_BODY_META_BAD_SIG, "RESPONSE_META_BAD_SIG");
+
+ const badSigGoodSigResponses = {
+ // In this test, we deliberately serve a bad signature initially. The
+ // subsequent signature returned is a valid one for the three item
+ // collection.
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_BAD_SIG, RESPONSE_META_THREE_ITEMS_SIG],
+ // The first collection state is the three item collection (since
+ // there's a sync with no updates) - but, since the signature is wrong,
+ // another request will be made...
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=4000":
+ [RESPONSE_EMPTY_NO_UPDATE],
+ // The next request is for the full collection. This will be checked
+ // against the valid signature - so the sync should succeed.
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified":
+ [RESPONSE_COMPLETE_INITIAL],
+ // The next request is for the full collection sorted by id. This will be
+ // checked against the valid signature - so the sync should succeed.
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=id":
+ [RESPONSE_COMPLETE_INITIAL_SORTED_BY_ID]
+ };
+
+ registerHandlers(badSigGoodSigResponses);
+ yield OneCRLBlocklistClient.maybeSync(5000, startTime);
+
+ const badSigGoodOldResponses = {
+    // In this test, we deliberately serve a bad signature initially. The
+    // subsequent signature returned is a valid one, but for the (older) empty
+    // collection.
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_BAD_SIG, RESPONSE_META_EMPTY_SIG],
+ // The first collection state is the current state (since there's no update
+ // - but, since the signature is wrong, another request will be made)
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=4000":
+ [RESPONSE_EMPTY_NO_UPDATE],
+ // The next request is for the full collection sorted by id. This will be
+ // checked against the valid signature and last_modified times will be
+ // compared. Sync should fail, even though the signature is good,
+ // because the local collection is newer.
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=id":
+ [RESPONSE_EMPTY_INITIAL],
+ };
+
+ // ensure our collection hasn't been replaced with an older, empty one
+ yield checkRecordCount(2);
+
+ registerHandlers(badSigGoodOldResponses);
+ yield OneCRLBlocklistClient.maybeSync(5000, startTime);
+
+ const allBadSigResponses = {
+ // In this test, we deliberately serve only a bad signature.
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_BAD_SIG],
+ // The first collection state is the three item collection (since
+ // there's a sync with no updates) - but, since the signature is wrong,
+ // another request will be made...
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=4000":
+ [RESPONSE_EMPTY_NO_UPDATE],
+    // The next request is for the full collection sorted by id. This will be
+    // checked against the (still bad) signature - so the sync should fail.
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=id":
+ [RESPONSE_COMPLETE_INITIAL_SORTED_BY_ID]
+ };
+
+ registerHandlers(allBadSigResponses);
+ try {
+ yield OneCRLBlocklistClient.maybeSync(6000, startTime);
+ do_throw("Sync should fail (the signature is intentionally bad)");
+ } catch (e) {
+ yield checkRecordCount(2);
+ }
+});
+
+function run_test() {
+ // ensure signatures are enforced
+ Services.prefs.setBoolPref(PREF_BLOCKLIST_ENFORCE_SIGNING, true);
+
+ // get a signature verifier to ensure nsNSSComponent is initialized
+ Cc["@mozilla.org/security/contentsignatureverifier;1"]
+ .createInstance(Ci.nsIContentSignatureVerifier);
+
+ // set the content signing root to our test root
+ setRoot();
+
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ run_next_test();
+
+ do_register_cleanup(function() {
+ server.stop(function() { });
+ });
+}
+
+
diff --git a/services/common/tests/unit/test_blocklist_signatures/collection_signing_ee.pem.certspec b/services/common/tests/unit/test_blocklist_signatures/collection_signing_ee.pem.certspec
new file mode 100644
index 000000000..866c357c5
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_signatures/collection_signing_ee.pem.certspec
@@ -0,0 +1,5 @@
+issuer:collection-signer-int-CA
+subject:collection-signer-ee-int-CA
+subjectKey:secp384r1
+extension:extKeyUsage:codeSigning
+extension:subjectAlternativeName:onecrl.content-signature.mozilla.org
diff --git a/services/common/tests/unit/test_blocklist_signatures/collection_signing_int.pem.certspec b/services/common/tests/unit/test_blocklist_signatures/collection_signing_int.pem.certspec
new file mode 100644
index 000000000..8ca4815fa
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_signatures/collection_signing_int.pem.certspec
@@ -0,0 +1,4 @@
+issuer:collection-signer-ca
+subject:collection-signer-int-CA
+extension:basicConstraints:cA,
+extension:extKeyUsage:codeSigning
diff --git a/services/common/tests/unit/test_blocklist_signatures/collection_signing_root.pem.certspec b/services/common/tests/unit/test_blocklist_signatures/collection_signing_root.pem.certspec
new file mode 100644
index 000000000..11bd68768
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_signatures/collection_signing_root.pem.certspec
@@ -0,0 +1,4 @@
+issuer:collection-signer-ca
+subject:collection-signer-ca
+extension:basicConstraints:cA,
+extension:extKeyUsage:codeSigning
diff --git a/services/common/tests/unit/test_blocklist_signatures/moz.build b/services/common/tests/unit/test_blocklist_signatures/moz.build
new file mode 100644
index 000000000..bfcb92c7c
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_signatures/moz.build
@@ -0,0 +1,14 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+test_certificates = (
+ 'collection_signing_root.pem',
+ 'collection_signing_int.pem',
+ 'collection_signing_ee.pem',
+)
+
+for test_certificate in test_certificates:
+ GeneratedTestCertificate(test_certificate)
diff --git a/services/common/tests/unit/test_blocklist_updater.js b/services/common/tests/unit/test_blocklist_updater.js
new file mode 100644
index 000000000..1b71c194a
--- /dev/null
+++ b/services/common/tests/unit/test_blocklist_updater.js
@@ -0,0 +1,173 @@
+Cu.import("resource://testing-common/httpd.js");
+
+var server;
+
+const PREF_SETTINGS_SERVER = "services.settings.server";
+const PREF_LAST_UPDATE = "services.blocklist.last_update_seconds";
+const PREF_LAST_ETAG = "services.blocklist.last_etag";
+const PREF_CLOCK_SKEW_SECONDS = "services.blocklist.clock_skew_seconds";
+
+// Check to ensure maybeSync is called with correct values when a changes
+// document contains information on when a collection was last modified
+add_task(function* test_check_maybeSync(){
+ const changesPath = "/v1/buckets/monitor/collections/changes/records";
+
+ // register a handler
+ function handleResponse (serverTimeMillis, request, response) {
+ try {
+ const sampled = getSampleResponse(request, server.identity.primaryPort);
+ if (!sampled) {
+ do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
+ }
+
+ response.setStatusLine(null, sampled.status.status,
+ sampled.status.statusText);
+ // send the headers
+ for (let headerLine of sampled.sampleHeaders) {
+ let headerElements = headerLine.split(':');
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+
+ // set the server date
+ response.setHeader("Date", (new Date(serverTimeMillis)).toUTCString());
+
+ response.write(sampled.responseBody);
+ } catch (e) {
+ dump(`${e}\n`);
+ }
+ }
+
+ server.registerPathHandler(changesPath, handleResponse.bind(null, 2000));
+
+ // set up prefs so the kinto updater talks to the test server
+ Services.prefs.setCharPref(PREF_SETTINGS_SERVER,
+ `http://localhost:${server.identity.primaryPort}/v1`);
+
+ // set some initial values so we can check these are updated appropriately
+ Services.prefs.setIntPref(PREF_LAST_UPDATE, 0);
+ Services.prefs.setIntPref(PREF_CLOCK_SKEW_SECONDS, 0);
+ Services.prefs.clearUserPref(PREF_LAST_ETAG);
+
+
+ let startTime = Date.now();
+
+ let updater = Cu.import("resource://services-common/blocklist-updater.js");
+
+ let syncPromise = new Promise(function(resolve, reject) {
+ // add a test kinto client that will respond to lastModified information
+ // for a collection called 'test-collection'
+ updater.addTestBlocklistClient("test-collection", {
+ maybeSync(lastModified, serverTime) {
+ do_check_eq(lastModified, 1000);
+ do_check_eq(serverTime, 2000);
+ resolve();
+ }
+ });
+ updater.checkVersions();
+ });
+
+ // ensure we get the maybeSync call
+ yield syncPromise;
+
+ // check the last_update is updated
+ do_check_eq(Services.prefs.getIntPref(PREF_LAST_UPDATE), 2);
+
+ // How does the clock difference look?
+ let endTime = Date.now();
+ let clockDifference = Services.prefs.getIntPref(PREF_CLOCK_SKEW_SECONDS);
+ // we previously set the serverTime to 2 (seconds past epoch)
+ do_check_true(clockDifference <= endTime / 1000
+ && clockDifference >= Math.floor(startTime / 1000) - 2);
+ // Last timestamp was saved. An ETag header value is a quoted string.
+ let lastEtag = Services.prefs.getCharPref(PREF_LAST_ETAG);
+ do_check_eq(lastEtag, "\"1100\"");
+
+ // Simulate a poll with up-to-date collection.
+ Services.prefs.setIntPref(PREF_LAST_UPDATE, 0);
+ // If server has no change, a 304 is received, maybeSync() is not called.
+ updater.addTestBlocklistClient("test-collection", {
+ maybeSync: () => {throw new Error("Should not be called");}
+ });
+ yield updater.checkVersions();
+ // Last update is overwritten
+ do_check_eq(Services.prefs.getIntPref(PREF_LAST_UPDATE), 2);
+
+
+ // Simulate a server error.
+ function simulateErrorResponse (request, response) {
+ response.setHeader("Date", (new Date(3000)).toUTCString());
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.write(JSON.stringify({
+ code: 503,
+ errno: 999,
+ error: "Service Unavailable",
+ }));
+ response.setStatusLine(null, 503, "Service Unavailable");
+ }
+ server.registerPathHandler(changesPath, simulateErrorResponse);
+ // checkVersions() fails with adequate error.
+ let error;
+ try {
+ yield updater.checkVersions();
+ } catch (e) {
+ error = e;
+ }
+ do_check_eq(error.message, "Polling for changes failed.");
+ // When an error occurs, last update was not overwritten (see Date header above).
+ do_check_eq(Services.prefs.getIntPref(PREF_LAST_UPDATE), 2);
+
+ // check negative clock skew times
+
+ // set to a time in the future
+ server.registerPathHandler(changesPath, handleResponse.bind(null, Date.now() + 10000));
+
+ yield updater.checkVersions();
+
+ clockDifference = Services.prefs.getIntPref(PREF_CLOCK_SKEW_SECONDS);
+  // we previously set the serverTime to Date.now() + 10000 ms (10 s in the future)
+ do_check_true(clockDifference <= 0 && clockDifference >= -10);
+});
+
+function run_test() {
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ run_next_test();
+
+ do_register_cleanup(function() {
+ server.stop(function() { });
+ });
+}
+
+// get a response for a given request from sample data
+function getSampleResponse(req, port) {
+ const responses = {
+ "GET:/v1/buckets/monitor/collections/changes/records?": {
+ "sampleHeaders": [
+ "Content-Type: application/json; charset=UTF-8",
+ "ETag: \"1100\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data": [{
+ "host": "localhost",
+ "last_modified": 1100,
+ "bucket": "blocklists:aurora",
+ "id": "330a0c5f-fadf-ff0b-40c8-4eb0d924ff6a",
+ "collection": "test-collection"
+ }, {
+ "host": "localhost",
+ "last_modified": 1000,
+ "bucket": "blocklists",
+ "id": "254cbb9e-6888-4d9f-8e60-58b74faa8778",
+ "collection": "test-collection"
+ }]})
+ }
+ };
+
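+  // A conditional request whose If-None-Match matches the stored ETag gets an
+  // empty 304 response instead of one of the canned bodies above.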
+ if (req.hasHeader("if-none-match") && req.getHeader("if-none-match", "") == "\"1100\"")
+ return {sampleHeaders: [], status: {status: 304, statusText: "Not Modified"}, responseBody: ""};
+
+ return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+ responses[req.method];
+}
diff --git a/services/common/tests/unit/test_hawkclient.js b/services/common/tests/unit/test_hawkclient.js
new file mode 100644
index 000000000..0896cf00c
--- /dev/null
+++ b/services/common/tests/unit/test_hawkclient.js
@@ -0,0 +1,520 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://services-common/hawkclient.js");
+
+const SECOND_MS = 1000;
+const MINUTE_MS = SECOND_MS * 60;
+const HOUR_MS = MINUTE_MS * 60;
+
+const TEST_CREDS = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256"
+};
+
+initTestLogging("Trace");
+
+add_task(function test_now() {
+ let client = new HawkClient("https://example.com");
+
+ do_check_true(client.now() - Date.now() < SECOND_MS);
+});
+
+add_task(function test_updateClockOffset() {
+ let client = new HawkClient("https://example.com");
+
+ let now = new Date();
+ let serverDate = now.toUTCString();
+
+ // Client's clock is off
+ client.now = () => { return now.valueOf() + HOUR_MS; }
+
+ client._updateClockOffset(serverDate);
+
+ // Check that they're close; there will likely be a one-second rounding
+ // error, so checking strict equality will likely fail.
+ //
+ // localtimeOffsetMsec is how many milliseconds to add to the local clock so
+ // that it agrees with the server. We are one hour ahead of the server, so
+ // our offset should be -1 hour.
+ do_check_true(Math.abs(client.localtimeOffsetMsec + HOUR_MS) <= SECOND_MS);
+});
+
+add_task(function* test_authenticated_get_request() {
+ let message = "{\"msg\": \"Great Success!\"}";
+ let method = "GET";
+
+ let server = httpd_setup({"/foo": (request, response) => {
+ do_check_true(request.hasHeader("Authorization"));
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+
+ let response = yield client.request("/foo", method, TEST_CREDS);
+ let result = JSON.parse(response.body);
+
+ do_check_eq("Great Success!", result.msg);
+
+ yield deferredStop(server);
+});
+
+function* check_authenticated_request(method) {
+ let server = httpd_setup({"/foo": (request, response) => {
+ do_check_true(request.hasHeader("Authorization"));
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json");
+ response.bodyOutputStream.writeFrom(request.bodyInputStream, request.bodyInputStream.available());
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+
+ let response = yield client.request("/foo", method, TEST_CREDS, {foo: "bar"});
+ let result = JSON.parse(response.body);
+
+ do_check_eq("bar", result.foo);
+
+ yield deferredStop(server);
+}
+
+add_task(function* test_authenticated_post_request() {
+  yield* check_authenticated_request("POST");
+});
+
+add_task(function* test_authenticated_put_request() {
+  yield* check_authenticated_request("PUT");
+});
+
+add_task(function* test_authenticated_patch_request() {
+  yield* check_authenticated_request("PATCH");
+});
+
+add_task(function* test_extra_headers() {
+ let server = httpd_setup({"/foo": (request, response) => {
+ do_check_true(request.hasHeader("Authorization"));
+ do_check_true(request.hasHeader("myHeader"));
+ do_check_eq(request.getHeader("myHeader"), "fake");
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json");
+ response.bodyOutputStream.writeFrom(request.bodyInputStream, request.bodyInputStream.available());
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+
+ let response = yield client.request("/foo", "POST", TEST_CREDS, {foo: "bar"},
+ {"myHeader": "fake"});
+ let result = JSON.parse(response.body);
+
+ do_check_eq("bar", result.foo);
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_credentials_optional() {
+ let method = "GET";
+ let server = httpd_setup({
+ "/foo": (request, response) => {
+ do_check_false(request.hasHeader("Authorization"));
+
+ let message = JSON.stringify({msg: "you're in the friend zone"});
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+ let result = yield client.request("/foo", method); // credentials undefined
+ do_check_eq(JSON.parse(result.body).msg, "you're in the friend zone");
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_server_error() {
+ let message = "Ohai!";
+ let method = "GET";
+
+ let server = httpd_setup({"/foo": (request, response) => {
+ response.setStatusLine(request.httpVersion, 418, "I am a Teapot");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+
+ try {
+ yield client.request("/foo", method, TEST_CREDS);
+ do_throw("Expected an error");
+ } catch(err) {
+ do_check_eq(418, err.code);
+ do_check_eq("I am a Teapot", err.message);
+ }
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_server_error_json() {
+ let message = JSON.stringify({error: "Cannot get ye flask."});
+ let method = "GET";
+
+ let server = httpd_setup({"/foo": (request, response) => {
+ response.setStatusLine(request.httpVersion, 400, "What wouldst thou deau?");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+
+ try {
+ yield client.request("/foo", method, TEST_CREDS);
+ do_throw("Expected an error");
+ } catch(err) {
+ do_check_eq("Cannot get ye flask.", err.error);
+ }
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_offset_after_request() {
+ let message = "Ohai!";
+ let method = "GET";
+
+ let server = httpd_setup({"/foo": (request, response) => {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+ let now = Date.now();
+ client.now = () => { return now + HOUR_MS; };
+
+ do_check_eq(client.localtimeOffsetMsec, 0);
+
+ let response = yield client.request("/foo", method, TEST_CREDS);
+ // Should be about an hour off
+ do_check_true(Math.abs(client.localtimeOffsetMsec + HOUR_MS) < SECOND_MS);
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_offset_in_hawk_header() {
+ let message = "Ohai!";
+ let method = "GET";
+
+ let server = httpd_setup({
+ "/first": function(request, response) {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(message, message.length);
+ },
+
+ "/second": function(request, response) {
+ // We see a better date now in the ts component of the header
+ let delta = getTimestampDelta(request.getHeader("Authorization"));
+ let message = "Delta: " + delta;
+
+ // We're now within HAWK's one-minute window.
+ // I hope this isn't a recipe for intermittent oranges ...
+ if (delta < MINUTE_MS) {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ } else {
+ response.setStatusLine(request.httpVersion, 400, "Delta: " + delta);
+ }
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+ function getOffset() {
+ return client.localtimeOffsetMsec;
+ }
+
+ client.now = () => {
+ return Date.now() + 12 * HOUR_MS;
+ };
+
+ // We begin with no offset
+ do_check_eq(client.localtimeOffsetMsec, 0);
+ yield client.request("/first", method, TEST_CREDS);
+
+ // After the first server response, our offset is updated to -12 hours.
+ // We should be safely in the window, now.
+ do_check_true(Math.abs(client.localtimeOffsetMsec + 12 * HOUR_MS) < MINUTE_MS);
+ yield client.request("/second", method, TEST_CREDS);
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_2xx_success() {
+ // Just to ensure that we're not biased toward 200 OK for success
+ let credentials = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256"
+ };
+ let method = "GET";
+
+ let server = httpd_setup({"/foo": (request, response) => {
+ response.setStatusLine(request.httpVersion, 202, "Accepted");
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+
+ let response = yield client.request("/foo", method, credentials);
+
+ // Shouldn't be any content in a 202
+ do_check_eq(response.body, "");
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_retry_request_on_fail() {
+ let attempts = 0;
+ let credentials = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256"
+ };
+ let method = "GET";
+
+ let server = httpd_setup({
+ "/maybe": function(request, response) {
+ // This path should be hit exactly twice; once with a bad timestamp, and
+ // again when the client retries the request with a corrected timestamp.
+ attempts += 1;
+ do_check_true(attempts <= 2);
+
+ let delta = getTimestampDelta(request.getHeader("Authorization"));
+
+ // First time through, we should have a bad timestamp
+ if (attempts === 1) {
+ do_check_true(delta > MINUTE_MS);
+ let message = "never!!!";
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.bodyOutputStream.write(message, message.length);
+ return;
+ }
+
+ // Second time through, timestamp should be corrected by client
+ do_check_true(delta < MINUTE_MS);
+ let message = "i love you!!!";
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(message, message.length);
+ return;
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+ function getOffset() {
+ return client.localtimeOffsetMsec;
+ }
+
+ client.now = () => {
+ return Date.now() + 12 * HOUR_MS;
+ };
+
+ // We begin with no offset
+ do_check_eq(client.localtimeOffsetMsec, 0);
+
+ // Request will have bad timestamp; client will retry once
+ let response = yield client.request("/maybe", method, credentials);
+ do_check_eq(response.body, "i love you!!!");
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_multiple_401_retry_once() {
+ // Like test_retry_request_on_fail, but always return a 401
+ // and ensure that the client only retries once.
+ let attempts = 0;
+ let credentials = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256"
+ };
+ let method = "GET";
+
+ let server = httpd_setup({
+ "/maybe": function(request, response) {
+ // This path should be hit exactly twice; once with a bad timestamp, and
+ // again when the client retries the request with a corrected timestamp.
+ attempts += 1;
+
+ do_check_true(attempts <= 2);
+
+ let message = "never!!!";
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+ function getOffset() {
+ return client.localtimeOffsetMsec;
+ }
+
+ client.now = () => {
+ return Date.now() - 12 * HOUR_MS;
+ };
+
+ // We begin with no offset
+ do_check_eq(client.localtimeOffsetMsec, 0);
+
+ // Request will have bad timestamp; client will retry once
+ try {
+ yield client.request("/maybe", method, credentials);
+ do_throw("Expected an error");
+ } catch (err) {
+ do_check_eq(err.code, 401);
+ }
+ do_check_eq(attempts, 2);
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_500_no_retry() {
+ // If we get a 500 error, the client should not retry (as it would with a
+ // 401)
+ let credentials = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256"
+ };
+ let method = "GET";
+
+ let server = httpd_setup({
+ "/no-shutup": function() {
+ let message = "Cannot get ye flask.";
+ response.setStatusLine(request.httpVersion, 500, "Internal server error");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+ function getOffset() {
+ return client.localtimeOffsetMsec;
+ }
+
+ // Throw off the clock so the HawkClient would want to retry the request if
+ // it could
+ client.now = () => {
+ return Date.now() - 12 * HOUR_MS;
+ };
+
+ // Request will 500; no retries
+ try {
+ yield client.request("/no-shutup", method, credentials);
+ do_throw("Expected an error");
+ } catch(err) {
+ do_check_eq(err.code, 500);
+ }
+
+ yield deferredStop(server);
+});
+
+add_task(function* test_401_then_500() {
+ // Like test_multiple_401_retry_once, but return a 500 to the
+ // second request, ensuring that the promise is properly rejected
+ // in client.request.
+ let attempts = 0;
+ let credentials = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256"
+ };
+ let method = "GET";
+
+ let server = httpd_setup({
+ "/maybe": function(request, response) {
+ // This path should be hit exactly twice; once with a bad timestamp, and
+ // again when the client retries the request with a corrected timestamp.
+ attempts += 1;
+ do_check_true(attempts <= 2);
+
+ let delta = getTimestampDelta(request.getHeader("Authorization"));
+
+ // First time through, we should have a bad timestamp
+ // Client will retry
+ if (attempts === 1) {
+ do_check_true(delta > MINUTE_MS);
+ let message = "never!!!";
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.bodyOutputStream.write(message, message.length);
+ return;
+ }
+
+ // Second time through, timestamp should be corrected by client
+ // And fail on the client
+ do_check_true(delta < MINUTE_MS);
+ let message = "Cannot get ye flask.";
+ response.setStatusLine(request.httpVersion, 500, "Internal server error");
+ response.bodyOutputStream.write(message, message.length);
+ return;
+ }
+ });
+
+ let client = new HawkClient(server.baseURI);
+ function getOffset() {
+ return client.localtimeOffsetMsec;
+ }
+
+ client.now = () => {
+ return Date.now() - 12 * HOUR_MS;
+ };
+
+ // We begin with no offset
+ do_check_eq(client.localtimeOffsetMsec, 0);
+
+ // Request will have bad timestamp; client will retry once
+ try {
+ yield client.request("/maybe", method, credentials);
+ } catch(err) {
+ do_check_eq(err.code, 500);
+ }
+ do_check_eq(attempts, 2);
+
+ yield deferredStop(server);
+});
+
+add_task(function* throw_if_not_json_body() {
+ let client = new HawkClient("https://example.com");
+ try {
+ yield client.request("/bogus", "GET", {}, "I am not json");
+ do_throw("Expected an error");
+ } catch(err) {
+ do_check_true(!!err.message);
+ }
+});
+
+// End of tests.
+// Utility functions follow
+
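+// Extract the ts="..." value from a Hawk Authorization header and return its
+// absolute distance, in milliseconds, from the supplied reference time.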
+function getTimestampDelta(authHeader, now=Date.now()) {
+ let tsMS = new Date(
+ parseInt(/ts="(\d+)"/.exec(authHeader)[1], 10) * SECOND_MS);
+ return Math.abs(tsMS - now);
+}
+
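+// Stop the given httpd.js server, returning a promise that resolves once the
+// shutdown callback has fired.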
+function deferredStop(server) {
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve);
+ return deferred.promise;
+}
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
diff --git a/services/common/tests/unit/test_hawkrequest.js b/services/common/tests/unit/test_hawkrequest.js
new file mode 100644
index 000000000..7f598125a
--- /dev/null
+++ b/services/common/tests/unit/test_hawkrequest.js
@@ -0,0 +1,235 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://services-common/hawkrequest.js");
+
+// https://github.com/mozilla/fxa-auth-server/wiki/onepw-protocol#wiki-use-session-certificatesign-etc
+var SESSION_KEYS = {
+ sessionToken: h("a0a1a2a3a4a5a6a7 a8a9aaabacadaeaf"+
+ "b0b1b2b3b4b5b6b7 b8b9babbbcbdbebf"),
+
+ tokenID: h("c0a29dcf46174973 da1378696e4c82ae"+
+ "10f723cf4f4d9f75 e39f4ae3851595ab"),
+
+ reqHMACkey: h("9d8f22998ee7f579 8b887042466b72d5"+
+ "3e56ab0c094388bf 65831f702d2febc0"),
+};
+
+do_register_cleanup(function() {
+  Services.prefs.resetUserPrefs();
+
+  // remove the pref change listener
+  let hawk = new HAWKAuthenticatedRESTRequest("https://example.com");
+  hawk._intl.uninit();
+});
+
+function run_test() {
+ Log.repository.getLogger("Services.Common.RESTRequest").level =
+ Log.Level.Trace;
+ initTestLogging("Trace");
+
+ run_next_test();
+}
+
+
+add_test(function test_intl_accept_language() {
+ let testCount = 0;
+ let languages = [
+ "zu-NP;vo", // Nepalese dialect of Zulu, defaulting to Volapük
+ "fa-CG;ik", // Congolese dialect of Farsei, defaulting to Inupiaq
+ ];
+
+ function setLanguagePref(lang) {
+ let acceptLanguage = Cc["@mozilla.org/supports-string;1"]
+ .createInstance(Ci.nsISupportsString);
+ acceptLanguage.data = lang;
+ Services.prefs.setComplexValue(
+ "intl.accept_languages", Ci.nsISupportsString, acceptLanguage);
+ }
+
+ let hawk = new HAWKAuthenticatedRESTRequest("https://example.com");
+
+ Services.prefs.addObserver("intl.accept_languages", checkLanguagePref, false);
+ setLanguagePref(languages[testCount]);
+
+ function checkLanguagePref() {
+ var _done = false;
+ CommonUtils.nextTick(function() {
+ // Ensure we're only called for the number of entries in languages[].
+ do_check_true(testCount < languages.length);
+
+ do_check_eq(hawk._intl.accept_languages, languages[testCount]);
+
+ testCount++;
+ if (testCount < languages.length) {
+ // Set next language in prefs; Pref service will call checkNextLanguage.
+ setLanguagePref(languages[testCount]);
+ return;
+ }
+
+ // We've checked all the entries in languages[]. Cleanup and move on.
+ do_print("Checked " + testCount + " languages. Removing checkLanguagePref as pref observer.");
+ Services.prefs.removeObserver("intl.accept_languages", checkLanguagePref);
+ run_next_test();
+ return;
+ });
+ }
+});
+
+add_test(function test_hawk_authenticated_request() {
+ let onProgressCalled = false;
+ let postData = {your: "data"};
+
+ // An arbitrary date - Feb 2, 1971. It ends in a bunch of zeroes to make our
+ // computation with the hawk timestamp easier, since hawk throws away the
+ // millisecond values.
+ let then = 34329600000;
+
+ let clockSkew = 120000;
+ let timeOffset = -1 * clockSkew;
+ let localTime = then + clockSkew;
+
+ // Set the accept-languages pref to the Nepalese dialect of Zulu.
+ let acceptLanguage = Cc['@mozilla.org/supports-string;1'].createInstance(Ci.nsISupportsString);
+ acceptLanguage.data = 'zu-NP'; // omit trailing ';', which our HTTP libs snip
+ Services.prefs.setComplexValue('intl.accept_languages', Ci.nsISupportsString, acceptLanguage);
+
+ let credentials = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256"
+ };
+
+ let server = httpd_setup({
+ "/elysium": function(request, response) {
+ do_check_true(request.hasHeader("Authorization"));
+
+ // check that the header timestamp is our arbitrary system date, not
+ // today's date. Note that hawk header timestamps are in seconds, not
+ // milliseconds.
+ let authorization = request.getHeader("Authorization");
+ let tsMS = parseInt(/ts="(\d+)"/.exec(authorization)[1], 10) * 1000;
+ do_check_eq(tsMS, then);
+
+ // This testing can be a little wonky. In an environment where
+ // pref("intl.accept_languages") === 'en-US, en'
+ // the header is sent as:
+ // 'en-US,en;q=0.5'
+ // hence our fake value for acceptLanguage.
+ let lang = request.getHeader("Accept-Language");
+ do_check_eq(lang, acceptLanguage);
+
+ let message = "yay";
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+
+ function onProgress() {
+ onProgressCalled = true;
+ }
+
+ function onComplete(error) {
+ do_check_eq(200, this.response.status);
+ do_check_eq(this.response.body, "yay");
+ do_check_true(onProgressCalled);
+
+ Services.prefs.resetUserPrefs();
+ let pref = Services.prefs.getComplexValue(
+ "intl.accept_languages", Ci.nsIPrefLocalizedString);
+ do_check_neq(acceptLanguage.data, pref.data);
+
+ server.stop(run_next_test);
+ }
+
+ let url = server.baseURI + "/elysium";
+ let extra = {
+ now: localTime,
+ localtimeOffsetMsec: timeOffset
+ };
+
+ let request = new HAWKAuthenticatedRESTRequest(url, credentials, extra);
+
+ // Allow hawk._intl to respond to the language pref change
+ CommonUtils.nextTick(function() {
+ request.post(postData, onComplete, onProgress);
+ });
+});
+
+add_test(function test_hawk_language_pref_changed() {
+ let languages = [
+ "zu-NP", // Nepalese dialect of Zulu
+ "fa-CG", // Congolese dialect of Farsi
+ ];
+
+ let credentials = {
+ id: "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x",
+ key: "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=",
+ algorithm: "sha256",
+ };
+
+ function setLanguage(lang) {
+ let acceptLanguage = Cc["@mozilla.org/supports-string;1"].createInstance(Ci.nsISupportsString);
+ acceptLanguage.data = lang;
+ Services.prefs.setComplexValue("intl.accept_languages", Ci.nsISupportsString, acceptLanguage);
+ }
+
+ let server = httpd_setup({
+ "/foo": function(request, response) {
+ do_check_eq(languages[1], request.getHeader("Accept-Language"));
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ },
+ });
+
+ let url = server.baseURI + "/foo";
+ let postData = {};
+ let request;
+
+ setLanguage(languages[0]);
+
+ // A new request should create the stateful object for tracking the current
+ // language.
+ request = new HAWKAuthenticatedRESTRequest(url, credentials);
+ CommonUtils.nextTick(testFirstLanguage);
+
+ function testFirstLanguage() {
+ do_check_eq(languages[0], request._intl.accept_languages);
+
+ // Change the language pref ...
+ setLanguage(languages[1]);
+ CommonUtils.nextTick(testRequest);
+ }
+
+ function testRequest() {
+ // Change of language pref should be picked up, which we can see on the
+ // server by inspecting the request headers.
+ request = new HAWKAuthenticatedRESTRequest(url, credentials);
+ request.post({}, function(error) {
+ do_check_null(error);
+ do_check_eq(200, this.response.status);
+
+ Services.prefs.resetUserPrefs();
+
+ server.stop(run_next_test);
+ });
+ }
+});
+
+add_task(function test_deriveHawkCredentials() {
+ let credentials = deriveHawkCredentials(
+ SESSION_KEYS.sessionToken, "sessionToken");
+
+ do_check_eq(credentials.algorithm, "sha256");
+ do_check_eq(credentials.id, SESSION_KEYS.tokenID);
+ do_check_eq(CommonUtils.bytesAsHex(credentials.key), SESSION_KEYS.reqHMACkey);
+});
+
+// turn formatted test vectors into normal hex strings
+function h(hexStr) {
+ return hexStr.replace(/\s+/g, "");
+}
diff --git a/services/common/tests/unit/test_kinto.js b/services/common/tests/unit/test_kinto.js
new file mode 100644
index 000000000..9c5ce58d9
--- /dev/null
+++ b/services/common/tests/unit/test_kinto.js
@@ -0,0 +1,412 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/kinto-offline-client.js");
+Cu.import("resource://testing-common/httpd.js");
+
+const BinaryInputStream = Components.Constructor("@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream", "setInputStream");
+
+var server;
+
+// set up what we need to make storage adapters
+const Kinto = loadKinto();
+const FirefoxAdapter = Kinto.adapters.FirefoxAdapter;
+const kintoFilename = "kinto.sqlite";
+
+let kintoClient;
+
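+// Lazily create a Kinto client pointed at the local test server (basic auth,
+// FirefoxAdapter storage) and return its "test_collection" collection.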
+function do_get_kinto_collection() {
+ if (!kintoClient) {
+ let config = {
+ remote:`http://localhost:${server.identity.primaryPort}/v1/`,
+ headers: {Authorization: "Basic " + btoa("user:pass")},
+ adapter: FirefoxAdapter
+ };
+ kintoClient = new Kinto(config);
+ }
+ return kintoClient.collection("test_collection");
+}
+
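+// Helper task used between tests: open the collection database, remove all
+// local records, then close the database again.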
+function* clear_collection() {
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ yield collection.clear();
+ } finally {
+ yield collection.db.close();
+ }
+}
+
+// test some operations on a local collection
+add_task(function* test_kinto_add_get() {
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+
+ let newRecord = { foo: "bar" };
+ // check a record is created
+ let createResult = yield collection.create(newRecord);
+ do_check_eq(createResult.data.foo, newRecord.foo);
+ // check getting the record gets the same info
+ let getResult = yield collection.get(createResult.data.id);
+ deepEqual(createResult.data, getResult.data);
+ // check what happens if we create the same item again (it should throw
+ // since you can't create with id)
+ try {
+ yield collection.create(createResult.data);
+ do_throw("Creation of a record with an id should fail");
+ } catch (err) { }
+ // try a few creates without waiting for the first few to resolve
+ let promises = [];
+ promises.push(collection.create(newRecord));
+ promises.push(collection.create(newRecord));
+ promises.push(collection.create(newRecord));
+ yield collection.create(newRecord);
+ yield Promise.all(promises);
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+// test some operations on multiple connections
+add_task(function* test_kinto_add_get_multiple_connections() {
+ const collection1 = do_get_kinto_collection();
+ const collection2 = kintoClient.collection("test_collection_2");
+
+ try {
+ yield collection1.db.open();
+ yield collection2.db.open();
+
+ let newRecord = { foo: "bar" };
+
+ // perform several write operations alternately without waiting for promises
+ // to resolve
+ let promises = [];
+ for (let i = 0; i < 10; i++) {
+ promises.push(collection1.create(newRecord));
+ promises.push(collection2.create(newRecord));
+ }
+
+ // ensure subsequent operations still work
+ yield Promise.all([collection1.create(newRecord),
+ collection2.create(newRecord)]);
+ yield Promise.all(promises);
+ } finally {
+ yield collection1.db.close();
+ yield collection2.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+add_task(function* test_kinto_update() {
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ const newRecord = { foo: "bar" };
+ // check a record is created
+ let createResult = yield collection.create(newRecord);
+ do_check_eq(createResult.data.foo, newRecord.foo);
+ do_check_eq(createResult.data._status, "created");
+ // check we can update this OK
+    let copiedRecord = Object.assign({}, createResult.data);
+ deepEqual(createResult.data, copiedRecord);
+ copiedRecord.foo = "wibble";
+ let updateResult = yield collection.update(copiedRecord);
+ // check the field was updated
+ do_check_eq(updateResult.data.foo, copiedRecord.foo);
+ // check the status is still "created", since we haven't synced
+ // the record
+ do_check_eq(updateResult.data._status, "created");
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+add_task(function* test_kinto_clear() {
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+
+ // create an expected number of records
+ const expected = 10;
+ const newRecord = { foo: "bar" };
+ for (let i = 0; i < expected; i++) {
+ yield collection.create(newRecord);
+ }
+ // check the collection contains the correct number
+ let list = yield collection.list();
+ do_check_eq(list.data.length, expected);
+ // clear the collection and check again - should be 0
+ yield collection.clear();
+ list = yield collection.list();
+ do_check_eq(list.data.length, 0);
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+add_task(function* test_kinto_delete(){
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ const newRecord = { foo: "bar" };
+ // check a record is created
+ let createResult = yield collection.create(newRecord);
+ do_check_eq(createResult.data.foo, newRecord.foo);
+ // check getting the record gets the same info
+ let getResult = yield collection.get(createResult.data.id);
+ deepEqual(createResult.data, getResult.data);
+ // delete that record
+ let deleteResult = yield collection.delete(createResult.data.id);
+ // check the ID is set on the result
+ do_check_eq(getResult.data.id, deleteResult.data.id);
+ // and check that get no longer returns the record
+ try {
+ getResult = yield collection.get(createResult.data.id);
+ do_throw("there should not be a result");
+ } catch (e) { }
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(function* test_kinto_list(){
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ const expected = 10;
+ const created = [];
+ for (let i = 0; i < expected; i++) {
+ let newRecord = { foo: "test " + i };
+ let createResult = yield collection.create(newRecord);
+ created.push(createResult.data);
+ }
+ // check the collection contains the correct number
+ let list = yield collection.list();
+ do_check_eq(list.data.length, expected);
+
+ // check that all created records exist in the retrieved list
+ for (let createdRecord of created) {
+ let found = false;
+ for (let retrievedRecord of list.data) {
+ if (createdRecord.id == retrievedRecord.id) {
+ deepEqual(createdRecord, retrievedRecord);
+ found = true;
+ }
+ }
+ do_check_true(found);
+ }
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+add_task(function* test_loadDump_ignores_already_imported_records(){
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ const record = {id: "41b71c13-17e9-4ee3-9268-6a41abf9730f", title: "foo", last_modified: 1457896541};
+ yield collection.loadDump([record]);
+ let impactedRecords = yield collection.loadDump([record]);
+ do_check_eq(impactedRecords.length, 0);
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+add_task(function* test_loadDump_should_overwrite_old_records(){
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ const record = {id: "41b71c13-17e9-4ee3-9268-6a41abf9730f", title: "foo", last_modified: 1457896541};
+ yield collection.loadDump([record]);
+ const updated = Object.assign({}, record, {last_modified: 1457896543});
+ let impactedRecords = yield collection.loadDump([updated]);
+ do_check_eq(impactedRecords.length, 1);
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+add_task(function* test_loadDump_should_not_overwrite_unsynced_records(){
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ const recordId = "41b71c13-17e9-4ee3-9268-6a41abf9730f";
+ yield collection.create({id: recordId, title: "foo"}, {useRecordId: true});
+ const record = {id: recordId, title: "bar", last_modified: 1457896541};
+ let impactedRecords = yield collection.loadDump([record]);
+ do_check_eq(impactedRecords.length, 0);
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+add_task(function* test_loadDump_should_not_overwrite_records_without_last_modified(){
+ const collection = do_get_kinto_collection();
+ try {
+ yield collection.db.open();
+ const recordId = "41b71c13-17e9-4ee3-9268-6a41abf9730f";
+ yield collection.create({id: recordId, title: "foo"}, {synced: true});
+ const record = {id: recordId, title: "bar", last_modified: 1457896541};
+ let impactedRecords = yield collection.loadDump([record]);
+ do_check_eq(impactedRecords.length, 0);
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+add_task(clear_collection);
+
+// Now do some sanity checks against a server - we're not looking to test
+// core kinto.js functionality here (there is excellent test coverage in
+// kinto.js), more making sure things are basically working as expected.
+add_task(function* test_kinto_sync(){
+ const configPath = "/v1/";
+ const recordsPath = "/v1/buckets/default/collections/test_collection/records";
+ // register a handler
+ function handleResponse (request, response) {
+ try {
+ const sampled = getSampleResponse(request, server.identity.primaryPort);
+ if (!sampled) {
+ do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
+ }
+
+ response.setStatusLine(null, sampled.status.status,
+ sampled.status.statusText);
+ // send the headers
+ for (let headerLine of sampled.sampleHeaders) {
+ let headerElements = headerLine.split(':');
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+ response.setHeader("Date", (new Date()).toUTCString());
+
+ response.write(sampled.responseBody);
+ } catch (e) {
+ dump(`${e}\n`);
+ }
+ }
+ server.registerPathHandler(configPath, handleResponse);
+ server.registerPathHandler(recordsPath, handleResponse);
+
+ // create an empty collection, sync to populate
+ const collection = do_get_kinto_collection();
+ try {
+ let result;
+
+ yield collection.db.open();
+ result = yield collection.sync();
+ do_check_true(result.ok);
+
+ // our test data has a single record; it should be in the local collection
+ let list = yield collection.list();
+ do_check_eq(list.data.length, 1);
+
+ // now sync again; we should now have 2 records
+ result = yield collection.sync();
+ do_check_true(result.ok);
+ list = yield collection.list();
+ do_check_eq(list.data.length, 2);
+
+    // sync again; the second record should have been modified
+ const before = list.data[0].title;
+ result = yield collection.sync();
+ do_check_true(result.ok);
+ list = yield collection.list();
+ const after = list.data[0].title;
+ do_check_neq(before, after);
+ } finally {
+ yield collection.db.close();
+ }
+});
+
+function run_test() {
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ run_next_test();
+
+ do_register_cleanup(function() {
+ server.stop(function() { });
+ });
+}
+
+// get a response for a given request from sample data
+function getSampleResponse(req, port) {
+ const responses = {
+ "OPTIONS": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Headers: Content-Length,Expires,Backoff,Retry-After,Last-Modified,Total-Records,ETag,Pragma,Cache-Control,authorization,content-type,if-none-match,Alert,Next-Page",
+ "Access-Control-Allow-Methods: GET,HEAD,OPTIONS,POST,DELETE,OPTIONS",
+ "Access-Control-Allow-Origin: *",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": "null"
+ },
+ "GET:/v1/?": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"settings":{"batch_max_requests":25}, "url":`http://localhost:${port}/v1/`, "documentation":"https://kinto.readthedocs.org/", "version":"1.5.1", "commit":"cbc6f58", "hello":"kinto"})
+ },
+ "GET:/v1/buckets/default/collections/test_collection/records?_sort=-last_modified": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"1445606341071\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{"last_modified":1445606341071, "done":false, "id":"68db8313-686e-4fff-835e-07d78ad6f2af", "title":"New test"}]})
+ },
+ "GET:/v1/buckets/default/collections/test_collection/records?_sort=-last_modified&_since=1445606341071": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"1445607941223\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{"last_modified":1445607941223, "done":false, "id":"901967b0-f729-4b30-8d8d-499cba7f4b1d", "title":"Another new test"}]})
+ },
+ "GET:/v1/buckets/default/collections/test_collection/records?_sort=-last_modified&_since=1445607941223": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"1445607541265\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": JSON.stringify({"data":[{"last_modified":1445607541265, "done":false, "id":"901967b0-f729-4b30-8d8d-499cba7f4b1d", "title":"Modified title"}]})
+ }
+ };
+ return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+ responses[req.method];
+
+}
diff --git a/services/common/tests/unit/test_load_modules.js b/services/common/tests/unit/test_load_modules.js
new file mode 100644
index 000000000..66ecf0734
--- /dev/null
+++ b/services/common/tests/unit/test_load_modules.js
@@ -0,0 +1,69 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Components.utils.import("resource://gre/modules/AppConstants.jsm");
+
+const MODULE_BASE = "resource://services-common/";
+const shared_modules = [
+ "async.js",
+ "logmanager.js",
+ "rest.js",
+ "stringbundle.js",
+ "utils.js",
+];
+
+const non_android_modules = [
+ "tokenserverclient.js",
+];
+
+const TEST_BASE = "resource://testing-common/services/common/";
+const shared_test_modules = [
+ "logging.js",
+];
+
+const non_android_test_modules = [
+ "storageserver.js",
+];
+
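+// Attempt to import every module in `mm` (resolved against `base`) and fail
+// the test if any import throws.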
+function expectImportsToSucceed(mm, base=MODULE_BASE) {
+ for (let m of mm) {
+ let resource = base + m;
+ let succeeded = false;
+ try {
+ Components.utils.import(resource, {});
+ succeeded = true;
+ } catch (e) {}
+
+ if (!succeeded) {
+ throw "Importing " + resource + " should have succeeded!";
+ }
+ }
+}
+
+function expectImportsToFail(mm, base=MODULE_BASE) {
+ for (let m of mm) {
+ let resource = base + m;
+ let succeeded = false;
+ try {
+ Components.utils.import(resource, {});
+ succeeded = true;
+ } catch (e) {}
+
+ if (succeeded) {
+ throw "Importing " + resource + " should have failed!";
+ }
+ }
+}
+
+function run_test() {
+ expectImportsToSucceed(shared_modules);
+ expectImportsToSucceed(shared_test_modules, TEST_BASE);
+
+ if (AppConstants.platform != "android") {
+ expectImportsToSucceed(non_android_modules);
+ expectImportsToSucceed(non_android_test_modules, TEST_BASE);
+ } else {
+ expectImportsToFail(non_android_modules);
+ expectImportsToFail(non_android_test_modules, TEST_BASE);
+ }
+}
diff --git a/services/common/tests/unit/test_logmanager.js b/services/common/tests/unit/test_logmanager.js
new file mode 100644
index 000000000..13e5caa0a
--- /dev/null
+++ b/services/common/tests/unit/test_logmanager.js
@@ -0,0 +1,229 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// NOTE: The sync test_errorhandler_* tests have quite good coverage for
+// other aspects of this.
+
+Cu.import("resource://services-common/logmanager.js");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://gre/modules/FileUtils.jsm");
+
+function run_test() {
+ run_next_test();
+}
+
+// Returns an array of [consoleAppender, dumpAppender, [fileAppenders]] for
+// the specified log. Note that fileAppenders will usually have length=1
+function getAppenders(log) {
+ let capps = log.appenders.filter(app => app instanceof Log.ConsoleAppender);
+ equal(capps.length, 1, "should only have one console appender");
+ let dapps = log.appenders.filter(app => app instanceof Log.DumpAppender);
+ equal(dapps.length, 1, "should only have one dump appender");
+ let fapps = log.appenders.filter(app => app instanceof Log.StorageStreamAppender);
+ return [capps[0], dapps[0], fapps];
+}
+
+// Test that the correct thing happens when no prefs exist for the log manager.
+add_task(function* test_noPrefs() {
+ // tell the log manager to init with a pref branch that doesn't exist.
+ let lm = new LogManager("no-such-branch.", ["TestLog"], "test");
+
+ let log = Log.repository.getLogger("TestLog");
+ let [capp, dapp, fapps] = getAppenders(log);
+ // The console appender gets "Fatal" while the "dump" appender gets "Error" levels
+ equal(capp.level, Log.Level.Fatal);
+ equal(dapp.level, Log.Level.Error);
+ // and the file (stream) appender gets Debug by default
+ equal(fapps.length, 1, "only 1 file appender");
+ equal(fapps[0].level, Log.Level.Debug);
+ lm.finalize();
+});
+
+// Test that changes to the prefs used by the log manager are updated dynamically.
+add_task(function* test_PrefChanges() {
+ Services.prefs.setCharPref("log-manager.test.log.appender.console", "Trace");
+ Services.prefs.setCharPref("log-manager.test.log.appender.dump", "Trace");
+ Services.prefs.setCharPref("log-manager.test.log.appender.file.level", "Trace");
+ let lm = new LogManager("log-manager.test.", ["TestLog2"], "test");
+
+ let log = Log.repository.getLogger("TestLog2");
+ let [capp, dapp, [fapp]] = getAppenders(log);
+ equal(capp.level, Log.Level.Trace);
+ equal(dapp.level, Log.Level.Trace);
+ equal(fapp.level, Log.Level.Trace);
+ // adjust the prefs and they should magically be reflected in the appenders.
+ Services.prefs.setCharPref("log-manager.test.log.appender.console", "Debug");
+ Services.prefs.setCharPref("log-manager.test.log.appender.dump", "Debug");
+ Services.prefs.setCharPref("log-manager.test.log.appender.file.level", "Debug");
+ equal(capp.level, Log.Level.Debug);
+ equal(dapp.level, Log.Level.Debug);
+ equal(fapp.level, Log.Level.Debug);
+  // and invalid values should cause them to fall back to their defaults.
+ Services.prefs.setCharPref("log-manager.test.log.appender.console", "xxx");
+ Services.prefs.setCharPref("log-manager.test.log.appender.dump", "xxx");
+ Services.prefs.setCharPref("log-manager.test.log.appender.file.level", "xxx");
+ equal(capp.level, Log.Level.Fatal);
+ equal(dapp.level, Log.Level.Error);
+ equal(fapp.level, Log.Level.Debug);
+ lm.finalize();
+});
+
+// Test that the same log used by multiple log managers does the right thing.
+add_task(function* test_SharedLogs() {
+ // create the prefs for the first instance.
+ Services.prefs.setCharPref("log-manager-1.test.log.appender.console", "Trace");
+ Services.prefs.setCharPref("log-manager-1.test.log.appender.dump", "Trace");
+ Services.prefs.setCharPref("log-manager-1.test.log.appender.file.level", "Trace");
+ let lm1 = new LogManager("log-manager-1.test.", ["TestLog3"], "test");
+
+ // and the second.
+ Services.prefs.setCharPref("log-manager-2.test.log.appender.console", "Debug");
+ Services.prefs.setCharPref("log-manager-2.test.log.appender.dump", "Debug");
+ Services.prefs.setCharPref("log-manager-2.test.log.appender.file.level", "Debug");
+ let lm2 = new LogManager("log-manager-2.test.", ["TestLog3"], "test");
+
+ let log = Log.repository.getLogger("TestLog3");
+ let [capp, dapp, fapps] = getAppenders(log);
+
+ // console and dump appenders should be "trace" as it is more verbose than
+ // "debug"
+ equal(capp.level, Log.Level.Trace);
+ equal(dapp.level, Log.Level.Trace);
+
+ // Set the prefs on the -1 branch to "Error" - it should then end up with
+ // "Debug" from the -2 branch.
+ Services.prefs.setCharPref("log-manager-1.test.log.appender.console", "Error");
+ Services.prefs.setCharPref("log-manager-1.test.log.appender.dump", "Error");
+ Services.prefs.setCharPref("log-manager-1.test.log.appender.file.level", "Error");
+
+ equal(capp.level, Log.Level.Debug);
+ equal(dapp.level, Log.Level.Debug);
+
+ lm1.finalize();
+ lm2.finalize();
+});
+
+// A little helper to test what log files exist. We expect exactly zero (if
+// prefix is null) or exactly one with the specified prefix.
+function checkLogFile(prefix) {
+ let logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
+ let entries = logsdir.directoryEntries;
+ if (!prefix) {
+ // expecting no files.
+ ok(!entries.hasMoreElements());
+ } else {
+ // expecting 1 file.
+ ok(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ equal(logfile.leafName.slice(-4), ".txt");
+ ok(logfile.leafName.startsWith(prefix + "-test-"), logfile.leafName);
+ // and remove it ready for the next check.
+ logfile.remove(false);
+ }
+}
+
+// Test that we correctly write error logs by default
+add_task(function* test_logFileErrorDefault() {
+ let lm = new LogManager("log-manager.test.", ["TestLog2"], "test");
+
+ let log = Log.repository.getLogger("TestLog2");
+ log.error("an error message");
+ yield lm.resetFileLog(lm.REASON_ERROR);
+ // One error log file exists.
+ checkLogFile("error");
+
+ lm.finalize();
+});
+
+// Test that we correctly write success logs.
+add_task(function* test_logFileSuccess() {
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnError", false);
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnSuccess", false);
+
+ let lm = new LogManager("log-manager.test.", ["TestLog2"], "test");
+
+ let log = Log.repository.getLogger("TestLog2");
+ log.info("an info message");
+ yield lm.resetFileLog();
+ // Zero log files exist.
+ checkLogFile(null);
+
+ // Reset logOnSuccess and do it again - log should appear.
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnSuccess", true);
+ log.info("an info message");
+ yield lm.resetFileLog();
+
+ checkLogFile("success");
+
+ // Now test with no "reason" specified and no "error" record.
+ log.info("an info message");
+ yield lm.resetFileLog();
+ // should get a "success" entry.
+ checkLogFile("success");
+
+ // With no "reason" and an error record - should get no success log.
+ log.error("an error message");
+ yield lm.resetFileLog();
+ // should get no entry
+ checkLogFile(null);
+
+ // And finally now with no error, to ensure that the fact we had an error
+ // previously doesn't persist after the .resetFileLog call.
+ log.info("an info message");
+ yield lm.resetFileLog();
+ checkLogFile("success");
+
+ lm.finalize();
+});
+
+// Test that we correctly write error logs.
+add_task(function* test_logFileError() {
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnError", false);
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnSuccess", false);
+
+ let lm = new LogManager("log-manager.test.", ["TestLog2"], "test");
+
+ let log = Log.repository.getLogger("TestLog2");
+ log.info("an info message");
+ let reason = yield lm.resetFileLog();
+ Assert.equal(reason, null, "null returned when no file created.");
+ // Zero log files exist.
+ checkLogFile(null);
+
+ // Reset logOnSuccess - success logs should appear if no error records.
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnSuccess", true);
+ log.info("an info message");
+ reason = yield lm.resetFileLog();
+ Assert.equal(reason, lm.SUCCESS_LOG_WRITTEN);
+ checkLogFile("success");
+
+ // Set logOnError and unset logOnSuccess - error logs should appear.
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnSuccess", false);
+ Services.prefs.setBoolPref("log-manager.test.log.appender.file.logOnError", true);
+ log.error("an error message");
+ reason = yield lm.resetFileLog();
+ Assert.equal(reason, lm.ERROR_LOG_WRITTEN);
+ checkLogFile("error");
+
+ // Now test with no "error" record.
+ log.info("an info message");
+ reason = yield lm.resetFileLog();
+ // should get no file
+ Assert.equal(reason, null);
+ checkLogFile(null);
+
+ // With an error record we should get an error log.
+ log.error("an error message");
+ reason = yield lm.resetFileLog();
+  // should get an error log
+ Assert.equal(reason, lm.ERROR_LOG_WRITTEN);
+ checkLogFile("error");
+
+ // And finally now with success, to ensure that the fact we had an error
+ // previously doesn't persist after the .resetFileLog call.
+ log.info("an info message");
+ yield lm.resetFileLog();
+ checkLogFile(null);
+
+ lm.finalize();
+});
diff --git a/services/common/tests/unit/test_observers.js b/services/common/tests/unit/test_observers.js
new file mode 100644
index 000000000..f11e83d5d
--- /dev/null
+++ b/services/common/tests/unit/test_observers.js
@@ -0,0 +1,84 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Components.utils.import("resource://services-common/observers.js");
+
+var gSubject = {};
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_function_observer() {
+ let foo = false;
+
+ let onFoo = function(subject, data) {
+ foo = !foo;
+ do_check_eq(subject, gSubject);
+ do_check_eq(data, "some data");
+ };
+
+ Observers.add("foo", onFoo);
+ Observers.notify("foo", gSubject, "some data");
+
+ // The observer was notified after being added.
+ do_check_true(foo);
+
+ Observers.remove("foo", onFoo);
+ Observers.notify("foo");
+
+ // The observer was not notified after being removed.
+ do_check_true(foo);
+
+ run_next_test();
+});
+
+add_test(function test_method_observer() {
+ let obj = {
+ foo: false,
+ onFoo: function(subject, data) {
+ this.foo = !this.foo;
+ do_check_eq(subject, gSubject);
+ do_check_eq(data, "some data");
+ }
+ };
+
+ // The observer is notified after being added.
+ Observers.add("foo", obj.onFoo, obj);
+ Observers.notify("foo", gSubject, "some data");
+ do_check_true(obj.foo);
+
+ // The observer is not notified after being removed.
+ Observers.remove("foo", obj.onFoo, obj);
+ Observers.notify("foo");
+ do_check_true(obj.foo);
+
+ run_next_test();
+});
+
+add_test(function test_object_observer() {
+ let obj = {
+ foo: false,
+ observe: function(subject, topic, data) {
+ this.foo = !this.foo;
+
+ do_check_eq(subject, gSubject);
+ do_check_eq(topic, "foo");
+ do_check_eq(data, "some data");
+ }
+ };
+
+ Observers.add("foo", obj);
+ Observers.notify("foo", gSubject, "some data");
+
+ // The observer is notified after being added.
+ do_check_true(obj.foo);
+
+ Observers.remove("foo", obj);
+ Observers.notify("foo");
+
+ // The observer is not notified after being removed.
+ do_check_true(obj.foo);
+
+ run_next_test();
+});
diff --git a/services/common/tests/unit/test_restrequest.js b/services/common/tests/unit/test_restrequest.js
new file mode 100644
index 000000000..162e0f517
--- /dev/null
+++ b/services/common/tests/unit/test_restrequest.js
@@ -0,0 +1,873 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/NetUtil.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ Log.repository.getLogger("Services.Common.RESTRequest").level =
+ Log.Level.Trace;
+ initTestLogging("Trace");
+
+ run_next_test();
+}
+
+/**
+ * Initializing a RESTRequest with an invalid URI throws
+ * NS_ERROR_MALFORMED_URI.
+ */
+add_test(function test_invalid_uri() {
+ do_check_throws(function() {
+ new RESTRequest("an invalid URI");
+ }, Cr.NS_ERROR_MALFORMED_URI);
+ run_next_test();
+});
+
+/**
+ * Verify initial values for attributes.
+ */
+add_test(function test_attributes() {
+ let uri = "http://foo.com/bar/baz";
+ let request = new RESTRequest(uri);
+
+ do_check_true(request.uri instanceof Ci.nsIURI);
+ do_check_eq(request.uri.spec, uri);
+ do_check_eq(request.response, null);
+ do_check_eq(request.status, request.NOT_SENT);
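+  // Requests should bypass the cache, inhibit caching, and be anonymous.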
+ let expectedLoadFlags = Ci.nsIRequest.LOAD_BYPASS_CACHE |
+ Ci.nsIRequest.INHIBIT_CACHING |
+ Ci.nsIRequest.LOAD_ANONYMOUS;
+ do_check_eq(request.loadFlags, expectedLoadFlags);
+
+ run_next_test();
+});
+
+/**
+ * Verify that a proxy auth redirect doesn't break us. This has to be the first
+ * request made in the file!
+ */
+add_test(function test_proxy_auth_redirect() {
+ let pacFetched = false;
+ function pacHandler(metadata, response) {
+ pacFetched = true;
+ let body = 'function FindProxyForURL(url, host) { return "DIRECT"; }';
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/x-ns-proxy-autoconfig", false);
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ let fetched = false;
+ function original(metadata, response) {
+ fetched = true;
+ let body = "TADA!";
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ let server = httpd_setup({
+ "/original": original,
+ "/pac3": pacHandler
+ });
+ PACSystemSettings.PACURI = server.baseURI + "/pac3";
+ installFakePAC();
+
+ let res = new RESTRequest(server.baseURI + "/original");
+ res.get(function (error) {
+ do_check_true(pacFetched);
+ do_check_true(fetched);
+ do_check_true(!error);
+ do_check_true(this.response.success);
+ do_check_eq("TADA!", this.response.body);
+ uninstallFakePAC();
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * Ensure that failures that cause asyncOpen to throw
+ * result in callbacks being invoked.
+ * Bug 826086.
+ */
+add_test(function test_forbidden_port() {
+ let request = new RESTRequest("http://localhost:6000/");
+ request.get(function(error) {
+ if (!error) {
+ do_throw("Should have got an error.");
+ }
+ do_check_eq(error.result, Components.results.NS_ERROR_PORT_ACCESS_NOT_ALLOWED);
+ run_next_test();
+ });
+});
+
+/**
+ * Demonstrate API short-hand: create a request and dispatch it immediately.
+ */
+add_test(function test_simple_get() {
+ let handler = httpd_handler(200, "OK", "Huzzah!");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource").get(function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "Huzzah!");
+
+ server.stop(run_next_test);
+ });
+ do_check_eq(request.status, request.SENT);
+ do_check_eq(request.method, "GET");
+});
+
+/**
+ * Test HTTP GET with all bells and whistles.
+ */
+add_test(function test_get() {
+ let handler = httpd_handler(200, "OK", "Huzzah!");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+ do_check_eq(request.status, request.NOT_SENT);
+
+ request.onProgress = request.onComplete = function () {
+ do_throw("This function should have been overwritten!");
+ };
+
+ let onProgress_called = false;
+ function onProgress() {
+ onProgress_called = true;
+ do_check_eq(this.status, request.IN_PROGRESS);
+ do_check_true(this.response.body.length > 0);
+
+ do_check_true(!!(this.channel.loadFlags & Ci.nsIRequest.LOAD_BYPASS_CACHE));
+ do_check_true(!!(this.channel.loadFlags & Ci.nsIRequest.INHIBIT_CACHING));
+ };
+
+ function onComplete(error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "Huzzah!");
+ do_check_eq(handler.request.method, "GET");
+
+ do_check_true(onProgress_called);
+ CommonUtils.nextTick(function () {
+ do_check_eq(request.onComplete, null);
+ do_check_eq(request.onProgress, null);
+ server.stop(run_next_test);
+ });
+ };
+
+ do_check_eq(request.get(onComplete, onProgress), request);
+ do_check_eq(request.status, request.SENT);
+ do_check_eq(request.method, "GET");
+ do_check_throws(function () {
+ request.get();
+ });
+});
+
+/**
+ * Test HTTP GET with UTF-8 content, and custom Content-Type.
+ */
+add_test(function test_get_utf8() {
+ let response = "Hello World or Καλημέρα κόσμε or こんにちは 世界";
+
+ let contentType = "text/plain";
+ let charset = true;
+ let charsetSuffix = "; charset=UTF-8";
+
+ let server = httpd_setup({"/resource": function(req, res) {
+ res.setStatusLine(req.httpVersion, 200, "OK");
+ res.setHeader("Content-Type", contentType + (charset ? charsetSuffix : ""));
+
+ let converter = Cc["@mozilla.org/intl/converter-output-stream;1"]
+ .createInstance(Ci.nsIConverterOutputStream);
+ converter.init(res.bodyOutputStream, "UTF-8", 0, 0x0000);
+ converter.writeString(response);
+ converter.close();
+ }});
+
+ // Check that the charset in Content-Type is properly interpreted.
+ let request1 = new RESTRequest(server.baseURI + "/resource");
+ request1.get(function(error) {
+ do_check_null(error);
+
+ do_check_eq(request1.response.status, 200);
+ do_check_eq(request1.response.body, response);
+ do_check_eq(request1.response.headers["content-type"],
+ contentType + charsetSuffix);
+
+ // Check that we default to UTF-8 if Content-Type doesn't have a charset.
+ charset = false;
+ let request2 = new RESTRequest(server.baseURI + "/resource");
+ request2.get(function(error) {
+ do_check_null(error);
+
+ do_check_eq(request2.response.status, 200);
+ do_check_eq(request2.response.body, response);
+ do_check_eq(request2.response.headers["content-type"], contentType);
+ do_check_eq(request2.response.charset, "utf-8");
+
+ server.stop(run_next_test);
+ });
+ });
+});
+
+/**
+ * Test HTTP POST data is encoded as UTF-8 by default.
+ */
+add_test(function test_post_utf8() {
+ // We set up a handler that responds with exactly what it received.
+ // Since we've already verified above that responses are correctly UTF-8
+ // decoded, getting the correct response back means the input must also
+ // have been UTF-8 encoded.
+ let server = httpd_setup({"/echo": function(req, res) {
+ res.setStatusLine(req.httpVersion, 200, "OK");
+ res.setHeader("Content-Type", req.getHeader("content-type"));
+ // Get the body as bytes and write them back without touching them
+ let sis = Cc["@mozilla.org/scriptableinputstream;1"]
+ .createInstance(Ci.nsIScriptableInputStream);
+ sis.init(req.bodyInputStream);
+ let body = sis.read(sis.available());
+ sis.close();
+ res.write(body);
+ }});
+
+ let data = {copyright: "\xa9"}; // \xa9 is the copyright symbol
+ let request1 = new RESTRequest(server.baseURI + "/echo");
+ request1.post(data, function(error) {
+ do_check_null(error);
+
+ do_check_eq(request1.response.status, 200);
+ deepEqual(JSON.parse(request1.response.body), data);
+ do_check_eq(request1.response.headers["content-type"],
+ "application/json; charset=utf-8")
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * Test more variations of charset handling.
+ */
+add_test(function test_charsets() {
+ let response = "Hello World, I can't speak Russian";
+
+ let contentType = "text/plain";
+ let charset = true;
+ let charsetSuffix = "; charset=us-ascii";
+
+ let server = httpd_setup({"/resource": function(req, res) {
+ res.setStatusLine(req.httpVersion, 200, "OK");
+ res.setHeader("Content-Type", contentType + (charset ? charsetSuffix : ""));
+
+ let converter = Cc["@mozilla.org/intl/converter-output-stream;1"]
+ .createInstance(Ci.nsIConverterOutputStream);
+ converter.init(res.bodyOutputStream, "us-ascii", 0, 0x0000);
+ converter.writeString(response);
+ converter.close();
+ }});
+
+ // Check that provided charset overrides hint.
+ let request1 = new RESTRequest(server.baseURI + "/resource");
+ request1.charset = "not-a-charset";
+ request1.get(function(error) {
+ do_check_null(error);
+
+ do_check_eq(request1.response.status, 200);
+ do_check_eq(request1.response.body, response);
+ do_check_eq(request1.response.headers["content-type"],
+ contentType + charsetSuffix);
+ do_check_eq(request1.response.charset, "us-ascii");
+
+ // Check that hint is used if Content-Type doesn't have a charset.
+ charset = false;
+ let request2 = new RESTRequest(server.baseURI + "/resource");
+ request2.charset = "us-ascii";
+ request2.get(function(error) {
+ do_check_null(error);
+
+ do_check_eq(request2.response.status, 200);
+ do_check_eq(request2.response.body, response);
+ do_check_eq(request2.response.headers["content-type"], contentType);
+ do_check_eq(request2.response.charset, "us-ascii");
+
+ server.stop(run_next_test);
+ });
+ });
+});
+
+/**
+ * Used for testing PATCH/PUT/POST methods.
+ */
+function check_posting_data(method) {
+ let funcName = method.toLowerCase();
+ let handler = httpd_handler(200, "OK", "Got it!");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+ do_check_eq(request.status, request.NOT_SENT);
+
+ request.onProgress = request.onComplete = function () {
+ do_throw("This function should have been overwritten!");
+ };
+
+ let onProgress_called = false;
+ function onProgress() {
+ onProgress_called = true;
+ do_check_eq(this.status, request.IN_PROGRESS);
+ do_check_true(this.response.body.length > 0);
+ };
+
+ function onComplete(error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "Got it!");
+
+ do_check_eq(handler.request.method, method);
+ do_check_eq(handler.request.body, "Hullo?");
+ do_check_eq(handler.request.getHeader("Content-Type"), "text/plain");
+
+ do_check_true(onProgress_called);
+ CommonUtils.nextTick(function () {
+ do_check_eq(request.onComplete, null);
+ do_check_eq(request.onProgress, null);
+ server.stop(run_next_test);
+ });
+ };
+
+ do_check_eq(request[funcName]("Hullo?", onComplete, onProgress), request);
+ do_check_eq(request.status, request.SENT);
+ do_check_eq(request.method, method);
+ do_check_throws(function () {
+ request[funcName]("Hai!");
+ });
+}
+
+/**
+ * Test HTTP PATCH with a simple string argument and default Content-Type.
+ */
+add_test(function test_patch() {
+ check_posting_data("PATCH");
+});
+
+/**
+ * Test HTTP PUT with a simple string argument and default Content-Type.
+ */
+add_test(function test_put() {
+ check_posting_data("PUT");
+});
+
+/**
+ * Test HTTP POST with a simple string argument and default Content-Type.
+ */
+add_test(function test_post() {
+ check_posting_data("POST");
+});
+
+/**
+ * Test HTTP DELETE.
+ */
+add_test(function test_delete() {
+ let handler = httpd_handler(200, "OK", "Got it!");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+ do_check_eq(request.status, request.NOT_SENT);
+
+ request.onProgress = request.onComplete = function () {
+ do_throw("This function should have been overwritten!");
+ };
+
+ let onProgress_called = false;
+ function onProgress() {
+ onProgress_called = true;
+ do_check_eq(this.status, request.IN_PROGRESS);
+ do_check_true(this.response.body.length > 0);
+ };
+
+ function onComplete(error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "Got it!");
+ do_check_eq(handler.request.method, "DELETE");
+
+ do_check_true(onProgress_called);
+ CommonUtils.nextTick(function () {
+ do_check_eq(request.onComplete, null);
+ do_check_eq(request.onProgress, null);
+ server.stop(run_next_test);
+ });
+ };
+
+ do_check_eq(request.delete(onComplete, onProgress), request);
+ do_check_eq(request.status, request.SENT);
+ do_check_eq(request.method, "DELETE");
+ do_check_throws(function () {
+ request.delete();
+ });
+});
+
+/**
+ * Test an HTTP response with a non-200 status code.
+ */
+add_test(function test_get_404() {
+ let handler = httpd_handler(404, "Not Found", "Cannae find it!");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_false(this.response.success);
+ do_check_eq(this.response.status, 404);
+ do_check_eq(this.response.body, "Cannae find it!");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * The 'data' argument to PUT, if not a string already, is automatically
+ * stringified as JSON.
+ */
+add_test(function test_put_json() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let sample_data = {
+ some: "sample_data",
+ injson: "format",
+ number: 42
+ };
+ let request = new RESTRequest(server.baseURI + "/resource");
+ request.put(sample_data, function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ do_check_eq(handler.request.method, "PUT");
+ do_check_eq(handler.request.body, JSON.stringify(sample_data));
+ do_check_eq(handler.request.getHeader("Content-Type"), "application/json; charset=utf-8");
+
+ server.stop(run_next_test);
+ });
+});
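+
+// Usage sketch (non-authoritative, mirroring the test above): because
+// non-string bodies are automatically JSON.stringify'd, a caller can hand
+// put() a plain object directly:
+//
+//   new RESTRequest(url).put({some: "object"}, function (error) {
+//     // the handler sees body '{"some":"object"}' with
+//     // Content-Type: application/json; charset=utf-8
+//   });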
+
+/**
+ * The 'data' argument to POST, if not a string already, is automatically
+ * stringified as JSON.
+ */
+add_test(function test_post_json() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let sample_data = {
+ some: "sample_data",
+ injson: "format",
+ number: 42
+ };
+ let request = new RESTRequest(server.baseURI + "/resource");
+ request.post(sample_data, function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ do_check_eq(handler.request.method, "POST");
+ do_check_eq(handler.request.body, JSON.stringify(sample_data));
+ do_check_eq(handler.request.getHeader("Content-Type"), "application/json; charset=utf-8");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * The content-type will be text/plain without a charset if the 'data' argument
+ * to POST is already a string.
+ */
+add_test(function test_post_string() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let sample_data = "hello";
+ let request = new RESTRequest(server.baseURI + "/resource");
+ request.post(sample_data, function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ do_check_eq(handler.request.method, "POST");
+ do_check_eq(handler.request.body, sample_data);
+ do_check_eq(handler.request.getHeader("Content-Type"), "text/plain");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * HTTP PUT with a custom Content-Type header.
+ */
+add_test(function test_put_override_content_type() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+ request.setHeader("Content-Type", "application/lolcat");
+ request.put("O HAI!!1!", function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ do_check_eq(handler.request.method, "PUT");
+ do_check_eq(handler.request.body, "O HAI!!1!");
+ do_check_eq(handler.request.getHeader("Content-Type"), "application/lolcat");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * HTTP POST with a custom Content-Type header.
+ */
+add_test(function test_post_override_content_type() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+ request.setHeader("Content-Type", "application/lolcat");
+ request.post("O HAI!!1!", function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.status, this.COMPLETED);
+ do_check_true(this.response.success);
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ do_check_eq(handler.request.method, "POST");
+ do_check_eq(handler.request.body, "O HAI!!1!");
+ do_check_eq(handler.request.getHeader("Content-Type"), "application/lolcat");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * No special headers are sent by default on a GET request.
+ */
+add_test(function test_get_no_headers() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let ignore_headers = ["host", "user-agent", "accept", "accept-language",
+ "accept-encoding", "accept-charset", "keep-alive",
+ "connection", "pragma", "cache-control",
+ "content-length"];
+
+ new RESTRequest(server.baseURI + "/resource").get(function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ let server_headers = handler.request.headers;
+ while (server_headers.hasMoreElements()) {
+ let header = server_headers.getNext().toString();
+ if (ignore_headers.indexOf(header) == -1) {
+ do_throw("Got unexpected header!");
+ }
+ }
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * Test changing the URI after having created the request.
+ */
+add_test(function test_changing_uri() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest("http://localhost:1234/the-wrong-resource");
+ request.uri = CommonUtils.makeURI(server.baseURI + "/resource");
+ request.get(function (error) {
+ do_check_eq(error, null);
+ do_check_eq(this.response.status, 200);
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * Test setting HTTP request headers.
+ */
+add_test(function test_request_setHeader() {
+ let handler = httpd_handler(200, "OK");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+
+ request.setHeader("X-What-Is-Weave", "awesome");
+ request.setHeader("X-WHAT-is-Weave", "more awesomer");
+ request.setHeader("Another-Header", "Hello World");
+
+ request.get(function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ do_check_eq(handler.request.getHeader("X-What-Is-Weave"), "more awesomer");
+ do_check_eq(handler.request.getHeader("another-header"), "Hello World");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * Test receiving HTTP response headers.
+ */
+add_test(function test_response_headers() {
+ function handler(request, response) {
+ response.setHeader("X-What-Is-Weave", "awesome");
+ response.setHeader("Another-Header", "Hello World");
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ }
+ let server = httpd_setup({"/resource": handler});
+ let request = new RESTRequest(server.baseURI + "/resource");
+
+ request.get(function (error) {
+ do_check_eq(error, null);
+
+ do_check_eq(this.response.status, 200);
+ do_check_eq(this.response.body, "");
+
+ do_check_eq(this.response.headers["x-what-is-weave"], "awesome");
+ do_check_eq(this.response.headers["another-header"], "Hello World");
+
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * The onComplete() handler gets called in case of any network errors
+ * (e.g. NS_ERROR_CONNECTION_REFUSED).
+ */
+add_test(function test_connection_refused() {
+ let request = new RESTRequest("http://localhost:1234/resource");
+ request.onProgress = function onProgress() {
+ do_throw("Shouldn't have called request.onProgress()!");
+ };
+ request.get(function (error) {
+ do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
+ do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
+ do_check_eq(this.status, this.COMPLETED);
+ run_next_test();
+ });
+ do_check_eq(request.status, request.SENT);
+});
+
+/**
+ * Abort a request that was just sent off.
+ */
+add_test(function test_abort() {
+ function handler() {
+ do_throw("Shouldn't have gotten here!");
+ }
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+
+ // Aborting a request that hasn't been sent yet is pointless and will throw.
+ do_check_throws(function () {
+ request.abort();
+ });
+
+ request.onProgress = request.onComplete = function () {
+ do_throw("Shouldn't have gotten here!");
+ };
+ request.get();
+ request.abort();
+
+ // Aborting an already aborted request is pointless and will throw.
+ do_check_throws(function () {
+ request.abort();
+ });
+
+ do_check_eq(request.status, request.ABORTED);
+ CommonUtils.nextTick(function () {
+ server.stop(run_next_test);
+ });
+});
+
+/**
+ * A non-zero 'timeout' property specifies the number of seconds to wait
+ * after the last channel activity before the request is automatically
+ * canceled.
+ */
+add_test(function test_timeout() {
+ let server = new HttpServer();
+ let server_connection;
+ server._handler.handleResponse = function(connection) {
+ // This handler doesn't do anything; it just keeps the connection open,
+ // thereby mimicking a connection that times out. We keep a reference to
+ // the open connection so it can be properly disposed of later. That's
+ // why you really only want to make a single HTTP request to this server.
+ server_connection = connection;
+ };
+ server.start();
+ let identity = server.identity;
+ let uri = identity.primaryScheme + "://" + identity.primaryHost + ":" +
+ identity.primaryPort;
+
+ let request = new RESTRequest(uri + "/resource");
+ request.timeout = 0.1; // 100 milliseconds
+ request.get(function (error) {
+ do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
+ do_check_eq(this.status, this.ABORTED);
+
+ // server_connection is undefined on the Android emulator for reasons
+ // unknown. Yet, we still get here. If this test is refactored, we should
+ // investigate the reason why the above callback is behaving differently.
+ if (server_connection) {
+ _("Closing connection.");
+ server_connection.close();
+ }
+
+ _("Shutting down server.");
+ server.stop(run_next_test);
+ });
+});
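+
+// A note on the semantics exercised above: 'timeout' accepts fractional
+// values (0.1 here, roughly 100ms), and when it fires the request ends up
+// ABORTED with error.result == Cr.NS_ERROR_NET_TIMEOUT.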
+
+/**
+ * An exception thrown in 'onProgress' propagates to the 'onComplete' handler.
+ */
+add_test(function test_exception_in_onProgress() {
+ let handler = httpd_handler(200, "OK", "Foobar");
+ let server = httpd_setup({"/resource": handler});
+
+ let request = new RESTRequest(server.baseURI + "/resource");
+ request.onProgress = function onProgress() {
+ it.does.not.exist();
+ };
+ request.get(function onComplete(error) {
+ do_check_eq(error, "ReferenceError: it is not defined");
+ do_check_eq(this.status, this.ABORTED);
+
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_new_channel() {
+ _("Ensure a redirect to a new channel is handled properly.");
+
+ function checkUA(metadata) {
+ let ua = metadata.getHeader("User-Agent");
+ _("User-Agent is " + ua);
+ do_check_eq("foo bar", ua);
+ }
+
+ let redirectRequested = false;
+ let redirectURL;
+ function redirectHandler(metadata, response) {
+ checkUA(metadata);
+ redirectRequested = true;
+
+ let body = "Redirecting";
+ response.setStatusLine(metadata.httpVersion, 307, "TEMPORARY REDIRECT");
+ response.setHeader("Location", redirectURL);
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ let resourceRequested = false;
+ function resourceHandler(metadata, response) {
+ checkUA(metadata);
+ resourceRequested = true;
+
+ let body = "Test";
+ response.setHeader("Content-Type", "text/plain");
+ response.bodyOutputStream.write(body, body.length);
+ }
+
+ let server1 = httpd_setup({"/redirect": redirectHandler});
+ let server2 = httpd_setup({"/resource": resourceHandler});
+ redirectURL = server2.baseURI + "/resource";
+
+ function advance() {
+ server1.stop(function () {
+ server2.stop(run_next_test);
+ });
+ }
+
+ let request = new RESTRequest(server1.baseURI + "/redirect");
+ request.setHeader("User-Agent", "foo bar");
+
+ // Swizzle in our own fakery, because this redirect is neither
+ // internal nor URI-preserving. RESTRequest's policy is to only
+ // copy headers under certain circumstances.
+ let protoMethod = request.shouldCopyOnRedirect;
+ request.shouldCopyOnRedirect = function wrapped(o, n, f) {
+ // Check the default policy.
+ do_check_false(protoMethod.call(this, o, n, f));
+ return true;
+ };
+
+ request.get(function onComplete(error) {
+ let response = this.response;
+
+ do_check_eq(200, response.status);
+ do_check_eq("Test", response.body);
+ do_check_true(redirectRequested);
+ do_check_true(resourceRequested);
+
+ advance();
+ });
+});
+
+add_test(function test_not_sending_cookie() {
+ function handler(metadata, response) {
+ let body = "COOKIE!";
+ response.setStatusLine(metadata.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(body, body.length);
+ do_check_false(metadata.hasHeader("Cookie"));
+ }
+ let server = httpd_setup({"/test": handler});
+
+ let cookieSer = Cc["@mozilla.org/cookieService;1"]
+ .getService(Ci.nsICookieService);
+ let uri = CommonUtils.makeURI(server.baseURI);
+ cookieSer.setCookieString(uri, null, "test=test; path=/;", null);
+
+ let res = new RESTRequest(server.baseURI + "/test");
+ res.get(function (error) {
+ do_check_null(error);
+ do_check_true(this.response.success);
+ do_check_eq("COOKIE!", this.response.body);
+ server.stop(run_next_test);
+ });
+});
+
diff --git a/services/common/tests/unit/test_storage_adapter.js b/services/common/tests/unit/test_storage_adapter.js
new file mode 100644
index 000000000..dc1aa807c
--- /dev/null
+++ b/services/common/tests/unit/test_storage_adapter.js
@@ -0,0 +1,269 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/kinto-offline-client.js");
+
+// set up what we need to make storage adapters
+const Kinto = loadKinto();
+const FirefoxAdapter = Kinto.adapters.FirefoxAdapter;
+const kintoFilename = "kinto.sqlite";
+
+let gFirefoxAdapter = null;
+
+function do_get_kinto_adapter() {
+ if (gFirefoxAdapter == null) {
+ gFirefoxAdapter = new FirefoxAdapter("test");
+ }
+ return gFirefoxAdapter;
+}
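+
+// The adapter is cached in gFirefoxAdapter so every task within a scenario
+// shares the same "test" collection; cleanup_kinto() below nulls it out (and
+// removes kinto.sqlite) between scenarios.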
+
+function do_get_kinto_db() {
+ let profile = do_get_profile();
+ let kintoDB = profile.clone();
+ kintoDB.append(kintoFilename);
+ return kintoDB;
+}
+
+function cleanup_kinto() {
+ add_test(function cleanup_kinto_files(){
+ let kintoDB = do_get_kinto_db();
+ // clean up the db
+ kintoDB.remove(false);
+ // force re-creation of the adapter
+ gFirefoxAdapter = null;
+ run_next_test();
+ });
+}
+
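+// Registers the shared battery of collection-level tasks. It is invoked from
+// both scenarios at the bottom of this file, so the same operations run
+// against a freshly created database and against a pre-existing empty one.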
+function test_collection_operations() {
+ add_task(function* test_kinto_clear() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ yield adapter.clear();
+ yield adapter.close();
+ });
+
+ // test creating new records... and getting them again
+ add_task(function* test_kinto_create_new_get_existing() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ let record = {id:"test-id", foo:"bar"};
+ yield adapter.execute((transaction) => transaction.create(record));
+ let newRecord = yield adapter.get("test-id");
+ // ensure the record is the same as when it was added
+ deepEqual(record, newRecord);
+ yield adapter.close();
+ });
+
+ // test removing records
+ add_task(function* test_kinto_can_remove_some_records() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ // create a second record
+ let record = {id:"test-id-2", foo:"baz"};
+ yield adapter.execute((transaction) => transaction.create(record));
+ let newRecord = yield adapter.get("test-id-2");
+ deepEqual(record, newRecord);
+ // delete the record
+ yield adapter.execute((transaction) => transaction.delete(record.id));
+ newRecord = yield adapter.get(record.id);
+ // ... and ensure it's no longer there
+ do_check_eq(newRecord, undefined);
+ // ensure the other record still exists
+ newRecord = yield adapter.get("test-id");
+ do_check_neq(newRecord, undefined);
+ yield adapter.close();
+ });
+
+ // test getting records that don't exist
+ add_task(function* test_kinto_get_non_existant() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ // Kinto expects adapters to resolve with an undefined record when the
+ // requested id does not exist.
+ let newRecord = yield adapter.get("missing-test-id");
+ do_check_eq(newRecord, undefined);
+ yield adapter.close();
+ });
+
+ // test updating records... and getting them again
+ add_task(function* test_kinto_update_get_existing() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ let originalRecord = {id:"test-id", foo:"bar"};
+ let updatedRecord = {id:"test-id", foo:"baz"};
+ yield adapter.clear();
+ yield adapter.execute((transaction) => transaction.create(originalRecord));
+ yield adapter.execute((transaction) => transaction.update(updatedRecord));
+ // ensure the record exists
+ let newRecord = yield adapter.get("test-id");
+ // ensure the record is the same as when it was added
+ deepEqual(updatedRecord, newRecord);
+ yield adapter.close();
+ });
+
+ // test listing records
+ add_task(function* test_kinto_list() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ let originalRecord = {id:"test-id-1", foo:"bar"};
+ let records = yield adapter.list();
+ do_check_eq(records.length, 1);
+ yield adapter.execute((transaction) => transaction.create(originalRecord));
+ records = yield adapter.list();
+ do_check_eq(records.length, 2);
+ yield adapter.close();
+ });
+
+ // test aborting transaction
+ add_task(function* test_kinto_aborting_transaction() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ yield adapter.clear();
+ let record = {id: 1, foo: "bar"};
+ let error = null;
+ try {
+ yield adapter.execute((transaction) => {
+ transaction.create(record);
+ throw new Error("unexpected");
+ });
+ } catch (e) {
+ error = e;
+ }
+ do_check_neq(error, null);
+ let records = yield adapter.list();
+ do_check_eq(records.length, 0);
+ yield adapter.close();
+ });
+
+ // test save and get last modified
+ add_task(function* test_kinto_last_modified() {
+ const initialValue = 0;
+ const intendedValue = 12345678;
+
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ let lastModified = yield adapter.getLastModified();
+ do_check_eq(lastModified, initialValue);
+ let result = yield adapter.saveLastModified(intendedValue);
+ do_check_eq(result, intendedValue);
+ lastModified = yield adapter.getLastModified();
+ do_check_eq(lastModified, intendedValue);
+
+ // test saveLastModified parses values correctly
+ result = yield adapter.saveLastModified(" " + intendedValue + " blah");
+ // should resolve with the parsed int
+ do_check_eq(result, intendedValue);
+ // and should have saved correctly
+ lastModified = yield adapter.getLastModified();
+ do_check_eq(lastModified, intendedValue);
+ yield adapter.close();
+ });
+
+ // test loadDump(records)
+ add_task(function* test_kinto_import_records() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ let record1 = {id: 1, foo: "bar"};
+ let record2 = {id: 2, foo: "baz"};
+ let impactedRecords = yield adapter.loadDump([
+ record1, record2
+ ]);
+ do_check_eq(impactedRecords.length, 2);
+ let newRecord1 = yield adapter.get("1");
+ // ensure the record is the same as when it was added
+ deepEqual(record1, newRecord1);
+ let newRecord2 = yield adapter.get("2");
+ // ensure the record is the same as when it was added
+ deepEqual(record2, newRecord2);
+ yield adapter.close();
+ });
+
+ add_task(function* test_kinto_import_records_should_override_existing() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ yield adapter.clear();
+ let records = yield adapter.list();
+ do_check_eq(records.length, 0);
+ let impactedRecords = yield adapter.loadDump([
+ {id: 1, foo: "bar"},
+ {id: 2, foo: "baz"},
+ ]);
+ do_check_eq(impactedRecords.length, 2);
+ yield adapter.loadDump([
+ {id: 1, foo: "baz"},
+ {id: 3, foo: "bab"},
+ ]);
+ records = yield adapter.list();
+ do_check_eq(records.length, 3);
+ let newRecord1 = yield adapter.get("1");
+ deepEqual(newRecord1.foo, "baz");
+ yield adapter.close();
+ });
+
+ add_task(function* test_import_updates_lastModified() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ yield adapter.loadDump([
+ {id: 1, foo: "bar", last_modified: 1457896541},
+ {id: 2, foo: "baz", last_modified: 1458796542},
+ ]);
+ let lastModified = yield adapter.getLastModified();
+ do_check_eq(lastModified, 1458796542);
+ yield adapter.close();
+ });
+
+ add_task(function* test_import_preserves_older_lastModified() {
+ let adapter = do_get_kinto_adapter();
+ yield adapter.open();
+ yield adapter.saveLastModified(1458796543);
+
+ yield adapter.loadDump([
+ {id: 1, foo: "bar", last_modified: 1457896541},
+ {id: 2, foo: "baz", last_modified: 1458796542},
+ ]);
+ let lastModified = yield adapter.getLastModified();
+ do_check_eq(lastModified, 1458796543);
+ yield adapter.close();
+ });
+}
+
+// test kinto db setup and operations in various scenarios
+// test from scratch - no pre-existing database
+add_test(function test_db_creation() {
+ add_test(function test_create_from_scratch() {
+ // ensure the file does not exist in the profile
+ let kintoDB = do_get_kinto_db();
+ do_check_false(kintoDB.exists());
+ run_next_test();
+ });
+
+ test_collection_operations();
+
+ cleanup_kinto();
+ run_next_test();
+});
+
+// this is the closest we can get to a schema version upgrade at v1 - test an
+// existing database
+add_test(function test_creation_from_empty_db() {
+ add_test(function test_create_from_empty_db() {
+ // place an empty kinto db file in the profile
+ let profile = do_get_profile();
+ let kintoDB = do_get_kinto_db();
+
+ let emptyDB = do_get_file("test_storage_adapter/empty.sqlite");
+ emptyDB.copyTo(profile, kintoFilename);
+
+ run_next_test();
+ });
+
+ test_collection_operations();
+
+ cleanup_kinto();
+ run_next_test();
+});
+
+function run_test() {
+ run_next_test();
+}
diff --git a/services/common/tests/unit/test_storage_adapter/empty.sqlite b/services/common/tests/unit/test_storage_adapter/empty.sqlite
new file mode 100644
index 000000000..7f295b414
--- /dev/null
+++ b/services/common/tests/unit/test_storage_adapter/empty.sqlite
Binary files differ
diff --git a/services/common/tests/unit/test_storage_server.js b/services/common/tests/unit/test_storage_server.js
new file mode 100644
index 000000000..04b4dfbbb
--- /dev/null
+++ b/services/common/tests/unit/test_storage_server.js
@@ -0,0 +1,692 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://testing-common/services/common/storageserver.js");
+
+const DEFAULT_USER = "123";
+const DEFAULT_PASSWORD = "password";
+
+/**
+ * Helper function to prepare a RESTRequest against the server.
+ */
+function localRequest(server, path, user=DEFAULT_USER, password=DEFAULT_PASSWORD) {
+ _("localRequest: " + path);
+ let identity = server.server.identity;
+ let url = identity.primaryScheme + "://" + identity.primaryHost + ":" +
+ identity.primaryPort + path;
+ _("url: " + url);
+ let req = new RESTRequest(url);
+
+ let header = basic_auth_header(user, password);
+ req.setHeader("Authorization", header);
+ req.setHeader("Accept", "application/json");
+
+ return req;
+}
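+
+// For example, localRequest(server, "/2.0/123/info/collections") returns a
+// RESTRequest against the local test server that is already authenticated as
+// the default user and asks for a JSON response.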
+
+/**
+ * Helper function to validate an HTTP response from the server.
+ */
+function validateResponse(response) {
+ do_check_true("x-timestamp" in response.headers);
+
+ if ("content-length" in response.headers) {
+ let cl = parseInt(response.headers["content-length"]);
+
+ if (cl != 0) {
+ do_check_true("content-type" in response.headers);
+ do_check_eq("application/json", response.headers["content-type"]);
+ }
+ }
+
+ if (response.status == 204 || response.status == 304) {
+ do_check_false("content-type" in response.headers);
+
+ if ("content-length" in response.headers) {
+ do_check_eq(response.headers["content-length"], "0");
+ }
+ }
+
+ if (response.status == 405) {
+ do_check_true("allow" in response.headers);
+ }
+}
+
+/**
+ * Helper function to synchronously wait for a response and validate it.
+ */
+function waitAndValidateResponse(cb, request) {
+ let error = cb.wait();
+
+ if (!error) {
+ validateResponse(request.response);
+ }
+
+ return error;
+}
+
+/**
+ * Helper function to synchronously perform a GET request.
+ *
+ * @return Error instance or null if no error.
+ */
+function doGetRequest(request) {
+ let cb = Async.makeSpinningCallback();
+ request.get(cb);
+
+ return waitAndValidateResponse(cb, request);
+}
+
+/**
+ * Helper function to synchronously perform a PUT request.
+ *
+ * @return Error instance or null if no error.
+ */
+function doPutRequest(request, data) {
+ let cb = Async.makeSpinningCallback();
+ request.put(data, cb);
+
+ return waitAndValidateResponse(cb, request);
+}
+
+/**
+ * Helper function to synchronously perform a DELETE request.
+ *
+ * @return Error or null if no error was encountered.
+ */
+function doDeleteRequest(request) {
+ let cb = Async.makeSpinningCallback();
+ request.delete(cb);
+
+ return waitAndValidateResponse(cb, request);
+}
+
+function run_test() {
+ Log.repository.getLogger("Services.Common.Test.StorageServer").level =
+ Log.Level.Trace;
+ initTestLogging();
+
+ run_next_test();
+}
+
+add_test(function test_creation() {
+ _("Ensure a simple server can be created.");
+
+ // Explicit callback for this one.
+ let server = new StorageServer({
+ __proto__: StorageServerCallback,
+ });
+ do_check_true(!!server);
+
+ server.start(-1, function () {
+ _("Started on " + server.port);
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_synchronous_start() {
+ _("Ensure starting using startSynchronous works.");
+
+ let server = new StorageServer();
+ server.startSynchronous();
+ server.stop(run_next_test);
+});
+
+add_test(function test_url_parsing() {
+ _("Ensure server parses URLs properly.");
+
+ let server = new StorageServer();
+
+ // Check that we can parse a BSO URI.
+ let parts = server.pathRE.exec("/2.0/12345/storage/crypto/keys");
+ let [all, version, user, first, rest] = parts;
+ do_check_eq(all, "/2.0/12345/storage/crypto/keys");
+ do_check_eq(version, "2.0");
+ do_check_eq(user, "12345");
+ do_check_eq(first, "storage");
+ do_check_eq(rest, "crypto/keys");
+ do_check_eq(null, server.pathRE.exec("/nothing/else"));
+
+ // Check that we can parse a collection URI.
+ parts = server.pathRE.exec("/2.0/123/storage/crypto");
+ [all, version, user, first, rest] = parts;
+ do_check_eq(all, "/2.0/123/storage/crypto");
+ do_check_eq(version, "2.0");
+ do_check_eq(user, "123");
+ do_check_eq(first, "storage");
+ do_check_eq(rest, "crypto");
+
+ // We don't allow trailing slash on storage URI.
+ parts = server.pathRE.exec("/2.0/1234/storage/");
+ do_check_eq(parts, undefined);
+
+ // storage alone is a valid request.
+ parts = server.pathRE.exec("/2.0/123456/storage");
+ [all, version, user, first, rest] = parts;
+ do_check_eq(all, "/2.0/123456/storage");
+ do_check_eq(version, "2.0");
+ do_check_eq(user, "123456");
+ do_check_eq(first, "storage");
+ do_check_eq(rest, undefined);
+
+ parts = server.storageRE.exec("storage");
+ let storage, collection, id;
+ [all, storage, collection, id] = parts;
+ do_check_eq(all, "storage");
+ do_check_eq(collection, undefined);
+
+ run_next_test();
+});
+
+add_test(function test_basic_http() {
+ let server = new StorageServer();
+ server.registerUser("345", "password");
+ do_check_true(server.userExists("345"));
+ server.startSynchronous();
+
+ _("Started on " + server.port);
+ do_check_eq(server.requestCount, 0);
+ let req = localRequest(server, "/2.0/storage/crypto/keys");
+ _("req is " + req);
+ req.get(function (err) {
+ do_check_eq(null, err);
+ do_check_eq(server.requestCount, 1);
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_info_collections() {
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+
+ let path = "/2.0/123/info/collections";
+
+ _("info/collections on empty server should be empty object.");
+ let request = localRequest(server, path, "123", "password");
+ let error = doGetRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 200);
+ do_check_eq(request.response.body, "{}");
+
+ _("Creating an empty collection should result in collection appearing.");
+ let coll = server.createCollection("123", "col1");
+ request = localRequest(server, path, "123", "password");
+ error = doGetRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 200);
+ let info = JSON.parse(request.response.body);
+ do_check_attribute_count(info, 1);
+ do_check_true("col1" in info);
+ do_check_eq(info.col1, coll.timestamp);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_get_existing() {
+ _("Ensure that BSO retrieval works.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.createContents("123", {
+ test: {"bso": {"foo": "bar"}}
+ });
+ server.startSynchronous();
+
+ let coll = server.user("123").collection("test");
+
+ let request = localRequest(server, "/2.0/123/storage/test/bso", "123",
+ "password");
+ let error = doGetRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 200);
+ do_check_eq(request.response.headers["content-type"], "application/json");
+ let bso = JSON.parse(request.response.body);
+ do_check_attribute_count(bso, 3);
+ do_check_eq(bso.id, "bso");
+ do_check_eq(bso.modified, coll.bso("bso").modified);
+ let payload = JSON.parse(bso.payload);
+ do_check_attribute_count(payload, 1);
+ do_check_eq(payload.foo, "bar");
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_percent_decoding() {
+ _("Ensure query string arguments with percent encoded are handled.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+
+ let coll = server.user("123").createCollection("test");
+ coll.insert("001", {foo: "bar"});
+ coll.insert("002", {bar: "foo"});
+
+ let request = localRequest(server, "/2.0/123/storage/test?ids=001%2C002",
+ "123", "password");
+ let error = doGetRequest(request);
+ do_check_null(error);
+ do_check_eq(request.response.status, 200);
+ let items = JSON.parse(request.response.body).items;
+ do_check_attribute_count(items, 2);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_404() {
+ _("Ensure the server responds with a 404 if a BSO does not exist.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.createContents("123", {
+ test: {}
+ });
+ server.startSynchronous();
+
+ let request = localRequest(server, "/2.0/123/storage/test/foo");
+ let error = doGetRequest(request);
+ do_check_eq(error, null);
+
+ do_check_eq(request.response.status, 404);
+ do_check_false("content-type" in request.response.headers);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_if_modified_since_304() {
+ _("Ensure the server responds properly to X-If-Modified-Since for BSOs.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.createContents("123", {
+ test: {bso: {foo: "bar"}}
+ });
+ server.startSynchronous();
+
+ let coll = server.user("123").collection("test");
+ do_check_neq(coll, null);
+
+ // Rewind clock just in case.
+ coll.timestamp -= 10000;
+ coll.bso("bso").modified -= 10000;
+
+ let request = localRequest(server, "/2.0/123/storage/test/bso",
+ "123", "password");
+ request.setHeader("X-If-Modified-Since", "" + server.serverTime());
+ let error = doGetRequest(request);
+ do_check_eq(null, error);
+
+ do_check_eq(request.response.status, 304);
+ do_check_false("content-type" in request.response.headers);
+
+ request = localRequest(server, "/2.0/123/storage/test/bso",
+ "123", "password");
+ request.setHeader("X-If-Modified-Since", "" + (server.serverTime() - 20000));
+ error = doGetRequest(request);
+ do_check_eq(null, error);
+ do_check_eq(request.response.status, 200);
+ do_check_eq(request.response.headers["content-type"], "application/json");
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_if_unmodified_since() {
+ _("Ensure X-If-Unmodified-Since works properly on BSOs.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.createContents("123", {
+ test: {bso: {foo: "bar"}}
+ });
+ server.startSynchronous();
+
+ let coll = server.user("123").collection("test");
+ do_check_neq(coll, null);
+
+ let time = coll.bso("bso").modified;
+
+ _("Ensure we get a 412 for specified times older than server time.");
+ let request = localRequest(server, "/2.0/123/storage/test/bso",
+ "123", "password");
+ request.setHeader("X-If-Unmodified-Since", time - 5000);
+ request.setHeader("Content-Type", "application/json");
+ let payload = JSON.stringify({"payload": "foobar"});
+ let error = doPutRequest(request, payload);
+ do_check_eq(null, error);
+ do_check_eq(request.response.status, 412);
+
+ _("Ensure we get a 204 if update goes through.");
+ request = localRequest(server, "/2.0/123/storage/test/bso",
+ "123", "password");
+ request.setHeader("Content-Type", "application/json");
+ request.setHeader("X-If-Unmodified-Since", time + 1);
+ error = doPutRequest(request, payload);
+ do_check_eq(null, error);
+ do_check_eq(request.response.status, 204);
+ do_check_true(coll.timestamp > time);
+
+ // Not sure why a client would send X-If-Unmodified-Since if a BSO doesn't
+ // exist. But, why not test it?
+ _("Ensure we get a 201 if creation goes through.");
+ request = localRequest(server, "/2.0/123/storage/test/none",
+ "123", "password");
+ request.setHeader("Content-Type", "application/json");
+ request.setHeader("X-If-Unmodified-Since", time);
+ error = doPutRequest(request, payload);
+ do_check_eq(null, error);
+ do_check_eq(request.response.status, 201);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_delete_not_exist() {
+ _("Ensure server behaves properly when deleting a BSO that does not exist.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.user("123").createCollection("empty");
+ server.startSynchronous();
+
+ server.callback.onItemDeleted = function onItemDeleted(username, collection,
+ id) {
+ do_throw("onItemDeleted should not have been called.");
+ };
+
+ let request = localRequest(server, "/2.0/123/storage/empty/nada",
+ "123", "password");
+ let error = doDeleteRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 404);
+ do_check_false("content-type" in request.response.headers);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_delete_exists() {
+ _("Ensure proper semantics when deleting a BSO that exists.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+
+ let coll = server.user("123").createCollection("test");
+ let bso = coll.insert("myid", {foo: "bar"});
+ let timestamp = coll.timestamp;
+
+ server.callback.onItemDeleted = function onDeleted(username, collection, id) {
+ delete server.callback.onItemDeleted;
+ do_check_eq(username, "123");
+ do_check_eq(collection, "test");
+ do_check_eq(id, "myid");
+ };
+
+ let request = localRequest(server, "/2.0/123/storage/test/myid",
+ "123", "password");
+ let error = doDeleteRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 204);
+ do_check_eq(coll.bsos().length, 0);
+ do_check_true(coll.timestamp > timestamp);
+
+ _("On next request the BSO should not exist.");
+ request = localRequest(server, "/2.0/123/storage/test/myid",
+ "123", "password");
+ error = doGetRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 404);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_delete_unmodified() {
+ _("Ensure X-If-Unmodified-Since works when deleting BSOs.");
+
+ let server = new StorageServer();
+ server.startSynchronous();
+ server.registerUser("123", "password");
+ let coll = server.user("123").createCollection("test");
+ let bso = coll.insert("myid", {foo: "bar"});
+
+ let modified = bso.modified;
+
+ _("Issuing a DELETE with an older time should fail.");
+ let path = "/2.0/123/storage/test/myid";
+ let request = localRequest(server, path, "123", "password");
+ request.setHeader("X-If-Unmodified-Since", modified - 1000);
+ let error = doDeleteRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 412);
+ do_check_false("content-type" in request.response.headers);
+ do_check_neq(coll.bso("myid"), null);
+
+ _("Issuing a DELETE with a newer time should work.");
+ request = localRequest(server, path, "123", "password");
+ request.setHeader("X-If-Unmodified-Since", modified + 1000);
+ error = doDeleteRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 204);
+ do_check_true(coll.bso("myid").deleted);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_collection_get_unmodified_since() {
+ _("Ensure conditional unmodified get on collection works when it should.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+ let collection = server.user("123").createCollection("testcoll");
+ collection.insert("bso0", {foo: "bar"});
+
+ let serverModified = collection.timestamp;
+
+ let request1 = localRequest(server, "/2.0/123/storage/testcoll",
+ "123", "password");
+ request1.setHeader("X-If-Unmodified-Since", serverModified);
+ let error = doGetRequest(request1);
+ do_check_null(error);
+ do_check_eq(request1.response.status, 200);
+
+ let request2 = localRequest(server, "/2.0/123/storage/testcoll",
+ "123", "password");
+ request2.setHeader("X-If-Unmodified-Since", serverModified - 1);
+ error = doGetRequest(request2);
+ do_check_null(error);
+ do_check_eq(request2.response.status, 412);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_bso_get_unmodified_since() {
+ _("Ensure conditional unmodified get on BSO works appropriately.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+ let collection = server.user("123").createCollection("testcoll");
+ let bso = collection.insert("bso0", {foo: "bar"});
+
+ let serverModified = bso.modified;
+
+ let request1 = localRequest(server, "/2.0/123/storage/testcoll/bso0",
+ "123", "password");
+ request1.setHeader("X-If-Unmodified-Since", serverModified);
+ let error = doGetRequest(request1);
+ do_check_null(error);
+ do_check_eq(request1.response.status, 200);
+
+ let request2 = localRequest(server, "/2.0/123/storage/testcoll/bso0",
+ "123", "password");
+ request2.setHeader("X-If-Unmodified-Since", serverModified - 1);
+ error = doGetRequest(request2);
+ do_check_null(error);
+ do_check_eq(request2.response.status, 412);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_missing_collection_404() {
+ _("Ensure a missing collection returns a 404.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+
+ let request = localRequest(server, "/2.0/123/storage/none", "123", "password");
+ let error = doGetRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 404);
+ do_check_false("content-type" in request.response.headers);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_get_storage_405() {
+ _("Ensure that a GET on /storage results in a 405.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+
+ let request = localRequest(server, "/2.0/123/storage", "123", "password");
+ let error = doGetRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 405);
+ do_check_eq(request.response.headers["allow"], "DELETE");
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_delete_storage() {
+ _("Ensure that deleting all of storage works.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.createContents("123", {
+ foo: {a: {foo: "bar"}, b: {bar: "foo"}},
+ baz: {c: {bob: "law"}, blah: {law: "blog"}}
+ });
+
+ server.startSynchronous();
+
+ let request = localRequest(server, "/2.0/123/storage", "123", "password");
+ let error = doDeleteRequest(request);
+ do_check_eq(error, null);
+ do_check_eq(request.response.status, 204);
+ do_check_attribute_count(server.users["123"].collections, 0);
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_x_num_records() {
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+
+ server.createContents("123", {
+ crypto: {foos: {foo: "bar"},
+ bars: {foo: "baz"}}
+ });
+ server.startSynchronous();
+ let bso = localRequest(server, "/2.0/123/storage/crypto/foos");
+ bso.get(function (err) {
+ // BSO fetches don't have one.
+ do_check_false("x-num-records" in this.response.headers);
+ let col = localRequest(server, "/2.0/123/storage/crypto");
+ col.get(function (err) {
+ // Collection fetches do.
+ do_check_eq(this.response.headers["x-num-records"], "2");
+ server.stop(run_next_test);
+ });
+ });
+});
+
+add_test(function test_put_delete_put() {
+ _("Bug 790397: Ensure BSO deleted flag is reset on PUT.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.createContents("123", {
+ test: {bso: {foo: "bar"}}
+ });
+ server.startSynchronous();
+
+ _("Ensure we can PUT an existing record.");
+ let request1 = localRequest(server, "/2.0/123/storage/test/bso", "123", "password");
+ request1.setHeader("Content-Type", "application/json");
+ let payload1 = JSON.stringify({"payload": "foobar"});
+ let error1 = doPutRequest(request1, payload1);
+ do_check_eq(null, error1);
+ do_check_eq(request1.response.status, 204);
+
+ _("Ensure we can DELETE it.");
+ let request2 = localRequest(server, "/2.0/123/storage/test/bso", "123", "password");
+ let error2 = doDeleteRequest(request2);
+ do_check_eq(error2, null);
+ do_check_eq(request2.response.status, 204);
+ do_check_false("content-type" in request2.response.headers);
+
+ _("Ensure we can PUT a previously deleted record.");
+ let request3 = localRequest(server, "/2.0/123/storage/test/bso", "123", "password");
+ request3.setHeader("Content-Type", "application/json");
+ let payload3 = JSON.stringify({"payload": "foobar"});
+ let error3 = doPutRequest(request3, payload3);
+ do_check_eq(null, error3);
+ do_check_eq(request3.response.status, 201);
+
+ _("Ensure we can GET the re-uploaded record.");
+ let request4 = localRequest(server, "/2.0/123/storage/test/bso", "123", "password");
+ let error4 = doGetRequest(request4);
+ do_check_eq(error4, null);
+ do_check_eq(request4.response.status, 200);
+ do_check_eq(request4.response.headers["content-type"], "application/json");
+
+ server.stop(run_next_test);
+});
+
+add_test(function test_collection_get_newer() {
+ _("Ensure get with newer argument on collection works.");
+
+ let server = new StorageServer();
+ server.registerUser("123", "password");
+ server.startSynchronous();
+
+ let coll = server.user("123").createCollection("test");
+ let bso1 = coll.insert("001", {foo: "bar"});
+ let bso2 = coll.insert("002", {bar: "foo"});
+
+ // Don't want both records to have the same timestamp.
+ bso2.modified = bso1.modified + 1000;
+
+ function newerRequest(newer) {
+ return localRequest(server, "/2.0/123/storage/test?newer=" + newer,
+ "123", "password");
+ }
+
+ let request1 = newerRequest(0);
+ let error1 = doGetRequest(request1);
+ do_check_null(error1);
+ do_check_eq(request1.response.status, 200);
+ let items1 = JSON.parse(request1.response.body).items;
+ do_check_attribute_count(items1, 2);
+
+ let request2 = newerRequest(bso1.modified + 1);
+ let error2 = doGetRequest(request2);
+ do_check_null(error2);
+ do_check_eq(request2.response.status, 200);
+ let items2 = JSON.parse(request2.response.body).items;
+ do_check_attribute_count(items2, 1);
+
+ let request3 = newerRequest(bso2.modified + 1);
+ let error3 = doGetRequest(request3);
+ do_check_null(error3);
+ do_check_eq(request3.response.status, 200);
+ let items3 = JSON.parse(request3.response.body).items;
+ do_check_attribute_count(items3, 0);
+
+ server.stop(run_next_test);
+});
diff --git a/services/common/tests/unit/test_tokenauthenticatedrequest.js b/services/common/tests/unit/test_tokenauthenticatedrequest.js
new file mode 100644
index 000000000..0a2db0425
--- /dev/null
+++ b/services/common/tests/unit/test_tokenauthenticatedrequest.js
@@ -0,0 +1,52 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-crypto/utils.js");
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ initTestLogging("Trace");
+ run_next_test();
+}
+
+add_test(function test_authenticated_request() {
+ _("Ensure that sending a MAC authenticated GET request works as expected.");
+
+ let message = "Great Success!";
+
+ // TODO: We use a preset key here, but use getTokenFromBrowserIDAssertion()
+ // from TokenServerClient to get a real one when possible. (Bug 745800)
+ let id = "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x";
+ let key = "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=";
+ let method = "GET";
+
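+ // Build the timestamp/nonce pair that is signed into the expected MAC
+ // Authorization header below and also handed to the request, so both sides
+ // compute the same signature.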
+ let nonce = btoa(CryptoUtils.generateRandomBytes(16));
+ let ts = Math.floor(Date.now() / 1000);
+ let extra = {ts: ts, nonce: nonce};
+
+ let auth;
+
+ let server = httpd_setup({"/foo": function(request, response) {
+ do_check_true(request.hasHeader("Authorization"));
+ do_check_eq(auth, request.getHeader("Authorization"));
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(message, message.length);
+ }
+ });
+ let uri = CommonUtils.makeURI(server.baseURI + "/foo");
+ let sig = CryptoUtils.computeHTTPMACSHA1(id, key, method, uri, extra);
+ auth = sig.getHeader();
+
+ let req = new TokenAuthenticatedRESTRequest(uri, {id: id, key: key}, extra);
+ let cb = Async.makeSpinningCallback();
+ req.get(cb);
+ let result = cb.wait();
+
+ do_check_eq(null, result);
+ do_check_eq(message, req.response.body);
+
+ server.stop(run_next_test);
+});
diff --git a/services/common/tests/unit/test_tokenserverclient.js b/services/common/tests/unit/test_tokenserverclient.js
new file mode 100644
index 000000000..a3650f047
--- /dev/null
+++ b/services/common/tests/unit/test_tokenserverclient.js
@@ -0,0 +1,466 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/async.js");
+Cu.import("resource://services-common/tokenserverclient.js");
+
+function run_test() {
+ initTestLogging("Trace");
+
+ run_next_test();
+}
+
+add_test(function test_working_bid_exchange() {
+  _("Ensure that a successful BrowserID token exchange works as expected.");
+
+ let service = "http://example.com/foo";
+ let duration = 300;
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ do_check_true(request.hasHeader("accept"));
+ do_check_false(request.hasHeader("x-conditions-accepted"));
+ do_check_eq("application/json", request.getHeader("accept"));
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json");
+
+ let body = JSON.stringify({
+ id: "id",
+ key: "key",
+ api_endpoint: service,
+ uid: "uid",
+ duration: duration,
+ });
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let cb = Async.makeSpinningCallback();
+ let url = server.baseURI + "/1.0/foo/1.0";
+ client.getTokenFromBrowserIDAssertion(url, "assertion", cb);
+ let result = cb.wait();
+ do_check_eq("object", typeof(result));
+ do_check_attribute_count(result, 6);
+ do_check_eq(service, result.endpoint);
+ do_check_eq("id", result.id);
+ do_check_eq("key", result.key);
+ do_check_eq("uid", result.uid);
+ do_check_eq(duration, result.duration);
+ server.stop(run_next_test);
+});
+
+add_test(function test_invalid_arguments() {
+ _("Ensure invalid arguments to APIs are rejected.");
+
+ let args = [
+ [null, "assertion", function() {}],
+ ["http://example.com/", null, function() {}],
+ ["http://example.com/", "assertion", null]
+ ];
+
+ for (let arg of args) {
+ try {
+ let client = new TokenServerClient();
+ client.getTokenFromBrowserIDAssertion(arg[0], arg[1], arg[2]);
+ do_throw("Should never get here.");
+ } catch (ex) {
+ do_check_true(ex instanceof TokenServerClientError);
+ }
+ }
+
+ run_next_test();
+});
+
+add_test(function test_conditions_required_response_handling() {
+ _("Ensure that a conditions required response is handled properly.");
+
+ let description = "Need to accept conditions";
+ let tosURL = "http://example.com/tos";
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ do_check_false(request.hasHeader("x-conditions-accepted"));
+
+ response.setStatusLine(request.httpVersion, 403, "Forbidden");
+ response.setHeader("Content-Type", "application/json");
+
+ let body = JSON.stringify({
+ errors: [{description: description, location: "body", name: ""}],
+ urls: {tos: tosURL}
+ });
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+
+ function onResponse(error, token) {
+ do_check_true(error instanceof TokenServerClientServerError);
+ do_check_eq(error.cause, "conditions-required");
+    // Check that JSON.stringify works on our errors, as our logging will try to use it.
+ do_check_true(JSON.stringify(error), "JSON.stringify worked");
+ do_check_null(token);
+
+ do_check_eq(error.urls.tos, tosURL);
+
+ server.stop(run_next_test);
+ }
+
+ client.getTokenFromBrowserIDAssertion(url, "assertion", onResponse);
+});
+
+add_test(function test_invalid_403_no_content_type() {
+ _("Ensure that a 403 without content-type is handled properly.");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 403, "Forbidden");
+ // No Content-Type header by design.
+
+ let body = JSON.stringify({
+ errors: [{description: "irrelevant", location: "body", name: ""}],
+ urls: {foo: "http://bar"}
+ });
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+
+ function onResponse(error, token) {
+ do_check_true(error instanceof TokenServerClientServerError);
+ do_check_eq(error.cause, "malformed-response");
+ do_check_null(token);
+
+ do_check_null(error.urls);
+
+ server.stop(run_next_test);
+ }
+
+ client.getTokenFromBrowserIDAssertion(url, "assertion", onResponse);
+});
+
+add_test(function test_invalid_403_bad_json() {
+  _("Ensure that a 403 response with malformed JSON is handled properly.");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 403, "Forbidden");
+ response.setHeader("Content-Type", "application/json; charset=utf-8");
+
+ let body = JSON.stringify({
+ foo: "bar"
+ });
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+
+ function onResponse(error, token) {
+ do_check_true(error instanceof TokenServerClientServerError);
+ do_check_eq(error.cause, "malformed-response");
+ do_check_null(token);
+ do_check_null(error.urls);
+
+ server.stop(run_next_test);
+ }
+
+ client.getTokenFromBrowserIDAssertion(url, "assertion", onResponse);
+});
+
+add_test(function test_403_no_urls() {
+ _("Ensure that a 403 without a urls field is handled properly.");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 403, "Forbidden");
+ response.setHeader("Content-Type", "application/json; charset=utf-8");
+
+ let body = "{}";
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+
+ client.getTokenFromBrowserIDAssertion(url, "assertion",
+ function onResponse(error, result) {
+ do_check_true(error instanceof TokenServerClientServerError);
+ do_check_eq(error.cause, "malformed-response");
+ do_check_null(result);
+
+ server.stop(run_next_test);
+
+ });
+});
+
+add_test(function test_send_extra_headers() {
+  _("Ensure that extra headers are sent along with the token request.");
+
+ let duration = 300;
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ do_check_true(request.hasHeader("x-foo"));
+ do_check_eq(request.getHeader("x-foo"), "42");
+
+ do_check_true(request.hasHeader("x-bar"));
+ do_check_eq(request.getHeader("x-bar"), "17");
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json");
+
+ let body = JSON.stringify({
+ id: "id",
+ key: "key",
+ api_endpoint: "http://example.com/",
+ uid: "uid",
+ duration: duration,
+ });
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+
+ function onResponse(error, token) {
+ do_check_null(error);
+
+ // Other tests validate other things.
+
+ server.stop(run_next_test);
+ }
+
+ let extra = {
+ "X-Foo": 42,
+ "X-Bar": 17
+ };
+ client.getTokenFromBrowserIDAssertion(url, "assertion", onResponse, extra);
+});
+
+add_test(function test_error_404_empty() {
+  _("Ensure that a 404 without a proper JSON error body is handled properly.");
+
+ let server = httpd_setup();
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/foo";
+ client.getTokenFromBrowserIDAssertion(url, "assertion", function(error, r) {
+ do_check_true(error instanceof TokenServerClientServerError);
+ do_check_eq(error.cause, "malformed-response");
+
+ do_check_neq(null, error.response);
+ do_check_null(r);
+
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_error_404_proper_response() {
+ _("Ensure that a Cornice error report for 404 is handled properly.");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 404, "Not Found");
+ response.setHeader("Content-Type", "application/json; charset=utf-8");
+
+ let body = JSON.stringify({
+ status: 404,
+ errors: [{description: "No service", location: "body", name: ""}],
+ });
+
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ function onResponse(error, token) {
+ do_check_true(error instanceof TokenServerClientServerError);
+ do_check_eq(error.cause, "unknown-service");
+ do_check_null(token);
+
+ server.stop(run_next_test);
+ }
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+ client.getTokenFromBrowserIDAssertion(url, "assertion", onResponse);
+});
+
+add_test(function test_bad_json() {
+ _("Ensure that malformed JSON is handled properly.");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json");
+
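+      // Deliberately malformed JSON body.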
+      let body = '{"id": "id", baz}';
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+ client.getTokenFromBrowserIDAssertion(url, "assertion", function(error, r) {
+ do_check_neq(null, error);
+ do_check_eq("TokenServerClientServerError", error.name);
+ do_check_eq(error.cause, "malformed-response");
+ do_check_neq(null, error.response);
+ do_check_eq(null, r);
+
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_400_response() {
+ _("Ensure HTTP 400 is converted to malformed-request.");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 400, "Bad Request");
+ response.setHeader("Content-Type", "application/json; charset=utf-8");
+
+ let body = "{}"; // Actual content may not be used.
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+ client.getTokenFromBrowserIDAssertion(url, "assertion", function(error, r) {
+ do_check_neq(null, error);
+ do_check_eq("TokenServerClientServerError", error.name);
+ do_check_neq(null, error.response);
+ do_check_eq(error.cause, "malformed-request");
+
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_401_with_error_cause() {
+ _("Ensure 401 cause is specified in body.status");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 401, "Unauthorized");
+ response.setHeader("Content-Type", "application/json; charset=utf-8");
+
+ let body = JSON.stringify({status: "no-soup-for-you"});
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let client = new TokenServerClient();
+ let url = server.baseURI + "/1.0/foo/1.0";
+ client.getTokenFromBrowserIDAssertion(url, "assertion", function(error, r) {
+ do_check_neq(null, error);
+ do_check_eq("TokenServerClientServerError", error.name);
+ do_check_neq(null, error.response);
+ do_check_eq(error.cause, "no-soup-for-you");
+
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_unhandled_media_type() {
+ _("Ensure that unhandled media types throw an error.");
+
+ let server = httpd_setup({
+ "/1.0/foo/1.0": function(request, response) {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "text/plain");
+
+ let body = "hello, world";
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let url = server.baseURI + "/1.0/foo/1.0";
+ let client = new TokenServerClient();
+ client.getTokenFromBrowserIDAssertion(url, "assertion", function(error, r) {
+ do_check_neq(null, error);
+ do_check_eq("TokenServerClientServerError", error.name);
+ do_check_neq(null, error.response);
+ do_check_eq(null, r);
+
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_rich_media_types() {
+ _("Ensure that extra tokens in the media type aren't rejected.");
+
+ let duration = 300;
+ let server = httpd_setup({
+ "/foo": function(request, response) {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json; foo=bar; bar=foo");
+
+ let body = JSON.stringify({
+ id: "id",
+ key: "key",
+ api_endpoint: "foo",
+ uid: "uid",
+ duration: duration,
+ });
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let url = server.baseURI + "/foo";
+ let client = new TokenServerClient();
+ client.getTokenFromBrowserIDAssertion(url, "assertion", function(error, r) {
+ do_check_eq(null, error);
+
+ server.stop(run_next_test);
+ });
+});
+
+add_test(function test_exception_during_callback() {
+ _("Ensure that exceptions thrown during callback handling are handled.");
+
+ let duration = 300;
+ let server = httpd_setup({
+ "/foo": function(request, response) {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.setHeader("Content-Type", "application/json");
+
+ let body = JSON.stringify({
+ id: "id",
+ key: "key",
+ api_endpoint: "foo",
+ uid: "uid",
+ duration: duration,
+ });
+ response.bodyOutputStream.write(body, body.length);
+ }
+ });
+
+ let url = server.baseURI + "/foo";
+ let client = new TokenServerClient();
+ let cb = Async.makeSpinningCallback();
+ let callbackCount = 0;
+
+ client.getTokenFromBrowserIDAssertion(url, "assertion", function(error, r) {
+ do_check_eq(null, error);
+
+ cb();
+
+ callbackCount += 1;
+ throw new Error("I am a bad function!");
+ });
+
+ cb.wait();
+ // This relies on some heavy event loop magic. The error in the main
+ // callback should already have been raised at this point.
+ do_check_eq(callbackCount, 1);
+
+ server.stop(run_next_test);
+});
diff --git a/services/common/tests/unit/test_utils_atob.js b/services/common/tests/unit/test_utils_atob.js
new file mode 100644
index 000000000..422fcab20
--- /dev/null
+++ b/services/common/tests/unit/test_utils_atob.js
@@ -0,0 +1,11 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ let data = ["Zm9vYmE=", "Zm9vYmE==", "Zm9vYmE==="];
+ for (let d in data) {
+ do_check_eq(CommonUtils.safeAtoB(data[d]), "fooba");
+ }
+}
diff --git a/services/common/tests/unit/test_utils_convert_string.js b/services/common/tests/unit/test_utils_convert_string.js
new file mode 100644
index 000000000..265b6734f
--- /dev/null
+++ b/services/common/tests/unit/test_utils_convert_string.js
@@ -0,0 +1,132 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://services-common/utils.js");
+
+// A wise line of Greek verse, and the utf-8 byte encoding.
+// N.b., Greek begins at utf-8 ce 91
+const TEST_STR = "πόλλ' οἶδ' ἀλώπηξ, ἀλλ' ἐχῖνος ἓν μέγα";
+const TEST_HEX = h("cf 80 cf 8c ce bb ce bb 27 20 ce bf e1 bc b6 ce"+
+ "b4 27 20 e1 bc 80 ce bb cf 8e cf 80 ce b7 ce be"+
+ "2c 20 e1 bc 80 ce bb ce bb 27 20 e1 bc 90 cf 87"+
+ "e1 bf 96 ce bd ce bf cf 82 20 e1 bc 93 ce bd 20"+
+ "ce bc ce ad ce b3 ce b1");
+// Integer byte values for the above
+const TEST_BYTES = [207,128,207,140,206,187,206,187,
+ 39, 32,206,191,225,188,182,206,
+ 180, 39, 32,225,188,128,206,187,
+ 207,142,207,128,206,183,206,190,
+ 44, 32,225,188,128,206,187,206,
+ 187, 39, 32,225,188,144,207,135,
+ 225,191,150,206,189,206,191,207,
+ 130, 32,225,188,147,206,189, 32,
+ 206,188,206,173,206,179,206,177];
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_compress_string() {
+ const INPUT = "hello";
+
+ let result = CommonUtils.convertString(INPUT, "uncompressed", "deflate");
+ do_check_eq(result.length, 13);
+
+ let result2 = CommonUtils.convertString(INPUT, "uncompressed", "deflate");
+ do_check_eq(result, result2);
+
+ let result3 = CommonUtils.convertString(result, "deflate", "uncompressed");
+ do_check_eq(result3, INPUT);
+
+ run_next_test();
+});
+
+add_test(function test_compress_utf8() {
+ const INPUT = "Árvíztűrő tükörfúrógép いろはにほへとちりぬるを Pijamalı hasta, yağız şoföre çabucak güvendi.";
+ let inputUTF8 = CommonUtils.encodeUTF8(INPUT);
+
+ let compressed = CommonUtils.convertString(inputUTF8, "uncompressed", "deflate");
+ let uncompressed = CommonUtils.convertString(compressed, "deflate", "uncompressed");
+
+ do_check_eq(uncompressed, inputUTF8);
+
+ let outputUTF8 = CommonUtils.decodeUTF8(uncompressed);
+ do_check_eq(outputUTF8, INPUT);
+
+ run_next_test();
+});
+
+add_test(function test_bad_argument() {
+ let failed = false;
+ try {
+ CommonUtils.convertString(null, "uncompressed", "deflate");
+ } catch (ex) {
+ failed = true;
+ do_check_true(ex.message.startsWith("Input string must be defined"));
+ } finally {
+ do_check_true(failed);
+ }
+
+ run_next_test();
+});
+
+add_task(function test_stringAsHex() {
+ do_check_eq(TEST_HEX, CommonUtils.stringAsHex(TEST_STR));
+});
+
+add_task(function test_hexAsString() {
+ do_check_eq(TEST_STR, CommonUtils.hexAsString(TEST_HEX));
+});
+
+add_task(function test_hexToBytes() {
+ let bytes = CommonUtils.hexToBytes(TEST_HEX);
+ do_check_eq(TEST_BYTES.length, bytes.length);
+ // Ensure that the decimal values of each byte are correct
+ do_check_true(arraysEqual(TEST_BYTES,
+ CommonUtils.stringToByteArray(bytes)));
+});
+
+add_task(function test_bytesToHex() {
+ // Create a list of our character bytes from the reference int values
+ let bytes = CommonUtils.byteArrayToString(TEST_BYTES);
+ do_check_eq(TEST_HEX, CommonUtils.bytesAsHex(bytes));
+});
+
+add_task(function test_stringToBytes() {
+ do_check_true(arraysEqual(TEST_BYTES,
+ CommonUtils.stringToByteArray(CommonUtils.stringToBytes(TEST_STR))));
+});
+
+add_task(function test_stringRoundTrip() {
+ do_check_eq(TEST_STR,
+ CommonUtils.hexAsString(CommonUtils.stringAsHex(TEST_STR)));
+});
+
+add_task(function test_hexRoundTrip() {
+ do_check_eq(TEST_HEX,
+ CommonUtils.stringAsHex(CommonUtils.hexAsString(TEST_HEX)));
+});
+
+add_task(function test_byteArrayRoundTrip() {
+ do_check_true(arraysEqual(TEST_BYTES,
+ CommonUtils.stringToByteArray(CommonUtils.byteArrayToString(TEST_BYTES))));
+});
+
+// turn formatted test vectors into normal hex strings
+function h(hexStr) {
+ return hexStr.replace(/\s+/g, "");
+}
+
+function arraysEqual(a1, a2) {
+ if (a1.length !== a2.length) {
+ return false;
+ }
+ for (let i = 0; i < a1.length; i++) {
+ if (a1[i] !== a2[i]) {
+ return false;
+ }
+ }
+ return true;
+}
diff --git a/services/common/tests/unit/test_utils_dateprefs.js b/services/common/tests/unit/test_utils_dateprefs.js
new file mode 100644
index 000000000..f16e3dbe8
--- /dev/null
+++ b/services/common/tests/unit/test_utils_dateprefs.js
@@ -0,0 +1,85 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://gre/modules/Preferences.jsm");
+Cu.import("resource://services-common/utils.js");
+
+
+var prefs = new Preferences("servicescommon.tests.");
+
+function DummyLogger() {
+ this.messages = [];
+}
+DummyLogger.prototype.warn = function warn(message) {
+ this.messages.push(message);
+};
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_set_basic() {
+ let now = new Date();
+
+ CommonUtils.setDatePref(prefs, "test00", now);
+ let value = prefs.get("test00");
+ do_check_eq(value, "" + now.getTime());
+
+ let now2 = CommonUtils.getDatePref(prefs, "test00");
+
+ do_check_eq(now.getTime(), now2.getTime());
+
+ run_next_test();
+});
+
+add_test(function test_set_bounds_checking() {
+ let d = new Date(2342354);
+
+ let failed = false;
+ try {
+ CommonUtils.setDatePref(prefs, "test01", d);
+ } catch (ex) {
+ do_check_true(ex.message.startsWith("Trying to set"));
+ failed = true;
+ }
+
+ do_check_true(failed);
+ run_next_test();
+});
+
+add_test(function test_get_bounds_checking() {
+ prefs.set("test_bounds_checking", "13241431");
+
+ let log = new DummyLogger();
+ let d = CommonUtils.getDatePref(prefs, "test_bounds_checking", 0, log);
+ do_check_eq(d.getTime(), 0);
+ do_check_eq(log.messages.length, 1);
+
+ run_next_test();
+});
+
+add_test(function test_get_bad_default() {
+ let failed = false;
+ try {
+ CommonUtils.getDatePref(prefs, "get_bad_default", new Date());
+ } catch (ex) {
+ do_check_true(ex.message.startsWith("Default value is not a number"));
+ failed = true;
+ }
+
+ do_check_true(failed);
+ run_next_test();
+});
+
+add_test(function test_get_invalid_number() {
+ prefs.set("get_invalid_number", "hello world");
+
+ let log = new DummyLogger();
+ let d = CommonUtils.getDatePref(prefs, "get_invalid_number", 42, log);
+ do_check_eq(d.getTime(), 42);
+ do_check_eq(log.messages.length, 1);
+
+ run_next_test();
+});
diff --git a/services/common/tests/unit/test_utils_deepCopy.js b/services/common/tests/unit/test_utils_deepCopy.js
new file mode 100644
index 000000000..a743d37d3
--- /dev/null
+++ b/services/common/tests/unit/test_utils_deepCopy.js
@@ -0,0 +1,18 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://testing-common/services/common/utils.js");
+
+function run_test() {
+ let thing = {o: {foo: "foo", bar: ["bar"]}, a: ["foo", {bar: "bar"}]};
+ let ret = TestingUtils.deepCopy(thing);
+  do_check_neq(ret, thing);
+ do_check_neq(ret.o, thing.o);
+ do_check_neq(ret.o.bar, thing.o.bar);
+ do_check_neq(ret.a, thing.a);
+ do_check_neq(ret.a[1], thing.a[1]);
+ do_check_eq(ret.o.foo, thing.o.foo);
+ do_check_eq(ret.o.bar[0], thing.o.bar[0]);
+ do_check_eq(ret.a[0], thing.a[0]);
+ do_check_eq(ret.a[1].bar, thing.a[1].bar);
+}
diff --git a/services/common/tests/unit/test_utils_encodeBase32.js b/services/common/tests/unit/test_utils_encodeBase32.js
new file mode 100644
index 000000000..e183040b3
--- /dev/null
+++ b/services/common/tests/unit/test_utils_encodeBase32.js
@@ -0,0 +1,51 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ // Testing byte array manipulation.
+ do_check_eq("FOOBAR", CommonUtils.byteArrayToString([70, 79, 79, 66, 65, 82]));
+ do_check_eq("", CommonUtils.byteArrayToString([]));
+
+ _("Testing encoding...");
+ // Test vectors from RFC 4648
+ do_check_eq(CommonUtils.encodeBase32(""), "");
+ do_check_eq(CommonUtils.encodeBase32("f"), "MY======");
+ do_check_eq(CommonUtils.encodeBase32("fo"), "MZXQ====");
+ do_check_eq(CommonUtils.encodeBase32("foo"), "MZXW6===");
+ do_check_eq(CommonUtils.encodeBase32("foob"), "MZXW6YQ=");
+ do_check_eq(CommonUtils.encodeBase32("fooba"), "MZXW6YTB");
+ do_check_eq(CommonUtils.encodeBase32("foobar"), "MZXW6YTBOI======");
+
+ do_check_eq(CommonUtils.encodeBase32("Bacon is a vegetable."),
+ "IJQWG33OEBUXGIDBEB3GKZ3FORQWE3DFFY======");
+
+ _("Checking assumptions...");
+ for (let i = 0; i <= 255; ++i)
+ do_check_eq(undefined | i, i);
+
+ _("Testing decoding...");
+ do_check_eq(CommonUtils.decodeBase32(""), "");
+ do_check_eq(CommonUtils.decodeBase32("MY======"), "f");
+ do_check_eq(CommonUtils.decodeBase32("MZXQ===="), "fo");
+ do_check_eq(CommonUtils.decodeBase32("MZXW6YTB"), "fooba");
+ do_check_eq(CommonUtils.decodeBase32("MZXW6YTBOI======"), "foobar");
+
+ // Same with incorrect or missing padding.
+ do_check_eq(CommonUtils.decodeBase32("MZXW6YTBOI=="), "foobar");
+ do_check_eq(CommonUtils.decodeBase32("MZXW6YTBOI"), "foobar");
+
+ let encoded = CommonUtils.encodeBase32("Bacon is a vegetable.");
+ _("Encoded to " + JSON.stringify(encoded));
+ do_check_eq(CommonUtils.decodeBase32(encoded), "Bacon is a vegetable.");
+
+ // Test failure.
+ let err;
+ try {
+ CommonUtils.decodeBase32("000");
+ } catch (ex) {
+ err = ex;
+ }
+ do_check_eq(err, "Unknown character in base32: 0");
+}
diff --git a/services/common/tests/unit/test_utils_encodeBase64URL.js b/services/common/tests/unit/test_utils_encodeBase64URL.js
new file mode 100644
index 000000000..5d55a6579
--- /dev/null
+++ b/services/common/tests/unit/test_utils_encodeBase64URL.js
@@ -0,0 +1,27 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_simple() {
+ let expected = {
+ hello: "aGVsbG8=",
+ "<>?": "PD4_",
+ };
+
+ for (let [k,v] of Object.entries(expected)) {
+ do_check_eq(CommonUtils.encodeBase64URL(k), v);
+ }
+
+ run_next_test();
+});
+
+add_test(function test_no_padding() {
+ do_check_eq(CommonUtils.encodeBase64URL("hello", false), "aGVsbG8");
+
+ run_next_test();
+});
diff --git a/services/common/tests/unit/test_utils_ensureMillisecondsTimestamp.js b/services/common/tests/unit/test_utils_ensureMillisecondsTimestamp.js
new file mode 100644
index 000000000..4e9f725ef
--- /dev/null
+++ b/services/common/tests/unit/test_utils_ensureMillisecondsTimestamp.js
@@ -0,0 +1,23 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ do_check_null(CommonUtils.ensureMillisecondsTimestamp(null));
+ do_check_null(CommonUtils.ensureMillisecondsTimestamp(0));
+ do_check_null(CommonUtils.ensureMillisecondsTimestamp("0"));
+ do_check_null(CommonUtils.ensureMillisecondsTimestamp("000"));
+
+ do_check_null(CommonUtils.ensureMillisecondsTimestamp(999 * 10000000000));
+
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp(-1); });
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp(1); });
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp(1.5); });
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp(999 * 10000000000 + 0.5); });
+
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp("-1"); });
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp("1"); });
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp("1.5"); });
+ do_check_throws(function err() { CommonUtils.ensureMillisecondsTimestamp("" + (999 * 10000000000 + 0.5)); });
+}
diff --git a/services/common/tests/unit/test_utils_json.js b/services/common/tests/unit/test_utils_json.js
new file mode 100644
index 000000000..429ac6492
--- /dev/null
+++ b/services/common/tests/unit/test_utils_json.js
@@ -0,0 +1,40 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+Cu.import("resource://gre/modules/osfile.jsm");
+
+function run_test() {
+ initTestLogging();
+ run_next_test();
+}
+
+add_test(function test_writeJSON_readJSON() {
+ _("Round-trip some JSON through the promise-based JSON writer.");
+
+ let contents = {
+ "a": 12345.67,
+ "b": {
+ "c": "héllö",
+ },
+ "d": undefined,
+ "e": null,
+ };
+
+ function checkJSON(json) {
+ do_check_eq(contents.a, json.a);
+ do_check_eq(contents.b.c, json.b.c);
+ do_check_eq(contents.d, json.d);
+ do_check_eq(contents.e, json.e);
+ run_next_test();
+ };
+
+ function doRead() {
+ CommonUtils.readJSON(path)
+ .then(checkJSON, do_throw);
+ }
+
+ let path = OS.Path.join(OS.Constants.Path.profileDir, "bar.json");
+ CommonUtils.writeJSON(contents, path)
+ .then(doRead, do_throw);
+});
diff --git a/services/common/tests/unit/test_utils_makeURI.js b/services/common/tests/unit/test_utils_makeURI.js
new file mode 100644
index 000000000..4b2b9bf71
--- /dev/null
+++ b/services/common/tests/unit/test_utils_makeURI.js
@@ -0,0 +1,66 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+_("Make sure uri strings are converted to nsIURIs");
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ _test_makeURI();
+}
+
+function _test_makeURI() {
+ _("Check http uris");
+ let uri1 = "http://mozillalabs.com/";
+ do_check_eq(CommonUtils.makeURI(uri1).spec, uri1);
+ let uri2 = "http://www.mozillalabs.com/";
+ do_check_eq(CommonUtils.makeURI(uri2).spec, uri2);
+ let uri3 = "http://mozillalabs.com/path";
+ do_check_eq(CommonUtils.makeURI(uri3).spec, uri3);
+ let uri4 = "http://mozillalabs.com/multi/path";
+ do_check_eq(CommonUtils.makeURI(uri4).spec, uri4);
+ let uri5 = "http://mozillalabs.com/?query";
+ do_check_eq(CommonUtils.makeURI(uri5).spec, uri5);
+ let uri6 = "http://mozillalabs.com/#hash";
+ do_check_eq(CommonUtils.makeURI(uri6).spec, uri6);
+
+ _("Check https uris");
+ let uris1 = "https://mozillalabs.com/";
+ do_check_eq(CommonUtils.makeURI(uris1).spec, uris1);
+ let uris2 = "https://www.mozillalabs.com/";
+ do_check_eq(CommonUtils.makeURI(uris2).spec, uris2);
+ let uris3 = "https://mozillalabs.com/path";
+ do_check_eq(CommonUtils.makeURI(uris3).spec, uris3);
+ let uris4 = "https://mozillalabs.com/multi/path";
+ do_check_eq(CommonUtils.makeURI(uris4).spec, uris4);
+ let uris5 = "https://mozillalabs.com/?query";
+ do_check_eq(CommonUtils.makeURI(uris5).spec, uris5);
+ let uris6 = "https://mozillalabs.com/#hash";
+ do_check_eq(CommonUtils.makeURI(uris6).spec, uris6);
+
+ _("Check chrome uris");
+ let uric1 = "chrome://browser/content/browser.xul";
+ do_check_eq(CommonUtils.makeURI(uric1).spec, uric1);
+ let uric2 = "chrome://browser/skin/browser.css";
+ do_check_eq(CommonUtils.makeURI(uric2).spec, uric2);
+ let uric3 = "chrome://browser/locale/browser.dtd";
+ do_check_eq(CommonUtils.makeURI(uric3).spec, uric3);
+
+ _("Check about uris");
+ let uria1 = "about:weave";
+ do_check_eq(CommonUtils.makeURI(uria1).spec, uria1);
+ let uria2 = "about:weave/";
+ do_check_eq(CommonUtils.makeURI(uria2).spec, uria2);
+ let uria3 = "about:weave/path";
+ do_check_eq(CommonUtils.makeURI(uria3).spec, uria3);
+ let uria4 = "about:weave/multi/path";
+ do_check_eq(CommonUtils.makeURI(uria4).spec, uria4);
+ let uria5 = "about:weave/?query";
+ do_check_eq(CommonUtils.makeURI(uria5).spec, uria5);
+ let uria6 = "about:weave/#hash";
+ do_check_eq(CommonUtils.makeURI(uria6).spec, uria6);
+
+ _("Invalid uris are undefined");
+ do_check_eq(CommonUtils.makeURI("mozillalabs.com"), undefined);
+ do_check_eq(CommonUtils.makeURI("chrome://badstuff"), undefined);
+ do_check_eq(CommonUtils.makeURI("this is a test"), undefined);
+}
diff --git a/services/common/tests/unit/test_utils_namedTimer.js b/services/common/tests/unit/test_utils_namedTimer.js
new file mode 100644
index 000000000..61a65e260
--- /dev/null
+++ b/services/common/tests/unit/test_utils_namedTimer.js
@@ -0,0 +1,69 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_required_args() {
+ try {
+ CommonUtils.namedTimer(function callback() {
+ do_throw("Shouldn't fire.");
+ }, 0);
+ do_throw("Should have thrown!");
+ } catch(ex) {
+ run_next_test();
+ }
+});
+
+add_test(function test_simple() {
+ _("Test basic properties of CommonUtils.namedTimer.");
+
+ const delay = 200;
+ let that = {};
+ let t0 = Date.now();
+ CommonUtils.namedTimer(function callback(timer) {
+ do_check_eq(this, that);
+ do_check_eq(this._zetimer, null);
+ do_check_true(timer instanceof Ci.nsITimer);
+ // Difference should be ~delay, but hard to predict on all platforms,
+ // particularly Windows XP.
+ do_check_true(Date.now() > t0);
+ run_next_test();
+ }, delay, that, "_zetimer");
+});
+
+add_test(function test_delay() {
+ _("Test delaying a timer that hasn't fired yet.");
+
+ const delay = 100;
+ let that = {};
+ let t0 = Date.now();
+ function callback(timer) {
+ // Difference should be ~2*delay, but hard to predict on all platforms,
+ // particularly Windows XP.
+ do_check_true((Date.now() - t0) > delay);
+ run_next_test();
+ }
+ CommonUtils.namedTimer(callback, delay, that, "_zetimer");
+ CommonUtils.namedTimer(callback, 2 * delay, that, "_zetimer");
+});
+
+add_test(function test_clear() {
+ _("Test clearing a timer that hasn't fired yet.");
+
+ const delay = 0;
+ let that = {};
+ CommonUtils.namedTimer(function callback(timer) {
+ do_throw("Shouldn't fire!");
+ }, delay, that, "_zetimer");
+
+ that._zetimer.clear();
+ do_check_eq(that._zetimer, null);
+ CommonUtils.nextTick(run_next_test);
+});
diff --git a/services/common/tests/unit/test_utils_sets.js b/services/common/tests/unit/test_utils_sets.js
new file mode 100644
index 000000000..c02c7f486
--- /dev/null
+++ b/services/common/tests/unit/test_utils_sets.js
@@ -0,0 +1,72 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+Cu.import("resource://services-common/utils.js");
+
+const EMPTY = new Set();
+const A = new Set(["a"]);
+const ABC = new Set(["a", "b", "c"]);
+const ABCD = new Set(["a", "b", "c", "d"]);
+const BC = new Set(["b", "c"]);
+const BCD = new Set(["b", "c", "d"]);
+const FGH = new Set(["f", "g", "h"]);
+const BCDFGH = new Set(["b", "c", "d", "f", "g", "h"]);
+
+var union = CommonUtils.union;
+var difference = CommonUtils.difference;
+var intersection = CommonUtils.intersection;
+var setEqual = CommonUtils.setEqual;
+
+function do_check_setEqual(a, b) {
+ do_check_true(setEqual(a, b));
+}
+
+function do_check_not_setEqual(a, b) {
+ do_check_false(setEqual(a, b));
+}
+
+function run_test() {
+ run_next_test();
+}
+
+add_test(function test_setEqual() {
+ do_check_setEqual(EMPTY, EMPTY);
+ do_check_setEqual(EMPTY, new Set());
+ do_check_setEqual(A, A);
+ do_check_setEqual(A, new Set(["a"]));
+ do_check_setEqual(new Set(["a"]), A);
+ do_check_not_setEqual(A, EMPTY);
+ do_check_not_setEqual(EMPTY, A);
+ do_check_not_setEqual(ABC, A);
+ run_next_test();
+});
+
+add_test(function test_union() {
+ do_check_setEqual(EMPTY, union(EMPTY, EMPTY));
+ do_check_setEqual(ABC, union(EMPTY, ABC));
+ do_check_setEqual(ABC, union(ABC, ABC));
+ do_check_setEqual(ABCD, union(ABC, BCD));
+ do_check_setEqual(ABCD, union(BCD, ABC));
+ do_check_setEqual(BCDFGH, union(BCD, FGH));
+ run_next_test();
+});
+
+add_test(function test_difference() {
+ do_check_setEqual(EMPTY, difference(EMPTY, EMPTY));
+ do_check_setEqual(EMPTY, difference(EMPTY, A));
+ do_check_setEqual(EMPTY, difference(A, A));
+ do_check_setEqual(ABC, difference(ABC, EMPTY));
+ do_check_setEqual(ABC, difference(ABC, FGH));
+ do_check_setEqual(A, difference(ABC, BCD));
+ run_next_test();
+});
+
+add_test(function test_intersection() {
+ do_check_setEqual(EMPTY, intersection(EMPTY, EMPTY));
+ do_check_setEqual(EMPTY, intersection(ABC, EMPTY));
+ do_check_setEqual(EMPTY, intersection(ABC, FGH));
+ do_check_setEqual(BC, intersection(ABC, BCD));
+ run_next_test();
+});
diff --git a/services/common/tests/unit/test_utils_utf8.js b/services/common/tests/unit/test_utils_utf8.js
new file mode 100644
index 000000000..b0fd540f5
--- /dev/null
+++ b/services/common/tests/unit/test_utils_utf8.js
@@ -0,0 +1,11 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+Cu.import("resource://services-common/utils.js");
+
+function run_test() {
+ let str = "Umlaute: \u00FC \u00E4\n"; // Umlaute: ü ä
+ let encoded = CommonUtils.encodeUTF8(str);
+ let decoded = CommonUtils.decodeUTF8(encoded);
+ do_check_eq(decoded, str);
+}
diff --git a/services/common/tests/unit/test_utils_uuid.js b/services/common/tests/unit/test_utils_uuid.js
new file mode 100644
index 000000000..f1eabf50e
--- /dev/null
+++ b/services/common/tests/unit/test_utils_uuid.js
@@ -0,0 +1,12 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+function run_test() {
+ let uuid = CommonUtils.generateUUID();
+ do_check_eq(uuid.length, 36);
+ do_check_eq(uuid[8], "-");
+
+ run_next_test();
+}
diff --git a/services/common/tests/unit/xpcshell.ini b/services/common/tests/unit/xpcshell.ini
new file mode 100644
index 000000000..dbec09519
--- /dev/null
+++ b/services/common/tests/unit/xpcshell.ini
@@ -0,0 +1,53 @@
+[DEFAULT]
+head = head_global.js head_helpers.js head_http.js
+tail =
+firefox-appdir = browser
+support-files =
+ test_storage_adapter/**
+ test_blocklist_signatures/**
+
+# Test load modules first so syntax failures are caught early.
+[test_load_modules.js]
+
+[test_blocklist_certificates.js]
+[test_blocklist_clients.js]
+[test_blocklist_updater.js]
+
+[test_kinto.js]
+[test_blocklist_signatures.js]
+[test_storage_adapter.js]
+
+[test_utils_atob.js]
+[test_utils_convert_string.js]
+[test_utils_dateprefs.js]
+[test_utils_deepCopy.js]
+[test_utils_encodeBase32.js]
+[test_utils_encodeBase64URL.js]
+[test_utils_ensureMillisecondsTimestamp.js]
+[test_utils_json.js]
+[test_utils_makeURI.js]
+[test_utils_namedTimer.js]
+[test_utils_sets.js]
+[test_utils_utf8.js]
+[test_utils_uuid.js]
+
+[test_async_chain.js]
+[test_async_querySpinningly.js]
+
+[test_hawkclient.js]
+skip-if = os == "android"
+[test_hawkrequest.js]
+skip-if = os == "android"
+
+[test_logmanager.js]
+[test_observers.js]
+[test_restrequest.js]
+
+[test_tokenauthenticatedrequest.js]
+skip-if = os == "android"
+
+[test_tokenserverclient.js]
+skip-if = os == "android"
+
+[test_storage_server.js]
+skip-if = os == "android"
diff --git a/services/common/tokenserverclient.js b/services/common/tokenserverclient.js
new file mode 100644
index 000000000..b220ab586
--- /dev/null
+++ b/services/common/tokenserverclient.js
@@ -0,0 +1,462 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+this.EXPORTED_SYMBOLS = [
+ "TokenServerClient",
+ "TokenServerClientError",
+ "TokenServerClientNetworkError",
+ "TokenServerClientServerError",
+];
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/Log.jsm");
+Cu.import("resource://services-common/rest.js");
+Cu.import("resource://services-common/observers.js");
+
+const PREF_LOG_LEVEL = "services.common.log.logger.tokenserverclient";
+
+/**
+ * Represents a TokenServerClient error that occurred on the client.
+ *
+ * This is the base type for all errors raised by client operations.
+ *
+ * @param message
+ * (string) Error message.
+ */
+this.TokenServerClientError = function TokenServerClientError(message) {
+ this.name = "TokenServerClientError";
+ this.message = message || "Client error.";
+ // Without explicitly setting .stack, all stacks from these errors will point
+ // to the "new Error()" call a few lines down, which isn't helpful.
+ this.stack = Error().stack;
+}
+TokenServerClientError.prototype = new Error();
+TokenServerClientError.prototype.constructor = TokenServerClientError;
+TokenServerClientError.prototype._toStringFields = function() {
+ return {message: this.message};
+}
+TokenServerClientError.prototype.toString = function() {
+ return this.name + "(" + JSON.stringify(this._toStringFields()) + ")";
+}
+TokenServerClientError.prototype.toJSON = function() {
+ let result = this._toStringFields();
+ result["name"] = this.name;
+ return result;
+}
+
+/**
+ * Represents a TokenServerClient error that occurred in the network layer.
+ *
+ * @param error
+ * The underlying error thrown by the network layer.
+ */
+this.TokenServerClientNetworkError =
+ function TokenServerClientNetworkError(error) {
+ this.name = "TokenServerClientNetworkError";
+ this.error = error;
+ this.stack = Error().stack;
+}
+TokenServerClientNetworkError.prototype = new TokenServerClientError();
+TokenServerClientNetworkError.prototype.constructor =
+ TokenServerClientNetworkError;
+TokenServerClientNetworkError.prototype._toStringFields = function() {
+ return {error: this.error};
+}
+
+/**
+ * Represents a TokenServerClient error that occurred on the server.
+ *
+ * This type will be encountered for all non-200 response codes from the
+ * server. The type of error is strongly enumerated and is stored in the
+ * `cause` property. This property can have the following string values:
+ *
+ * conditions-required -- The server is requesting that the client
+ * agree to service conditions before it can obtain a token. The
+ * conditions that must be presented to the user and agreed to are in
+ * the `urls` mapping on the instance. Keys of this mapping are
+ * identifiers. Values are string URLs.
+ *
+ * invalid-credentials -- A token could not be obtained because
+ * the credentials presented by the client were invalid.
+ *
+ * unknown-service -- The requested service was not found.
+ *
+ * malformed-request -- The server rejected the request because it
+ * was invalid. If you see this, code in this file is likely wrong.
+ *
+ * malformed-response -- The response from the server was not what was
+ * expected.
+ *
+ * general -- A general server error has occurred. Clients should
+ * interpret this as an opaque failure.
+ *
+ * @param message
+ * (string) Error message.
+ */
+this.TokenServerClientServerError =
+ function TokenServerClientServerError(message, cause="general") {
+ this.now = new Date().toISOString(); // may be useful to diagnose time-skew issues.
+ this.name = "TokenServerClientServerError";
+ this.message = message || "Server error.";
+ this.cause = cause;
+ this.stack = Error().stack;
+}
+TokenServerClientServerError.prototype = new TokenServerClientError();
+TokenServerClientServerError.prototype.constructor =
+ TokenServerClientServerError;
+
+TokenServerClientServerError.prototype._toStringFields = function() {
+ let fields = {
+ now: this.now,
+ message: this.message,
+ cause: this.cause,
+ };
+ if (this.response) {
+ fields.response_body = this.response.body;
+ fields.response_headers = this.response.headers;
+ fields.response_status = this.response.status;
+ }
+ return fields;
+};
+
+/**
+ * Represents a client to the Token Server.
+ *
+ * http://docs.services.mozilla.com/token/index.html
+ *
+ * The Token Server supports obtaining tokens for arbitrary apps by
+ * constructing URI paths of the form <app>/<app_version>. However, the service
+ * discovery mechanism emphasizes the use of full URIs and tries to not force
+ * the client to manipulate URIs. This client currently enforces this practice
+ * by not implementing an API which would perform URI manipulation.
+ *
+ * If you are tempted to implement this API in the future, consider this your
+ * warning that you may be doing it wrong and that you should store full URIs
+ * instead.
+ *
+ * Areas to Improve:
+ *
+ * - The server sends a JSON response on error. The client does not currently
+ * parse this. It might be convenient if it did.
+ * - Currently most non-200 status codes are rolled into one error type. It
+ * might be helpful if callers had a richer API that communicated who was
+ * at fault (e.g. differentiating a 503 from a 401).
+ */
+this.TokenServerClient = function TokenServerClient() {
+ this._log = Log.repository.getLogger("Common.TokenServerClient");
+ let level = "Debug";
+ try {
+ level = Services.prefs.getCharPref(PREF_LOG_LEVEL);
+ } catch (ex) {}
+ this._log.level = Log.Level[level];
+}
+TokenServerClient.prototype = {
+ /**
+ * Logger instance.
+ */
+ _log: null,
+
+ /**
+ * Obtain a token from a BrowserID assertion against a specific URL.
+ *
+ * This asynchronously obtains the token. The callback receives 2 arguments:
+ *
+ * (TokenServerClientError | null) If no token could be obtained, this
+ * will be a TokenServerClientError instance describing why. The
+ * type seen defines the type of error encountered. If an HTTP response
+ * was seen, a RESTResponse instance will be stored in the `response`
+ * property of this object. If there was no error and a token is
+ * available, this will be null.
+ *
+ * (map | null) On success, this will be a map containing the results from
+ * the server. If there was an error, this will be null. The map has the
+ * following properties:
+ *
+ * id (string) HTTP MAC public key identifier.
+ * key (string) HTTP MAC shared symmetric key.
+ * endpoint (string) URL where service can be connected to.
+ * uid (string) user ID for requested service.
+ * duration (string) the validity duration of the issued token.
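+   *     hashed_fxa_uid (string) hashed Firefox Account uid, passed through
+   *         from the server response when the server provides it.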
+ *
+ * Terms of Service Acceptance
+ * ---------------------------
+ *
+ * Some services require users to accept terms of service before they can
+ * obtain a token. If a service requires ToS acceptance, the error passed
+ * to the callback will be a `TokenServerClientServerError` with the
+ * `cause` property set to "conditions-required". The `urls` property of that
+ * instance will be a map of string keys to string URL values. The user-agent
+ * should prompt the user to accept the content at these URLs.
+ *
+ * Clients signify acceptance of the terms of service by sending a token
+   * request with additional metadata, supplied through the `addHeaders`
+   * argument to this function. Clients only need to send acceptance once
+   * per service and the server remembers it. If
+ * the conditions for the service change, the server may request
+ * clients agree to terms again. Therefore, clients should always be
+ * prepared to handle a conditions required response.
+ *
+ * Clients should not blindly send acceptance to conditions. Instead, clients
+   * should send acceptance if and only if the server asks for
+ * acceptance, the conditions are displayed to the user, and the user agrees
+ * to them.
+ *
+ * Example Usage
+ * -------------
+ *
+ * let client = new TokenServerClient();
+ * let assertion = getBrowserIDAssertionFromSomewhere();
+ * let url = "https://token.services.mozilla.com/1.0/sync/2.0";
+ *
+ * client.getTokenFromBrowserIDAssertion(url, assertion,
+ * function onResponse(error, result) {
+ * if (error) {
+ * if (error.cause == "conditions-required") {
+ * promptConditionsAcceptance(error.urls, function onAccept() {
+ * client.getTokenFromBrowserIDAssertion(url, assertion,
+ * onResponse, true);
+   *          });
+ * return;
+ * }
+ *
+ * // Do other error handling.
+ * return;
+ * }
+ *
+ * let {
+ * id: id, key: key, uid: uid, endpoint: endpoint, duration: duration
+ * } = result;
+ * // Do stuff with data and carry on.
+ * });
+ *
+ * @param url
+ * (string) URL to fetch token from.
+ * @param assertion
+ * (string) BrowserID assertion to exchange token for.
+ * @param cb
+ * (function) Callback to be invoked with result of operation.
+   * @param addHeaders
+   *        (object) Optional map of extra headers to include in the request
+   *        (for example, the condition acceptance header described above).
+ */
+ getTokenFromBrowserIDAssertion:
+ function getTokenFromBrowserIDAssertion(url, assertion, cb, addHeaders={}) {
+ if (!url) {
+ throw new TokenServerClientError("url argument is not valid.");
+ }
+
+ if (!assertion) {
+ throw new TokenServerClientError("assertion argument is not valid.");
+ }
+
+ if (!cb) {
+ throw new TokenServerClientError("cb argument is not valid.");
+ }
+
+ this._log.debug("Beginning BID assertion exchange: " + url);
+
+ let req = this.newRESTRequest(url);
+ req.setHeader("Accept", "application/json");
+ req.setHeader("Authorization", "BrowserID " + assertion);
+
+ for (let header in addHeaders) {
+ req.setHeader(header, addHeaders[header]);
+ }
+
+ let client = this;
+ req.get(function onResponse(error) {
+ if (error) {
+ cb(new TokenServerClientNetworkError(error), null);
+ return;
+ }
+
+ let self = this;
+ function callCallback(error, result) {
+ if (!cb) {
+ self._log.warn("Callback already called! Did it throw?");
+ return;
+ }
+
+ try {
+ cb(error, result);
+ } catch (ex) {
+ self._log.warn("Exception when calling user-supplied callback", ex);
+ }
+
+ cb = null;
+ }
+
+ try {
+ client._processTokenResponse(this.response, callCallback);
+ } catch (ex) {
+ this._log.warn("Error processing token server response", ex);
+
+ let error = new TokenServerClientError(ex);
+ error.response = this.response;
+ callCallback(error, null);
+ }
+ });
+ },
+
+ /**
+ * Handler to process token request responses.
+ *
+ * @param response
+ * RESTResponse from token HTTP request.
+ * @param cb
+ * The original callback passed to the public API.
+ */
+ _processTokenResponse: function processTokenResponse(response, cb) {
+ this._log.debug("Got token response: " + response.status);
+
+ // Responses should *always* be JSON, even in the case of 4xx and 5xx
+ // errors. If we don't see JSON, the server is likely very unhappy.
+ let ct = response.headers["content-type"] || "";
+ if (ct != "application/json" && !ct.startsWith("application/json;")) {
+ this._log.warn("Did not receive JSON response. Misconfigured server?");
+ this._log.debug("Content-Type: " + ct);
+ this._log.debug("Body: " + response.body);
+
+ let error = new TokenServerClientServerError("Non-JSON response.",
+ "malformed-response");
+ error.response = response;
+ cb(error, null);
+ return;
+ }
+
+ let result;
+ try {
+ result = JSON.parse(response.body);
+ } catch (ex) {
+ this._log.warn("Invalid JSON returned by server: " + response.body);
+ let error = new TokenServerClientServerError("Malformed JSON.",
+ "malformed-response");
+ error.response = response;
+ cb(error, null);
+ return;
+ }
+
+ // Any response status can have X-Backoff or X-Weave-Backoff headers.
+ this._maybeNotifyBackoff(response, "x-weave-backoff");
+ this._maybeNotifyBackoff(response, "x-backoff");
+
+ // The service shouldn't have any 3xx, so we don't need to handle those.
+ if (response.status != 200) {
+ // We /should/ have a Cornice error report in the JSON. We log that to
+ // help with debugging.
+ if ("errors" in result) {
+ // This could throw, but this entire function is wrapped in a try. If
+ // the server is sending something not an array of objects, it has
+ // failed to keep its contract with us and there is little we can do.
+ for (let error of result.errors) {
+ this._log.info("Server-reported error: " + JSON.stringify(error));
+ }
+ }
+
+ let error = new TokenServerClientServerError();
+ error.response = response;
+
+ if (response.status == 400) {
+ error.message = "Malformed request.";
+ error.cause = "malformed-request";
+ } else if (response.status == 401) {
+ // Cause can be invalid-credentials, invalid-timestamp, or
+ // invalid-generation.
+ error.message = "Authentication failed.";
+ error.cause = result.status;
+ }
+
+ // 403 should represent a "condition acceptance needed" response.
+ //
+ // The extra validation of "urls" is important. We don't want to signal
+ // conditions required unless we are absolutely sure that is what the
+ // server is asking for.
+ else if (response.status == 403) {
+ if (!("urls" in result)) {
+ this._log.warn("403 response without proper fields!");
+ this._log.warn("Response body: " + response.body);
+
+ error.message = "Missing JSON fields.";
+ error.cause = "malformed-response";
+ } else if (typeof(result.urls) != "object") {
+ error.message = "urls field is not a map.";
+ error.cause = "malformed-response";
+ } else {
+ error.message = "Conditions must be accepted.";
+ error.cause = "conditions-required";
+ error.urls = result.urls;
+ }
+ } else if (response.status == 404) {
+ error.message = "Unknown service.";
+ error.cause = "unknown-service";
+ }
+
+ // A Retry-After header should theoretically only appear on a 503, but
+ // we'll look for it on any error response.
+ this._maybeNotifyBackoff(response, "retry-after");
+
+ cb(error, null);
+ return;
+ }
+
+ for (let k of ["id", "key", "api_endpoint", "uid", "duration"]) {
+ if (!(k in result)) {
+        let error = new TokenServerClientServerError("Expected key not " +
+                                                     "present in result: " +
+                                                     k);
+ error.cause = "malformed-response";
+ error.response = response;
+ cb(error, null);
+ return;
+ }
+ }
+
+ this._log.debug("Successful token response");
+ cb(null, {
+ id: result.id,
+ key: result.key,
+ endpoint: result.api_endpoint,
+ uid: result.uid,
+ duration: result.duration,
+ hashed_fxa_uid: result.hashed_fxa_uid,
+ });
+ },
+
+ /*
+ * The prefix used for all notifications sent by this module. This
+ * allows the handler of notifications to be sure they are handling
+ * notifications for the service they expect.
+ *
+ * If not set, no notifications will be sent.
+ */
+ observerPrefix: null,
+
+ // Given an optional header value, notify that a backoff has been requested.
+ _maybeNotifyBackoff: function (response, headerName) {
+ if (!this.observerPrefix) {
+ return;
+ }
+ let headerVal = response.headers[headerName];
+ if (!headerVal) {
+ return;
+ }
+    // parseInt() never throws; it returns NaN for unparseable input, so
+    // validate the result rather than relying on try/catch.
+    let backoffInterval = parseInt(headerVal, 10);
+    if (isNaN(backoffInterval)) {
+      this._log.error("TokenServer response had invalid backoff value in '" +
+                      headerName + "' header: " + headerVal);
+      return;
+    }
+ Observers.notify(this.observerPrefix + ":backoff:interval", backoffInterval);
+ },
+
+ // override points for testing.
+ newRESTRequest: function(url) {
+ return new RESTRequest(url);
+ }
+};
diff --git a/services/common/utils.js b/services/common/utils.js
new file mode 100644
index 000000000..f0f57d14a
--- /dev/null
+++ b/services/common/utils.js
@@ -0,0 +1,645 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
+
+this.EXPORTED_SYMBOLS = ["CommonUtils"];
+
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/osfile.jsm")
+Cu.import("resource://gre/modules/Log.jsm");
+
+this.CommonUtils = {
+ /*
+ * Set manipulation methods. These should be lifted into toolkit, or added to
+ * `Set` itself.
+ */
+
+ /**
+ * Return elements of `a` or `b`.
+ */
+ union: function (a, b) {
+ let out = new Set(a);
+ for (let x of b) {
+ out.add(x);
+ }
+ return out;
+ },
+
+ /**
+ * Return elements of `a` that are not present in `b`.
+ */
+ difference: function (a, b) {
+ let out = new Set(a);
+ for (let x of b) {
+ out.delete(x);
+ }
+ return out;
+ },
+
+ /**
+ * Return elements of `a` that are also in `b`.
+ */
+ intersection: function (a, b) {
+ let out = new Set();
+ for (let x of a) {
+ if (b.has(x)) {
+ out.add(x);
+ }
+ }
+ return out;
+ },
+
+ /**
+ * Return true if `a` and `b` are the same size, and
+ * every element of `a` is in `b`.
+ */
+ setEqual: function (a, b) {
+ if (a.size != b.size) {
+ return false;
+ }
+ for (let x of a) {
+ if (!b.has(x)) {
+ return false;
+ }
+ }
+ return true;
+ },
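+
+  // For illustration (see test_utils_sets.js): union({a,b,c}, {b,c,d}) is
+  // {a,b,c,d}; difference({a,b,c}, {b,c,d}) is {a}; intersection is {b,c}.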
+
+ /**
+ * Encode byte string as base64URL (RFC 4648).
+ *
+ * @param bytes
+ * (string) Raw byte string to encode.
+ * @param pad
+ * (bool) Whether to include padding characters (=). Defaults
+ * to true for historical reasons.
+ */
+ encodeBase64URL: function encodeBase64URL(bytes, pad=true) {
+ let s = btoa(bytes).replace(/\+/g, "-").replace(/\//g, "_");
+
+ if (!pad) {
+ return s.replace(/=+$/, "");
+ }
+
+ return s;
+ },
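+
+  // For illustration (values from test_utils_encodeBase64URL.js):
+  //   encodeBase64URL("hello")        == "aGVsbG8="
+  //   encodeBase64URL("hello", false) == "aGVsbG8"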
+
+ /**
+ * Create a nsIURI instance from a string.
+ */
+ makeURI: function makeURI(URIString) {
+ if (!URIString)
+ return null;
+ try {
+ return Services.io.newURI(URIString, null, null);
+ } catch (e) {
+ let log = Log.repository.getLogger("Common.Utils");
+ log.debug("Could not create URI", e);
+ return null;
+ }
+ },
+
+ /**
+ * Execute a function on the next event loop tick.
+ *
+ * @param callback
+ * Function to invoke.
+ * @param thisObj [optional]
+ * Object to bind the callback to.
+ */
+ nextTick: function nextTick(callback, thisObj) {
+ if (thisObj) {
+ callback = callback.bind(thisObj);
+ }
+ Services.tm.currentThread.dispatch(callback, Ci.nsIThread.DISPATCH_NORMAL);
+ },
+
+ /**
+ * Return a promise resolving on some later tick.
+ *
+   * This is a wrapper around Promise.resolve() that prevents stack
+ * accumulation and prevents callers from accidentally relying on
+ * same-tick promise resolution.
+ */
+ laterTickResolvingPromise: function (value, prototype) {
+ let deferred = Promise.defer(prototype);
+ this.nextTick(deferred.resolve.bind(deferred, value));
+ return deferred.promise;
+ },
+
+ /**
+ * Spin the event loop and return once the next tick is executed.
+ *
+ * This is an evil function and should not be used in production code. It
+ * exists in this module for ease-of-use.
+ */
+ waitForNextTick: function waitForNextTick() {
+ let cb = Async.makeSyncCallback();
+ this.nextTick(cb);
+ Async.waitForSyncCallback(cb);
+
+ return;
+ },
+
+ /**
+ * Return a timer that is scheduled to call the callback after waiting the
+ * provided time or as soon as possible. The timer will be set as a property
+ * of the provided object with the given timer name.
+ */
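+  // Illustrative usage (the property name "pingTimer" is just an example):
+  //   CommonUtils.namedTimer(function(timer) {
+  //     // fires once after ~2000 ms; this.pingTimer is nulled before the call
+  //   }, 2000, this, "pingTimer");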
+ namedTimer: function namedTimer(callback, wait, thisObj, name) {
+ if (!thisObj || !name) {
+ throw "You must provide both an object and a property name for the timer!";
+ }
+
+ // Delay an existing timer if it exists
+ if (name in thisObj && thisObj[name] instanceof Ci.nsITimer) {
+ thisObj[name].delay = wait;
+ return;
+ }
+
+    // Create a special timer to which we can add extra properties
+ let timer = Object.create(Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer));
+
+ // Provide an easy way to clear out the timer
+ timer.clear = function() {
+ thisObj[name] = null;
+ timer.cancel();
+ };
+
+ // Initialize the timer with a smart callback
+ timer.initWithCallback({
+ notify: function notify() {
+ // Clear out the timer once it's been triggered
+ timer.clear();
+ callback.call(thisObj, timer);
+ }
+ }, wait, timer.TYPE_ONE_SHOT);
+
+ return thisObj[name] = timer;
+ },
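+
+ // Example (illustrative; `_pingTimer` and `ping()` are hypothetical names):
+ //   CommonUtils.namedTimer(function() {
+ //     this.ping();
+ //   }, 5000, this, "_pingTimer");
+ //   // Calling namedTimer again with the same name before it fires just
+ //   // resets the delay instead of creating a second timer.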
+
+ encodeUTF8: function encodeUTF8(str) {
+ try {
+ str = this._utf8Converter.ConvertFromUnicode(str);
+ return str + this._utf8Converter.Finish();
+ } catch (ex) {
+ return null;
+ }
+ },
+
+ decodeUTF8: function decodeUTF8(str) {
+ try {
+ str = this._utf8Converter.ConvertToUnicode(str);
+ return str + this._utf8Converter.Finish();
+ } catch (ex) {
+ return null;
+ }
+ },
+
+ byteArrayToString: function byteArrayToString(bytes) {
+ return bytes.map(byte => String.fromCharCode(byte)).join("");
+ },
+
+ stringToByteArray: function stringToByteArray(bytesString) {
+ return Array.prototype.slice.call(bytesString).map(c => c.charCodeAt(0));
+ },
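+
+ // Example (illustrative):
+ //   CommonUtils.byteArrayToString([104, 105]); // "hi"
+ //   CommonUtils.stringToByteArray("hi");       // [104, 105]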
+
+ bytesAsHex: function bytesAsHex(bytes) {
+ return Array.prototype.slice.call(bytes).map(c => ("0" + c.charCodeAt(0).toString(16)).slice(-2)).join("");
+ },
+
+ stringAsHex: function stringAsHex(str) {
+ return CommonUtils.bytesAsHex(CommonUtils.encodeUTF8(str));
+ },
+
+ stringToBytes: function stringToBytes(str) {
+ return CommonUtils.hexToBytes(CommonUtils.stringAsHex(str));
+ },
+
+ hexToBytes: function hexToBytes(str) {
+ let bytes = [];
+ for (let i = 0; i < str.length - 1; i += 2) {
+ bytes.push(parseInt(str.substr(i, 2), 16));
+ }
+ return String.fromCharCode.apply(String, bytes);
+ },
+
+ hexAsString: function hexAsString(hex) {
+ return CommonUtils.decodeUTF8(CommonUtils.hexToBytes(hex));
+ },
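+
+ // Example (illustrative). Note that despite its name, hexToBytes returns a
+ // byte *string*, not an array:
+ //   CommonUtils.bytesAsHex("\x01\xff"); // "01ff"
+ //   CommonUtils.hexToBytes("01ff");     // "\x01\xff"
+ //   CommonUtils.stringAsHex("hi");      // "6869"
+ //   CommonUtils.hexAsString("6869");    // "hi"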
+
+ /**
+ * Base32 encode (RFC 4648) a byte string.
+ */
+ encodeBase32: function encodeBase32(bytes) {
+ const key = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";
+ let quanta = Math.floor(bytes.length / 5);
+ let leftover = bytes.length % 5;
+
+ // Pad the last quantum with zeros so the length is a multiple of 5.
+ if (leftover) {
+ quanta += 1;
+ for (let i = leftover; i < 5; i++)
+ bytes += "\0";
+ }
+
+ // Chop the string into quanta of 5 bytes (40 bits). Each quantum
+ // is turned into 8 characters from the 32 character base.
+ let ret = "";
+ for (let i = 0; i < bytes.length; i += 5) {
+ let c = Array.prototype.slice.call(bytes.slice(i, i + 5)).map(byte => byte.charCodeAt(0));
+ ret += key[c[0] >> 3]
+ + key[((c[0] << 2) & 0x1f) | (c[1] >> 6)]
+ + key[(c[1] >> 1) & 0x1f]
+ + key[((c[1] << 4) & 0x1f) | (c[2] >> 4)]
+ + key[((c[2] << 1) & 0x1f) | (c[3] >> 7)]
+ + key[(c[3] >> 2) & 0x1f]
+ + key[((c[3] << 3) & 0x1f) | (c[4] >> 5)]
+ + key[c[4] & 0x1f];
+ }
+
+ switch (leftover) {
+ case 1:
+ return ret.slice(0, -6) + "======";
+ case 2:
+ return ret.slice(0, -4) + "====";
+ case 3:
+ return ret.slice(0, -3) + "===";
+ case 4:
+ return ret.slice(0, -1) + "=";
+ default:
+ return ret;
+ }
+ },
+
+ /**
+ * Base32 decode (RFC 4648) a string.
+ */
+ decodeBase32: function decodeBase32(str) {
+ const key = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";
+
+ let padChar = str.indexOf("=");
+ let chars = (padChar == -1) ? str.length : padChar;
+ let bytes = Math.floor(chars * 5 / 8);
+ let blocks = Math.ceil(chars / 8);
+
+ // Process a chunk of 5 bytes / 8 characters.
+ // The layout of each block is known in advance,
+ // so the shifts are unrolled rather than computed arithmetically.
+ function processBlock(ret, cOffset, rOffset) {
+ let c, val;
+
+ // N.B., this relies on
+ // undefined | foo == foo.
+ function accumulate(val) {
+ ret[rOffset] |= val;
+ }
+
+ function advance() {
+ c = str[cOffset++];
+ if (!c || c == "" || c == "=") // Easier than range checking.
+ throw "Done"; // Will be caught far away.
+ val = key.indexOf(c);
+ if (val == -1)
+ throw "Unknown character in base32: " + c;
+ }
+
+ // Handle a left shift, restricted to bytes.
+ function left(octet, shift) {
+ return (octet << shift) & 0xff;
+ }
+
+ advance();
+ accumulate(left(val, 3));
+ advance();
+ accumulate(val >> 2);
+ ++rOffset;
+ accumulate(left(val, 6));
+ advance();
+ accumulate(left(val, 1));
+ advance();
+ accumulate(val >> 4);
+ ++rOffset;
+ accumulate(left(val, 4));
+ advance();
+ accumulate(val >> 1);
+ ++rOffset;
+ accumulate(left(val, 7));
+ advance();
+ accumulate(left(val, 2));
+ advance();
+ accumulate(val >> 3);
+ ++rOffset;
+ accumulate(left(val, 5));
+ advance();
+ accumulate(val);
+ ++rOffset;
+ }
+
+ // Our output array. Defined up front to be explicit (and maybe the engine can preallocate it).
+ let ret = new Array(bytes);
+ let i = 0;
+ let cOff = 0;
+ let rOff = 0;
+
+ for (; i < blocks; ++i) {
+ try {
+ processBlock(ret, cOff, rOff);
+ } catch (ex) {
+ // Handle the detection of padding.
+ if (ex == "Done")
+ break;
+ throw ex;
+ }
+ cOff += 8;
+ rOff += 5;
+ }
+
+ // Slice in case our shift overflowed to the right.
+ return CommonUtils.byteArrayToString(ret.slice(0, bytes));
+ },
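+
+ // Example (illustrative, RFC 4648 test vector):
+ //   CommonUtils.encodeBase32("foobar");           // "MZXW6YTBOI======"
+ //   CommonUtils.decodeBase32("MZXW6YTBOI======"); // "foobar"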
+
+ /**
+ * Trim excess padding from a Base64 string, then atob() it.
+ *
+ * See bug 562431 comment 4.
+ */
+ safeAtoB: function safeAtoB(b64) {
+ let len = b64.length;
+ let over = len % 4;
+ return over ? atob(b64.substr(0, len - over)) : atob(b64);
+ },
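+
+ // Example (illustrative): excess padding that would otherwise break atob().
+ //   CommonUtils.safeAtoB("aGVsbG8=="); // "hello"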
+
+ /**
+ * Parses a JSON file from disk using OS.File and promises.
+ *
+ * @param path the file to read. Will be passed to `OS.File.read()`.
+ * @return a promise that resolves to the JSON contents of the named file.
+ */
+ readJSON: function(path) {
+ return OS.File.read(path, { encoding: "utf-8" }).then((data) => {
+ return JSON.parse(data);
+ });
+ },
+
+ /**
+ * Write a JSON object to the named file using OS.File and promises.
+ *
+ * @param contents a JS object. Will be serialized.
+ * @param path the path of the file to write.
+ * @return a promise, as produced by OS.File.writeAtomic.
+ */
+ writeJSON: function(contents, path) {
+ let data = JSON.stringify(contents);
+ return OS.File.writeAtomic(path, data, {encoding: "utf-8", tmpPath: path + ".tmp"});
+ },
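+
+ // Example (illustrative, inside a Task.spawn generator; the file name is
+ // hypothetical):
+ //   let path = OS.Path.join(OS.Constants.Path.profileDir, "example.json");
+ //   yield CommonUtils.writeJSON({count: 1}, path);
+ //   let obj = yield CommonUtils.readJSON(path); // {count: 1}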
+
+
+ /**
+ * Ensure that the specified value is defined in integer milliseconds since
+ * UNIX epoch.
+ *
+ * This throws an error if the value is not an integer, is negative, or looks
+ * like seconds, not milliseconds.
+ *
+ * If the value is null or 0, no exception is raised.
+ *
+ * @param value
+ * Value to validate.
+ */
+ ensureMillisecondsTimestamp: function ensureMillisecondsTimestamp(value) {
+ if (!value) {
+ return;
+ }
+
+ if (!/^[0-9]+$/.test(value)) {
+ throw new Error("Timestamp value is not a positive integer: " + value);
+ }
+
+ let intValue = parseInt(value, 10);
+
+ if (!intValue) {
+ return;
+ }
+
+ // Catch what looks like seconds, not milliseconds.
+ if (intValue < 10000000000) {
+ throw new Error("Timestamp appears to be in seconds: " + intValue);
+ }
+ },
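+
+ // Example (illustrative):
+ //   CommonUtils.ensureMillisecondsTimestamp(Date.now()); // passes
+ //   CommonUtils.ensureMillisecondsTimestamp(1400000000); // throws: looks like seconds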
+
+ /**
+ * Read bytes from an nsIInputStream into a string.
+ *
+ * @param stream
+ * (nsIInputStream) Stream to read from.
+ * @param count
+ * (number) Integer number of bytes to read. If not defined, or
+ * 0, all available input is read.
+ */
+ readBytesFromInputStream: function readBytesFromInputStream(stream, count) {
+ let BinaryInputStream = Components.Constructor(
+ "@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream",
+ "setInputStream");
+ if (!count) {
+ count = stream.available();
+ }
+
+ return new BinaryInputStream(stream).readBytes(count);
+ },
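+
+ // Example (illustrative; `stream` is any nsIInputStream, e.g. the response
+ // body stream of a channel):
+ //   let bytes = CommonUtils.readBytesFromInputStream(stream);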
+
+ /**
+ * Generate a new UUID using nsIUUIDGenerator.
+ *
+ * Example value: "1e00a2e2-1570-443e-bf5e-000354124234"
+ *
+ * @return string A hex-formatted UUID string.
+ */
+ generateUUID: function generateUUID() {
+ let uuid = Cc["@mozilla.org/uuid-generator;1"]
+ .getService(Ci.nsIUUIDGenerator)
+ .generateUUID()
+ .toString();
+
+ return uuid.substring(1, uuid.length - 1);
+ },
+
+ /**
+ * Obtain an epoch value from a preference.
+ *
+ * This reads a string preference and returns an integer. The string
+ * preference is expected to contain the integer milliseconds since epoch.
+ * For best results, only read preferences that have been saved with
+ * setDatePref().
+ *
+ * We need to store times as strings because integer preferences are only
+ * 32 bits and would overflow for most millisecond timestamps.
+ *
+ * If the pref contains a non-integer value, the specified default value will
+ * be returned.
+ *
+ * @param branch
+ * (Preferences) Branch from which to retrieve preference.
+ * @param pref
+ * (string) The preference to read from.
+ * @param def
+ * (Number) The default value to use if the preference is not defined.
+ * @param log
+ * (Log.Logger) Logger to write warnings to.
+ */
+ getEpochPref: function getEpochPref(branch, pref, def=0, log=null) {
+ if (!Number.isInteger(def)) {
+ throw new Error("Default value is not a number: " + def);
+ }
+
+ let valueStr = branch.get(pref, null);
+
+ if (valueStr !== null) {
+ let valueInt = parseInt(valueStr, 10);
+ if (Number.isNaN(valueInt)) {
+ if (log) {
+ log.warn("Preference value is not an integer. Using default. " +
+ pref + "=" + valueStr + " -> " + def);
+ }
+
+ return def;
+ }
+
+ return valueInt;
+ }
+
+ return def;
+ },
+
+ /**
+ * Obtain a Date from a preference.
+ *
+ * This is a wrapper around getEpochPref. It converts the value to a Date
+ * instance and performs simple range checking.
+ *
+ * The range checking ensures the returned date is no older than the
+ * oldestYear parameter.
+ *
+ * @param branch
+ * (Preferences) Branch from which to read preference.
+ * @param pref
+ * (string) The preference from which to read.
+ * @param def
+ * (Number) The default value (in milliseconds) if the preference is
+ * not defined or invalid.
+ * @param log
+ * (Log.Logger) Logger to write warnings to.
+ * @param oldestYear
+ * (Number) Oldest year to accept in read values.
+ */
+ getDatePref: function getDatePref(branch, pref, def=0, log=null,
+ oldestYear=2010) {
+
+ let valueInt = this.getEpochPref(branch, pref, def, log);
+ let date = new Date(valueInt);
+
+ if (valueInt == def || date.getFullYear() >= oldestYear) {
+ return date;
+ }
+
+ if (log) {
+ log.warn("Unexpected old date seen in pref. Returning default: " +
+ pref + "=" + date + " -> " + def);
+ }
+
+ return new Date(def);
+ },
+
+ /**
+ * Store a Date in a preference.
+ *
+ * This is the opposite of getDatePref(). The same notes apply.
+ *
+ * If the range check fails, an Error will be thrown instead of a default
+ * value silently being used.
+ *
+ * @param branch
+ * (Preferences) Branch on which to set the preference.
+ * @param pref
+ * (string) Name of preference to write to.
+ * @param date
+ * (Date) The value to save.
+ * @param oldestYear
+ * (Number) The oldest year to accept for values.
+ */
+ setDatePref: function setDatePref(branch, pref, date, oldestYear=2010) {
+ if (date.getFullYear() < oldestYear) {
+ throw new Error("Trying to set " + pref + " to a very old time: " +
+ date + ". The current time is " + new Date() +
+ ". Is the system clock wrong?");
+ }
+
+ branch.set(pref, "" + date.getTime());
+ },
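+
+ // Example (illustrative; assumes Preferences.jsm and a made-up pref branch):
+ //   let prefs = new Preferences("services.common.example.");
+ //   CommonUtils.setDatePref(prefs, "lastSync", new Date());
+ //   CommonUtils.getDatePref(prefs, "lastSync", 0); // Date instance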
+
+ /**
+ * Convert a string between two encodings.
+ *
+ * Output is only guaranteed if the input stream is composed of octets. If
+ * the input string has characters with values larger than 255, data loss
+ * will occur.
+ *
+ * The returned string is guaranteed to consist of character codes no greater
+ * than 255.
+ *
+ * @param s
+ * (string) The source string to convert.
+ * @param source
+ * (string) The current encoding of the string.
+ * @param dest
+ * (string) The target encoding of the string.
+ *
+ * @return string
+ */
+ convertString: function convertString(s, source, dest) {
+ if (!s) {
+ throw new Error("Input string must be defined.");
+ }
+
+ let is = Cc["@mozilla.org/io/string-input-stream;1"]
+ .createInstance(Ci.nsIStringInputStream);
+ is.setData(s, s.length);
+
+ let listener = Cc["@mozilla.org/network/stream-loader;1"]
+ .createInstance(Ci.nsIStreamLoader);
+
+ let result;
+
+ listener.init({
+ onStreamComplete: function onStreamComplete(loader, context, status,
+ length, data) {
+ result = String.fromCharCode.apply(this, data);
+ },
+ });
+
+ let converter = this._converterService.asyncConvertData(source, dest,
+ listener, null);
+ converter.onStartRequest(null, null);
+ converter.onDataAvailable(null, null, is, 0, s.length);
+ converter.onStopRequest(null, null, null);
+
+ return result;
+ },
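+
+ // Example (illustrative; the source/dest pair must name converters that are
+ // registered with the stream converter service):
+ //   let deflated = CommonUtils.convertString(payload, "uncompressed", "deflate");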
+};
+
+XPCOMUtils.defineLazyGetter(CommonUtils, "_utf8Converter", function() {
+ let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
+ .createInstance(Ci.nsIScriptableUnicodeConverter);
+ converter.charset = "UTF-8";
+ return converter;
+});
+
+XPCOMUtils.defineLazyGetter(CommonUtils, "_converterService", function() {
+ return Cc["@mozilla.org/streamConverters;1"]
+ .getService(Ci.nsIStreamConverterService);
+});