author    Matt A. Tobin <mattatobin@localhost.localdomain> 2018-02-02 04:16:08 -0500
committer Matt A. Tobin <mattatobin@localhost.localdomain> 2018-02-02 04:16:08 -0500
commit    5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree      10027f336435511475e392454359edea8e25895d /toolkit/components/url-classifier/tests/unit
parent    49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
Add m-esr52 at 52.6.0
Diffstat (limited to 'toolkit/components/url-classifier/tests/unit')
-rw-r--r--  toolkit/components/url-classifier/tests/unit/.eslintrc.js                    |    7
-rw-r--r--  toolkit/components/url-classifier/tests/unit/data/digest1.chunk              |  bin 0 -> 939 bytes
-rw-r--r--  toolkit/components/url-classifier/tests/unit/data/digest2.chunk              |    2
-rw-r--r--  toolkit/components/url-classifier/tests/unit/head_urlclassifier.js           |  429
-rw-r--r--  toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js           |    1
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_addsub.js                  |  488
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_backoff.js                 |   89
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js |   32
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_dbservice.js               |  314
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_digest256.js               |  147
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_hashcompleter.js           |  403
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_listmanager.js             |  376
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_partial.js                 |  825
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_pref.js                    |   14
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_prefixset.js               |  232
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_provider_url.js            |   34
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js   |   23
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_streamupdater.js           |  288
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js  |   37
-rw-r--r--  toolkit/components/url-classifier/tests/unit/xpcshell.ini                    |   24
20 files changed, 3765 insertions(+), 0 deletions(-)
diff --git a/toolkit/components/url-classifier/tests/unit/.eslintrc.js b/toolkit/components/url-classifier/tests/unit/.eslintrc.js
new file mode 100644
index 000000000..d35787cd2
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/.eslintrc.js
@@ -0,0 +1,7 @@
+"use strict";
+
+module.exports = {
+ "extends": [
+ "../../../../../testing/xpcshell/xpcshell.eslintrc.js"
+ ]
+};
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest1.chunk b/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
new file mode 100644
index 000000000..3850373c1
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest2.chunk b/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
new file mode 100644
index 000000000..738c96f6b
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
@@ -0,0 +1,2 @@
+a:5:32:32
+“Ê_Há^˜aÍ7ÂÙ]´=#ÌnmåÃøún‹æo—ÌQ‰ \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js b/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
new file mode 100644
index 000000000..21849ced7
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
@@ -0,0 +1,429 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
+function dumpn(s) {
+ dump(s + "\n");
+}
+
+const NS_APP_USER_PROFILE_50_DIR = "ProfD";
+const NS_APP_USER_PROFILE_LOCAL_50_DIR = "ProfLD";
+
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+var Cr = Components.results;
+
+Cu.import("resource://testing-common/httpd.js");
+
+do_get_profile();
+
+var dirSvc = Cc["@mozilla.org/file/directory_service;1"].getService(Ci.nsIProperties);
+
+var iosvc = Cc["@mozilla.org/network/io-service;1"].getService(Ci.nsIIOService);
+
+var secMan = Cc["@mozilla.org/scriptsecuritymanager;1"]
+ .getService(Ci.nsIScriptSecurityManager);
+
+// Disable hashcompleter noise for tests
+var prefBranch = Cc["@mozilla.org/preferences-service;1"].
+ getService(Ci.nsIPrefBranch);
+prefBranch.setIntPref("urlclassifier.gethashnoise", 0);
+
+// Enable malware/phishing checking for tests
+prefBranch.setBoolPref("browser.safebrowsing.malware.enabled", true);
+prefBranch.setBoolPref("browser.safebrowsing.blockedURIs.enabled", true);
+prefBranch.setBoolPref("browser.safebrowsing.phishing.enabled", true);
+
+// Enable all completions for tests
+prefBranch.setCharPref("urlclassifier.disallow_completions", "");
+
+// Hash completion timeout
+prefBranch.setIntPref("urlclassifier.gethash.timeout_ms", 5000);
+
+function delFile(name) {
+ try {
+ // Delete a previously created sqlite file
+ var file = dirSvc.get('ProfLD', Ci.nsIFile);
+ file.append(name);
+ if (file.exists())
+ file.remove(false);
+ } catch(e) {
+ }
+}
+
+function cleanUp() {
+ delFile("urlclassifier3.sqlite");
+ delFile("safebrowsing/classifier.hashkey");
+ delFile("safebrowsing/test-phish-simple.sbstore");
+ delFile("safebrowsing/test-malware-simple.sbstore");
+ delFile("safebrowsing/test-unwanted-simple.sbstore");
+ delFile("safebrowsing/test-block-simple.sbstore");
+ delFile("safebrowsing/test-track-simple.sbstore");
+ delFile("safebrowsing/test-trackwhite-simple.sbstore");
+ delFile("safebrowsing/test-phish-simple.pset");
+ delFile("safebrowsing/test-malware-simple.pset");
+ delFile("safebrowsing/test-unwanted-simple.pset");
+ delFile("safebrowsing/test-block-simple.pset");
+ delFile("safebrowsing/test-track-simple.pset");
+ delFile("safebrowsing/test-trackwhite-simple.pset");
+ delFile("safebrowsing/moz-phish-simple.sbstore");
+ delFile("safebrowsing/moz-phish-simple.pset");
+ delFile("testLarge.pset");
+ delFile("testNoDelta.pset");
+}
+
+// Update uses allTables by default
+var allTables = "test-phish-simple,test-malware-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple";
+var mozTables = "moz-phish-simple";
+
+var dbservice = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(Ci.nsIUrlClassifierDBService);
+var streamUpdater = Cc["@mozilla.org/url-classifier/streamupdater;1"]
+ .getService(Ci.nsIUrlClassifierStreamUpdater);
+
+
+/*
+ * Builds an update from an object that looks like:
+ * { "test-phish-simple" : [{
+ *     "chunkType" : "a",  // 'a' is assumed if not specified
+ *     "chunkNum" : 1,     // numerically-increasing chunk numbers are
+ *                         // assumed if not specified
+ *     "urls" : [ "foo.com/a", "foo.com/b", "bar.com/" ]
+ *   }]
+ * }
+ */
+
+function buildUpdate(update, hashSize) {
+ if (!hashSize) {
+ hashSize = 32;
+ }
+ var updateStr = "n:1000\n";
+
+ for (var tableName in update) {
+ if (tableName != "")
+ updateStr += "i:" + tableName + "\n";
+ var chunks = update[tableName];
+ for (var j = 0; j < chunks.length; j++) {
+ var chunk = chunks[j];
+ var chunkType = chunk.chunkType ? chunk.chunkType : 'a';
+ var chunkNum = chunk.chunkNum ? chunk.chunkNum : j;
+ updateStr += chunkType + ':' + chunkNum + ':' + hashSize;
+
+ if (chunk.urls) {
+ var chunkData = chunk.urls.join("\n");
+ updateStr += ":" + chunkData.length + "\n" + chunkData;
+ }
+
+ updateStr += "\n";
+ }
+ }
+
+ return updateStr;
+}
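+
+// For reference, buildPhishingUpdate (below) with a single chunk of two urls
+// would serialize to the v2 wire format like this:
+//
+//   buildPhishingUpdate([{ "chunkNum" : 1,
+//                          "urls" : [ "foo.com/a", "foo.com/b" ] }]);
+//   // => "n:1000\n" +
+//   //    "i:test-phish-simple\n" +
+//   //    "a:1:32:19\n" +
+//   //    "foo.com/a\nfoo.com/b\n"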
+
+function buildPhishingUpdate(chunks, hashSize) {
+ return buildUpdate({"test-phish-simple" : chunks}, hashSize);
+}
+
+function buildMalwareUpdate(chunks, hashSize) {
+ return buildUpdate({"test-malware-simple" : chunks}, hashSize);
+}
+
+function buildUnwantedUpdate(chunks, hashSize) {
+ return buildUpdate({"test-unwanted-simple" : chunks}, hashSize);
+}
+
+function buildBlockedUpdate(chunks, hashSize) {
+ return buildUpdate({"test-block-simple" : chunks}, hashSize);
+}
+
+function buildMozPhishingUpdate(chunks, hashSize) {
+ return buildUpdate({"moz-phish-simple" : chunks}, hashSize);
+}
+
+function buildBareUpdate(chunks, hashSize) {
+ return buildUpdate({"" : chunks}, hashSize);
+}
+
+/**
+ * Performs an update of the dbservice manually, bypassing the stream updater
+ */
+function doSimpleUpdate(updateText, success, failure) {
+ var listener = {
+ QueryInterface: function(iid)
+ {
+ if (iid.equals(Ci.nsISupports) ||
+ iid.equals(Ci.nsIUrlClassifierUpdateObserver))
+ return this;
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+ updateUrlRequested: function(url) { },
+ streamFinished: function(status) { },
+ updateError: function(errorCode) { failure(errorCode); },
+ updateSuccess: function(requestedTimeout) { success(requestedTimeout); }
+ };
+
+ dbservice.beginUpdate(listener, allTables);
+ dbservice.beginStream("", "");
+ dbservice.updateStream(updateText);
+ dbservice.finishStream();
+ dbservice.finishUpdate();
+}
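+
+// For example, a minimal manual update in the format produced by buildUpdate
+// could be applied like:
+//
+//   doSimpleUpdate("n:1000\ni:test-phish-simple\na:1:32:9\nfoo.com/a\n",
+//                  timeout => dumpn("updated, next in " + timeout),
+//                  errorCode => do_throw(errorCode));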
+
+/**
+ * Simulates a failed database update.
+ */
+function doErrorUpdate(tables, success, failure) {
+ var listener = {
+ QueryInterface: function(iid)
+ {
+ if (iid.equals(Ci.nsISupports) ||
+ iid.equals(Ci.nsIUrlClassifierUpdateObserver))
+ return this;
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ },
+
+ updateUrlRequested: function(url) { },
+ streamFinished: function(status) { },
+ updateError: function(errorCode) { success(errorCode); },
+ updateSuccess: function(requestedTimeout) { failure(requestedTimeout); }
+ };
+
+ dbservice.beginUpdate(listener, tables, null);
+ dbservice.beginStream("", "");
+ dbservice.cancelUpdate();
+}
+
+/**
+ * Performs an update of the dbservice using the stream updater and a
+ * data: uri
+ */
+function doStreamUpdate(updateText, success, failure, downloadFailure) {
+ var dataUpdate = "data:," + encodeURIComponent(updateText);
+
+ if (!downloadFailure) {
+ downloadFailure = failure;
+ }
+
+ streamUpdater.downloadUpdates(allTables, "", true,
+ dataUpdate, success, failure, downloadFailure);
+}
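+
+// The update text travels inside the data: URI itself; for example,
+// encodeURIComponent("n:1000\n") is "n%3A1000%0A", so that update would be
+// fetched from "data:,n%3A1000%0A".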
+
+var gAssertions = {
+
+tableData : function(expectedTables, cb)
+{
+ dbservice.getTables(function(tables) {
+ // rebuild the tables in a predictable order.
+ var parts = tables.split("\n");
+ while (parts[parts.length - 1] == '') {
+ parts.pop();
+ }
+ parts.sort();
+ tables = parts.join("\n");
+
+ do_check_eq(tables, expectedTables);
+ cb();
+ });
+},
+
+checkUrls: function(urls, expected, cb, useMoz = false)
+{
+ // work with a copy of the list.
+ urls = urls.slice(0);
+ var doLookup = function() {
+ if (urls.length > 0) {
+ var tables = useMoz ? mozTables : allTables;
+ var fragment = urls.shift();
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + fragment, null, null), {});
+ dbservice.lookup(principal, tables,
+ function(arg) {
+ do_check_eq(expected, arg);
+ doLookup();
+ }, true);
+ } else {
+ cb();
+ }
+ };
+ doLookup();
+},
+
+checkTables: function(url, expected, cb)
+{
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + url, null, null), {});
+ dbservice.lookup(principal, allTables, function(tables) {
+ // Rebuild tables in a predictable order.
+ var parts = tables.split(",");
+ while (parts[parts.length - 1] == '') {
+ parts.pop();
+ }
+ parts.sort();
+ tables = parts.join(",");
+ do_check_eq(tables, expected);
+ cb();
+ }, true);
+},
+
+urlsDontExist: function(urls, cb)
+{
+ this.checkUrls(urls, '', cb);
+},
+
+urlsExist: function(urls, cb)
+{
+ this.checkUrls(urls, 'test-phish-simple', cb);
+},
+
+malwareUrlsExist: function(urls, cb)
+{
+ this.checkUrls(urls, 'test-malware-simple', cb);
+},
+
+unwantedUrlsExist: function(urls, cb)
+{
+ this.checkUrls(urls, 'test-unwanted-simple', cb);
+},
+
+blockedUrlsExist: function(urls, cb)
+{
+ this.checkUrls(urls, 'test-block-simple', cb);
+},
+
+mozPhishingUrlsExist: function(urls, cb)
+{
+ this.checkUrls(urls, 'moz-phish-simple', cb, true);
+},
+
+subsDontExist: function(urls, cb)
+{
+ // XXX: there's no interface for checking items in the subs table
+ cb();
+},
+
+subsExist: function(urls, cb)
+{
+ // XXX: there's no interface for checking items in the subs table
+ cb();
+},
+
+urlExistInMultipleTables: function(data, cb)
+{
+ this.checkTables(data["url"], data["tables"], cb);
+}
+
+};
+
+/**
+ * Check a set of assertions against the gAssertions table.
+ */
+function checkAssertions(assertions, doneCallback)
+{
+ var checkAssertion = function() {
+ for (var i in assertions) {
+ var data = assertions[i];
+ delete assertions[i];
+ gAssertions[i](data, checkAssertion);
+ return;
+ }
+
+ doneCallback();
+ }
+
+ checkAssertion();
+}
+
+function updateError(arg)
+{
+ do_throw(arg);
+}
+
+// Runs a set of updates, and then checks a set of assertions.
+function doUpdateTest(updates, assertions, successCallback, errorCallback) {
+ var errorUpdate = function() {
+ checkAssertions(assertions, errorCallback);
+ }
+
+ var runUpdate = function() {
+ if (updates.length > 0) {
+ var update = updates.shift();
+ doStreamUpdate(update, runUpdate, errorUpdate, null);
+ } else {
+ checkAssertions(assertions, successCallback);
+ }
+ }
+
+ runUpdate();
+}
+
+var gTests;
+var gNextTest = 0;
+
+function runNextTest()
+{
+ if (gNextTest >= gTests.length) {
+ do_test_finished();
+ return;
+ }
+
+ dbservice.resetDatabase();
+ dbservice.setHashCompleter('test-phish-simple', null);
+
+ let test = gTests[gNextTest++];
+ dump("running " + test.name + "\n");
+ test();
+}
+
+function runTests(tests)
+{
+ gTests = tests;
+ runNextTest();
+}
+
+var timerArray = [];
+
+function Timer(delay, cb) {
+ this.cb = cb;
+ var timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ timer.initWithCallback(this, delay, timer.TYPE_ONE_SHOT);
+ timerArray.push(timer);
+}
+
+Timer.prototype = {
+QueryInterface: function(iid) {
+ if (!iid.equals(Ci.nsISupports) && !iid.equals(Ci.nsITimerCallback)) {
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ }
+ return this;
+ },
+notify: function(timer) {
+ this.cb();
+ }
+}
+
+// LFSRgenerator is a 32-bit linear feedback shift register random number
+// generator. It is highly predictable and is not intended to be used for
+// cryptography but rather to allow easier debugging than a test that uses
+// Math.random().
+function LFSRgenerator(seed) {
+ // Force |seed| to be a number.
+ seed = +seed;
+ // LFSR generators do not work with a value of 0.
+ if (seed == 0)
+ seed = 1;
+
+ this._value = seed;
+}
+LFSRgenerator.prototype = {
+ // nextNum returns a random unsigned integer in the range [0, 2^|bits|).
+ nextNum: function(bits) {
+ if (!bits)
+ bits = 32;
+
+ let val = this._value;
+ // Taps are 32, 22, 2 and 1.
+ let bit = ((val >>> 0) ^ (val >>> 10) ^ (val >>> 30) ^ (val >>> 31)) & 1;
+ val = (val >>> 1) | (bit << 31);
+ this._value = val;
+
+ return (val >>> (32 - bits));
+ },
+};
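+
+// For example, reseeding with the seed logged by a failing test replays the
+// exact same sequence:
+//
+//   let rng = new LFSRgenerator(42); // same seed => same stream
+//   let byte = rng.nextNum(8);       // deterministic value in [0, 2^8)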
+
+cleanUp();
diff --git a/toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js b/toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js
new file mode 100644
index 000000000..37f39d1a8
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js
@@ -0,0 +1 @@
+cleanUp();
diff --git a/toolkit/components/url-classifier/tests/unit/test_addsub.js b/toolkit/components/url-classifier/tests/unit/test_addsub.js
new file mode 100644
index 000000000..1ed65c7ba
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_addsub.js
@@ -0,0 +1,488 @@
+
+function doTest(updates, assertions)
+{
+ doUpdateTest(updates, assertions, runNextTest, updateError);
+}
+
+// Test an add of two urls to a fresh database
+function testSimpleAdds() {
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : addUrls
+ };
+
+ doTest([update], assertions);
+}
+
+// Same as testSimpleAdds, but make the same-domain URLs come from different
+// chunks.
+function testMultipleAdds() {
+ var add1Urls = [ "foo.com/a", "bar.com/c" ];
+ var add2Urls = [ "foo.com/b" ];
+
+ var update = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls },
+ { "chunkNum" : 2,
+ "urls" : add2Urls }]);
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-2",
+ "urlsExist" : add1Urls.concat(add2Urls)
+ };
+
+ doTest([update], assertions);
+}
+
+// Test that a sub will remove an existing add
+function testSimpleSub()
+{
+ var addUrls = ["foo.com/a", "bar.com/b"];
+ var subUrls = ["1:foo.com/a"];
+
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
+ "urls": addUrls }]);
+
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 50,
+ "chunkType" : "s",
+ "urls": subUrls }]);
+
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1:s:50",
+ "urlsExist" : [ "bar.com/b" ],
+ "urlsDontExist": ["foo.com/a" ],
+ "subsDontExist" : [ "foo.com/a" ]
+ }
+
+ doTest([addUpdate, subUpdate], assertions);
+
+}
+
+// Same as testSimpleSub(), but the sub comes in before the add.
+function testSubEmptiesAdd()
+{
+ var subUrls = ["1:foo.com/a"];
+ var addUrls = ["foo.com/a", "bar.com/b"];
+
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 50,
+ "chunkType" : "s",
+ "urls": subUrls }]);
+
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls": addUrls }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1:s:50",
+ "urlsExist" : [ "bar.com/b" ],
+ "urlsDontExist": ["foo.com/a" ],
+ "subsDontExist" : [ "foo.com/a" ] // this sub was found, it shouldn't exist anymore
+ }
+
+ doTest([subUpdate, addUpdate], assertions);
+}
+
+// Very similar to testSubEmptiesAdd, except that the domain entry will
+// still have an item left over that needs to be synced.
+function testSubPartiallyEmptiesAdd()
+{
+ var subUrls = ["1:foo.com/a"];
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/b"];
+
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls": subUrls }]);
+
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
+ "urls": addUrls }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1:s:1",
+ "urlsExist" : [ "foo.com/b", "bar.com/b" ],
+ "urlsDontExist" : ["foo.com/a" ],
+ "subsDontExist" : [ "foo.com/a" ] // this sub was found, it shouldn't exist anymore
+ }
+
+ doTest([subUpdate, addUpdate], assertions);
+}
+
+// We SHOULD be testing that pending subs are removed using
+// subsDontExist assertions. Since we don't have a good interface for getting
+// at sub entries, we'll verify it by side-effect. Subbing a url once
+// then adding it twice should leave the url intact.
+function testPendingSubRemoved()
+{
+ var subUrls = ["1:foo.com/a", "2:foo.com/b"];
+ var addUrls = ["foo.com/a", "foo.com/b"];
+
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls": subUrls }]);
+
+ var addUpdate1 = buildPhishingUpdate(
+ [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
+ "urls": addUrls }]);
+
+ var addUpdate2 = buildPhishingUpdate(
+ [{ "chunkNum" : 2,
+ "urls": addUrls }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-2:s:1",
+ "urlsExist" : [ "foo.com/a", "foo.com/b" ],
+ "subsDontExist" : [ "foo.com/a", "foo.com/b" ] // this sub was found, it shouldn't exist anymore
+ }
+
+ doTest([subUpdate, addUpdate1, addUpdate2], assertions);
+}
+
+// Make sure that a saved sub is removed when the sub chunk is expired.
+function testPendingSubExpire()
+{
+ var subUrls = ["1:foo.com/a", "1:foo.com/b"];
+ var addUrls = ["foo.com/a", "foo.com/b"];
+
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls": subUrls }]);
+
+ var expireUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "sd" }]);
+
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
+ "urls": addUrls }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : [ "foo.com/a", "foo.com/b" ],
+ "subsDontExist" : [ "foo.com/a", "foo.com/b" ] // this sub was expired
+ }
+
+ doTest([subUpdate, expireUpdate, addUpdate], assertions);
+}
+
+// Make sure that a sub URL removes the entry only from the chunk it specifies
+function testDuplicateAdds()
+{
+ var urls = ["foo.com/a"];
+
+ var addUpdate1 = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls": urls }]);
+ var addUpdate2 = buildPhishingUpdate(
+ [{ "chunkNum" : 2,
+ "urls": urls }]);
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 3,
+ "chunkType" : "s",
+ "urls": ["2:foo.com/a"]}]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-2:s:3",
+ "urlsExist" : [ "foo.com/a"],
+ "subsDontExist" : [ "foo.com/a"]
+ }
+
+ doTest([addUpdate1, addUpdate2, subUpdate], assertions);
+}
+
+// Tests a sub which matches some existing adds but leaves others.
+function testSubPartiallyMatches()
+{
+ var subUrls = ["foo.com/a"];
+ var addUrls = ["1:foo.com/a", "2:foo.com/b"];
+
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : addUrls }]);
+
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls" : addUrls }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1:s:1",
+ "urlsDontExist" : ["foo.com/a"],
+ "subsDontExist" : ["foo.com/a"],
+ "subsExist" : ["foo.com/b"]
+ };
+
+ doTest([addUpdate, subUpdate], assertions);
+}
+
+// XXX: because subsExist isn't actually implemented, this is the same
+// test as above but with a second add chunk that should fail to be added
+// because of a pending sub chunk.
+function testSubPartiallyMatches2()
+{
+ var addUrls = ["foo.com/a"];
+ var subUrls = ["1:foo.com/a", "2:foo.com/b"];
+ var addUrls2 = ["foo.com/b"];
+
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : addUrls }]);
+
+ var subUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls" : subUrls }]);
+
+ var addUpdate2 = buildPhishingUpdate(
+ [{ "chunkNum" : 2,
+ "urls" : addUrls2 }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-2:s:1",
+ "urlsDontExist" : ["foo.com/a", "foo.com/b"],
+ "subsDontExist" : ["foo.com/a", "foo.com/b"]
+ };
+
+ doTest([addUpdate, subUpdate, addUpdate2], assertions);
+}
+
+// Verify that two subs for the same domain but from different chunks
+// match (tests that existing sub entries are properly updated)
+function testSubsDifferentChunks() {
+ var subUrls1 = [ "3:foo.com/a" ];
+ var subUrls2 = [ "3:foo.com/b" ];
+
+ var addUrls = [ "foo.com/a", "foo.com/b", "foo.com/c" ];
+
+ var subUpdate1 = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls": subUrls1 }]);
+ var subUpdate2 = buildPhishingUpdate(
+ [{ "chunkNum" : 2,
+ "chunkType" : "s",
+ "urls" : subUrls2 }]);
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 3,
+ "urls" : addUrls }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:3:s:1-2",
+ "urlsExist" : [ "foo.com/c" ],
+ "urlsDontExist" : [ "foo.com/a", "foo.com/b" ],
+ "subsDontExist" : [ "foo.com/a", "foo.com/b" ]
+ };
+
+ doTest([subUpdate1, subUpdate2, addUpdate], assertions);
+}
+
+// Regression test for bug 534079.
+function testSubsDifferentChunksSameHostId() {
+ var subUrls1 = [ "1:foo.com/a" ];
+ var subUrls2 = [ "1:foo.com/b", "2:foo.com/c" ];
+
+ var addUrls = [ "foo.com/a", "foo.com/b" ];
+ var addUrls2 = [ "foo.com/c" ];
+
+ var subUpdate1 = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls": subUrls1 }]);
+ var subUpdate2 = buildPhishingUpdate(
+ [{ "chunkNum" : 2,
+ "chunkType" : "s",
+ "urls" : subUrls2 }]);
+
+ var addUpdate = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : addUrls }]);
+ var addUpdate2 = buildPhishingUpdate(
+ [{ "chunkNum" : 2,
+ "urls" : addUrls2 }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-2:s:1-2",
+ "urlsDontExist" : [ "foo.com/c", "foo.com/b", "foo.com/a", ],
+ };
+
+ doTest([addUpdate, addUpdate2, subUpdate1, subUpdate2], assertions);
+}
+
+// Test lists of expired chunks
+function testExpireLists() {
+ var addUpdate = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : [ "foo.com/a" ]
+ },
+ { "chunkNum" : 3,
+ "urls" : [ "bar.com/a" ]
+ },
+ { "chunkNum" : 4,
+ "urls" : [ "baz.com/a" ]
+ },
+ { "chunkNum" : 5,
+ "urls" : [ "blah.com/a" ]
+ },
+ ]);
+ var subUpdate = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "chunkType" : "s",
+ "urls" : [ "50:foo.com/1" ]
+ },
+ { "chunkNum" : 2,
+ "chunkType" : "s",
+ "urls" : [ "50:bar.com/1" ]
+ },
+ { "chunkNum" : 3,
+ "chunkType" : "s",
+ "urls" : [ "50:baz.com/1" ]
+ },
+ { "chunkNum" : 5,
+ "chunkType" : "s",
+ "urls" : [ "50:blah.com/1" ]
+ },
+ ]);
+
+ var expireUpdate = buildPhishingUpdate(
+ [ { "chunkType" : "ad:1,3-5" },
+ { "chunkType" : "sd:1-3,5" }]);
+
+ var assertions = {
+ // "tableData" : "test-phish-simple;"
+ "tableData": ""
+ };
+
+ doTest([addUpdate, subUpdate, expireUpdate], assertions);
+}
+
+// Test a duplicate add chunk.
+function testDuplicateAddChunks() {
+ var addUrls1 = [ "foo.com/a" ];
+ var addUrls2 = [ "bar.com/b" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls1
+ },
+ { "chunkNum" : 1,
+ "urls" : addUrls2
+ }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : addUrls1,
+ "urlsDontExist" : addUrls2
+ };
+
+ doTest([update], assertions);
+}
+
+// This test is a bit tricky. We want to test that an add removes all
+// subs with the same add chunk id, even if there is no match. To do
+// that we need to add the same add chunk twice, with an expiration
+// in the middle. This would be easier if subsDontExist actually
+// worked...
+function testExpireWholeSub()
+{
+ var subUrls = ["1:foo.com/a"];
+
+ var update = buildPhishingUpdate(
+ [{ "chunkNum" : 5,
+ "chunkType" : "s",
+ "urls" : subUrls
+ },
+ // empty add chunk should still cause foo.com/a to go away.
+ { "chunkNum" : 1,
+ "urls" : []
+ },
+ // and now adding chunk 1 again with foo.com/a should succeed,
+ // because the sub should have been expired with the empty
+ // add chunk.
+
+ // we need to expire this chunk to let us add chunk 1 again.
+ {
+ "chunkType" : "ad:1"
+ },
+ { "chunkNum" : 1,
+ "urls" : [ "foo.com/a" ]
+ }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1:s:5",
+ "urlsExist" : ["foo.com/a"]
+ };
+
+ doTest([update], assertions);
+}
+
+
+// This test is roughly the opposite of testExpireWholeSub(). We add
+// the empty add first, and make sure that it prevents a sub for that
+// add from being applied.
+function testPreventWholeSub()
+{
+ var subUrls = ["1:foo.com/a"];
+
+ var update = buildPhishingUpdate(
+ [ // empty add chunk should cause foo.com/a to not be saved
+ { "chunkNum" : 1,
+ "urls" : []
+ },
+ { "chunkNum" : 5,
+ "chunkType" : "s",
+ "urls" : subUrls
+ },
+ // and now adding chunk 1 again with foo.com/a should succeed,
+ // because the sub should have been expired with the empty
+ // add chunk.
+
+ // we need to expire this chunk to let us add chunk 1 again.
+ {
+ "chunkType" : "ad:1"
+ },
+ { "chunkNum" : 1,
+ "urls" : [ "foo.com/a" ]
+ }]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1:s:5",
+ "urlsExist" : ["foo.com/a"]
+ };
+
+ doTest([update], assertions);
+}
+
+function run_test()
+{
+ runTests([
+ testSimpleAdds,
+ testMultipleAdds,
+ testSimpleSub,
+ testSubEmptiesAdd,
+ testSubPartiallyEmptiesAdd,
+ testPendingSubRemoved,
+ testPendingSubExpire,
+ testDuplicateAdds,
+ testSubPartiallyMatches,
+ testSubPartiallyMatches2,
+ testSubsDifferentChunks,
+ testSubsDifferentChunksSameHostId,
+ testExpireLists
+ ]);
+}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_backoff.js b/toolkit/components/url-classifier/tests/unit/test_backoff.js
new file mode 100644
index 000000000..365568c47
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_backoff.js
@@ -0,0 +1,89 @@
+// Unit tests for RequestBackoff (simple enough to paste into a JS shell).
+var jslib = Cc["@mozilla.org/url-classifier/jslib;1"].
+ getService().wrappedJSObject;
+var _Datenow = jslib.Date.now;
+function setNow(time) {
+ jslib.Date.now = function() {
+ return time;
+ }
+}
+
+function run_test() {
+ // 3 errors, 1ms retry period, max 3 requests per ten milliseconds,
+ // 5ms backoff interval, 19ms max delay
+ var rb = new jslib.RequestBackoff(3, 1, 3, 10, 5, 19);
+ setNow(1);
+ rb.noteServerResponse(200);
+ do_check_true(rb.canMakeRequest());
+ setNow(2);
+ do_check_true(rb.canMakeRequest());
+
+ // First error should trigger a 1ms delay
+ rb.noteServerResponse(500);
+ do_check_false(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 3);
+ setNow(3);
+ do_check_true(rb.canMakeRequest());
+
+ // Second error should also trigger a 1ms delay
+ rb.noteServerResponse(500);
+ do_check_false(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 4);
+ setNow(4);
+ do_check_true(rb.canMakeRequest());
+
+ // Third error should trigger a 5ms backoff
+ rb.noteServerResponse(500);
+ do_check_false(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 9);
+ setNow(9);
+ do_check_true(rb.canMakeRequest());
+
+ // Trigger backoff again
+ rb.noteServerResponse(503);
+ do_check_false(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 19);
+ setNow(19);
+ do_check_true(rb.canMakeRequest());
+
+ // Trigger backoff a third time and hit max timeout
+ rb.noteServerResponse(302);
+ do_check_false(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 38);
+ setNow(38);
+ do_check_true(rb.canMakeRequest());
+
+ // One more backoff, should still be at the max timeout
+ rb.noteServerResponse(400);
+ do_check_false(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 57);
+ setNow(57);
+ do_check_true(rb.canMakeRequest());
+
+ // Request goes through
+ rb.noteServerResponse(200);
+ do_check_true(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 0);
+ setNow(58);
+ rb.noteServerResponse(500);
+
+ // Another error, should trigger a 1ms backoff
+ do_check_false(rb.canMakeRequest());
+ do_check_eq(rb.nextRequestTime_, 59);
+
+ setNow(59);
+ do_check_true(rb.canMakeRequest());
+
+ setNow(200);
+ rb.noteRequest();
+ setNow(201);
+ rb.noteRequest();
+ setNow(202);
+ do_check_true(rb.canMakeRequest());
+ rb.noteRequest();
+ do_check_false(rb.canMakeRequest());
+ setNow(211);
+ do_check_true(rb.canMakeRequest());
+
+ jslib.Date.now = _Datenow;
+}
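+
+// For reference, the delays asserted above follow from the constructor
+// arguments: the first two errors use the 1ms retry period; from the third
+// error on, the 5ms backoff interval applies and doubles, capped at the 19ms
+// max delay (5 -> 10 -> min(20, 19) = 19 -> 19), giving next-request times
+// 9, 19, 38 and 57.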
diff --git a/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js b/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
new file mode 100644
index 000000000..037bc7b88
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
@@ -0,0 +1,32 @@
+Cu.import("resource://gre/modules/SafeBrowsing.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+Cu.import("resource://testing-common/AppInfo.jsm");
+
+// In xpcshell, 'Cc["@mozilla.org/xre/app-info;1"]' has no nsIXULAppInfo,
+// so we have to update it to keep nsURLFormatter.js happy.
+// (SafeBrowsing.init() will indirectly use nsURLFormatter.js.)
+updateAppInfo();
+
+function run_test() {
+ SafeBrowsing.init();
+
+ let origList = Services.prefs.getCharPref("browser.safebrowsing.provider.google.lists");
+
+ // Remove 'goog-malware-shavar' from the original.
+ let trimmedList = origList.replace('goog-malware-shavar,', '');
+ Services.prefs.setCharPref("browser.safebrowsing.provider.google.lists", trimmedList);
+
+ try {
+ // Bug 1274685 - Unowned Safe Browsing tables break list updates
+ //
+ // If SafeBrowsing.registerTableWithURLs() doesn't check if
+ // a provider is found before registering table, an exception
+ // will be thrown while accessing a null object.
+ //
+ SafeBrowsing.registerTables();
+ } catch (e) {
+ ok(false, 'Exception thrown due to ' + e.toString());
+ }
+
+ Services.prefs.setCharPref("browser.safebrowsing.provider.google.lists", origList);
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_dbservice.js b/toolkit/components/url-classifier/tests/unit/test_dbservice.js
new file mode 100644
index 000000000..4b01e7016
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_dbservice.js
@@ -0,0 +1,314 @@
+var checkUrls = [];
+var checkExpect;
+
+var chunk1Urls = [
+ "test.com/aba",
+ "test.com/foo/bar",
+ "foo.bar.com/a/b/c"
+];
+var chunk1 = chunk1Urls.join("\n");
+
+var chunk2Urls = [
+ "blah.com/a",
+ "baz.com/",
+ "255.255.0.1/",
+ "www.foo.com/test2?param=1"
+];
+var chunk2 = chunk2Urls.join("\n");
+
+var chunk3Urls = [
+ "test.com/a",
+ "foo.bar.com/a",
+ "blah.com/a",
+ ];
+var chunk3 = chunk3Urls.join("\n");
+
+var chunk3SubUrls = [
+ "1:test.com/a",
+ "1:foo.bar.com/a",
+ "2:blah.com/a" ];
+var chunk3Sub = chunk3SubUrls.join("\n");
+
+var chunk4Urls = [
+ "a.com/b",
+ "b.com/c",
+ ];
+var chunk4 = chunk4Urls.join("\n");
+
+var chunk5Urls = [
+ "d.com/e",
+ "f.com/g",
+ ];
+var chunk5 = chunk5Urls.join("\n");
+
+var chunk6Urls = [
+ "h.com/i",
+ "j.com/k",
+ ];
+var chunk6 = chunk6Urls.join("\n");
+
+var chunk7Urls = [
+ "l.com/m",
+ "n.com/o",
+ ];
+var chunk7 = chunk7Urls.join("\n");
+
+// We are going to add chunks 1, 2, 4, 5, and 6 to test-phish-simple, the
+// chunk2 urls as chunk 1 of test-malware-simple, the chunk3 urls as chunk 1
+// of test-unwanted-simple, and the chunk7 urls as chunk 1 of
+// test-block-simple. Then we'll sub the urls in chunk3 from test-phish-simple
+// and expire chunk 1 and chunks 4-6 from test-phish-simple.
+var phishExpected = {};
+var phishUnexpected = {};
+var malwareExpected = {};
+var unwantedExpected = {};
+var blockedExpected = {};
+for (var i = 0; i < chunk2Urls.length; i++) {
+ phishExpected[chunk2Urls[i]] = true;
+ malwareExpected[chunk2Urls[i]] = true;
+}
+for (var i = 0; i < chunk3Urls.length; i++) {
+ unwantedExpected[chunk3Urls[i]] = true;
+ delete phishExpected[chunk3Urls[i]];
+ phishUnexpected[chunk3Urls[i]] = true;
+}
+for (var i = 0; i < chunk1Urls.length; i++) {
+ // chunk1 urls are expired
+ phishUnexpected[chunk1Urls[i]] = true;
+}
+for (var i = 0; i < chunk4Urls.length; i++) {
+ // chunk4 urls are expired
+ phishUnexpected[chunk4Urls[i]] = true;
+}
+for (var i = 0; i < chunk5Urls.length; i++) {
+ // chunk5 urls are expired
+ phishUnexpected[chunk5Urls[i]] = true;
+}
+for (var i = 0; i < chunk6Urls.length; i++) {
+ // chunk6 urls are expired
+ phishUnexpected[chunk6Urls[i]] = true;
+}
+for (var i = 0; i < chunk7Urls.length; i++) {
+ blockedExpected[chunk7Urls[i]] = true;
+ // chunk7 urls are expired
+ phishUnexpected[chunk7Urls[i]] = true;
+}
+
+// Check that the entries hit based on sub-parts
+phishExpected["baz.com/foo/bar"] = true;
+phishExpected["foo.bar.baz.com/foo"] = true;
+phishExpected["bar.baz.com/"] = true;
+
+var numExpecting;
+
+function testFailure(arg) {
+ do_throw(arg);
+}
+
+function checkNoHost()
+{
+ // Looking up a no-host uri such as a data: uri should throw an exception.
+ var exception;
+ try {
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("data:text/html,<b>test</b>", null, null), {});
+ dbservice.lookup(principal, allTables);
+
+ exception = false;
+ } catch(e) {
+ exception = true;
+ }
+ do_check_true(exception);
+
+ do_test_finished();
+}
+
+function tablesCallbackWithoutSub(tables)
+{
+ var parts = tables.split("\n");
+ parts.sort();
+
+ // there's a leading \n here because splitting left an empty string
+ // after the trailing newline, which will sort first
+ do_check_eq(parts.join("\n"),
+ "\ntest-block-simple;a:1\ntest-malware-simple;a:1\ntest-phish-simple;a:2\ntest-unwanted-simple;a:1");
+
+ checkNoHost();
+}
+
+
+function expireSubSuccess(result) {
+ dbservice.getTables(tablesCallbackWithoutSub);
+}
+
+function tablesCallbackWithSub(tables)
+{
+ var parts = tables.split("\n");
+ parts.sort();
+
+ // there's a leading \n here because splitting left an empty string
+ // after the trailing newline, which will sort first
+ do_check_eq(parts.join("\n"),
+ "\ntest-block-simple;a:1\ntest-malware-simple;a:1\ntest-phish-simple;a:2:s:3\ntest-unwanted-simple;a:1");
+
+ // verify that expiring a sub chunk removes its name from the list
+ var data =
+ "n:1000\n" +
+ "i:test-phish-simple\n" +
+ "sd:3\n";
+
+ doSimpleUpdate(data, expireSubSuccess, testFailure);
+}
+
+function checkChunksWithSub()
+{
+ dbservice.getTables(tablesCallbackWithSub);
+}
+
+function checkDone() {
+ if (--numExpecting == 0)
+ checkChunksWithSub();
+}
+
+function phishExists(result) {
+ dumpn("phishExists: " + result);
+ try {
+ do_check_true(result.indexOf("test-phish-simple") != -1);
+ } finally {
+ checkDone();
+ }
+}
+
+function phishDoesntExist(result) {
+ dumpn("phishDoesntExist: " + result);
+ try {
+ do_check_true(result.indexOf("test-phish-simple") == -1);
+ } finally {
+ checkDone();
+ }
+}
+
+function malwareExists(result) {
+ dumpn("malwareExists: " + result);
+
+ try {
+ do_check_true(result.indexOf("test-malware-simple") != -1);
+ } finally {
+ checkDone();
+ }
+}
+
+function unwantedExists(result) {
+ dumpn("unwantedExists: " + result);
+
+ try {
+ do_check_true(result.indexOf("test-unwanted-simple") != -1);
+ } finally {
+ checkDone();
+ }
+}
+
+function blockedExists(result) {
+ dumpn("blockedExists: " + result);
+
+ try {
+ do_check_true(result.indexOf("test-block-simple") != -1);
+ } finally {
+ checkDone();
+ }
+}
+
+function checkState()
+{
+ numExpecting = 0;
+
+
+ for (var key in phishExpected) {
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
+ dbservice.lookup(principal, allTables, phishExists, true);
+ numExpecting++;
+ }
+
+ for (var key in phishUnexpected) {
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
+ dbservice.lookup(principal, allTables, phishDoesntExist, true);
+ numExpecting++;
+ }
+
+ for (var key in malwareExpected) {
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
+ dbservice.lookup(principal, allTables, malwareExists, true);
+ numExpecting++;
+ }
+
+ for (var key in unwantedExpected) {
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
+ dbservice.lookup(principal, allTables, unwantedExists, true);
+ numExpecting++;
+ }
+
+ for (var key in blockedExpected) {
+ var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
+ dbservice.lookup(principal, allTables, blockedExists, true);
+ numExpecting++;
+ }
+}
+
+function testSubSuccess(result)
+{
+ do_check_eq(result, "1000");
+ checkState();
+}
+
+function do_subs() {
+ var data =
+ "n:1000\n" +
+ "i:test-phish-simple\n" +
+ "s:3:32:" + chunk3Sub.length + "\n" +
+ chunk3Sub + "\n" +
+ "ad:1\n" +
+ "ad:4-6\n";
+
+ doSimpleUpdate(data, testSubSuccess, testFailure);
+}
+
+function testAddSuccess(arg) {
+ do_check_eq(arg, "1000");
+
+ do_subs();
+}
+
+function do_adds() {
+ // This test relies on the fact that only -regexp tables are ungzipped,
+ // and only -hash tables are assumed to be pre-md5'd. So we use
+ // a 'simple' table type to get simple hostname-per-line semantics.
+
+ var data =
+ "n:1000\n" +
+ "i:test-phish-simple\n" +
+ "a:1:32:" + chunk1.length + "\n" +
+ chunk1 + "\n" +
+ "a:2:32:" + chunk2.length + "\n" +
+ chunk2 + "\n" +
+ "a:4:32:" + chunk4.length + "\n" +
+ chunk4 + "\n" +
+ "a:5:32:" + chunk5.length + "\n" +
+ chunk5 + "\n" +
+ "a:6:32:" + chunk6.length + "\n" +
+ chunk6 + "\n" +
+ "i:test-malware-simple\n" +
+ "a:1:32:" + chunk2.length + "\n" +
+ chunk2 + "\n" +
+ "i:test-unwanted-simple\n" +
+ "a:1:32:" + chunk3.length + "\n" +
+ chunk3 + "\n" +
+ "i:test-block-simple\n" +
+ "a:1:32:" + chunk7.length + "\n" +
+ chunk7 + "\n";
+
+ doSimpleUpdate(data, testAddSuccess, testFailure);
+}
+
+function run_test() {
+ do_adds();
+ do_test_pending();
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_digest256.js b/toolkit/components/url-classifier/tests/unit/test_digest256.js
new file mode 100644
index 000000000..6ae652915
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_digest256.js
@@ -0,0 +1,147 @@
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+
+XPCOMUtils.defineLazyModuleGetter(this, "NetUtil",
+ "resource://gre/modules/NetUtil.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "Promise",
+ "resource://gre/modules/Promise.jsm");
+// Global test server for serving safebrowsing updates.
+var gHttpServ = null;
+// Global nsIUrlClassifierDBService
+var gDbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
+ .getService(Ci.nsIUrlClassifierDBService);
+// Security manager for creating nsIPrincipals from URIs
+var gSecMan = Cc["@mozilla.org/scriptsecuritymanager;1"]
+ .getService(Ci.nsIScriptSecurityManager);
+
+// A map of tables to arrays of update redirect urls.
+var gTables = {};
+
+// Construct an update from a file.
+function readFileToString(aFilename) {
+ let f = do_get_file(aFilename);
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"]
+ .createInstance(Ci.nsIFileInputStream);
+ stream.init(f, -1, 0, 0);
+ let buf = NetUtil.readInputStreamToString(stream, stream.available());
+ return buf;
+}
+
+// Registers a table for which to serve update chunks. Returns a promise that
+// resolves when that chunk has been downloaded.
+function registerTableUpdate(aTable, aFilename) {
+ let deferred = Promise.defer();
+ // If we haven't been given an update for this table yet, add it to the map
+ if (!(aTable in gTables)) {
+ gTables[aTable] = [];
+ }
+
+ // The number of chunks associated with this table.
+ let numChunks = gTables[aTable].length + 1;
+ let redirectPath = "/" + aTable + "-" + numChunks;
+ let redirectUrl = "localhost:4444" + redirectPath;
+
+ // Store redirect url for that table so we can return it later when we
+ // process an update request.
+ gTables[aTable].push(redirectUrl);
+
+ gHttpServ.registerPathHandler(redirectPath, function(request, response) {
+ do_print("Mock safebrowsing server handling request for " + redirectPath);
+ let contents = readFileToString(aFilename);
+ response.setHeader("Content-Type",
+ "application/vnd.google.safebrowsing-update", false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(contents, contents.length);
+ deferred.resolve(contents);
+ });
+ return deferred.promise;
+}
+
+// Construct a response with redirect urls.
+function processUpdateRequest() {
+ let response = "n:1000\n";
+ for (let table in gTables) {
+ response += "i:" + table + "\n";
+ for (let i = 0; i < gTables[table].length; ++i) {
+ response += "u:" + gTables[table][i] + "\n";
+ }
+ }
+ do_print("Returning update response: " + response);
+ return response;
+}
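+
+// A response built this way for one table with two registered chunks would
+// look like:
+//
+//   n:1000
+//   i:goog-downloadwhite-digest256
+//   u:localhost:4444/goog-downloadwhite-digest256-1
+//   u:localhost:4444/goog-downloadwhite-digest256-2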
+
+// Set up our test server to handle update requests.
+function run_test() {
+ gHttpServ = new HttpServer();
+ gHttpServ.registerDirectory("/", do_get_cwd());
+
+ gHttpServ.registerPathHandler("/downloads", function(request, response) {
+ let buf = NetUtil.readInputStreamToString(request.bodyInputStream,
+ request.bodyInputStream.available());
+ let blob = processUpdateRequest();
+ response.setHeader("Content-Type",
+ "application/vnd.google.safebrowsing-update", false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(blob, blob.length);
+ });
+
+ gHttpServ.start(4444);
+ run_next_test();
+}
+
+function createURI(s) {
+ let service = Cc["@mozilla.org/network/io-service;1"]
+ .getService(Ci.nsIIOService);
+ return service.newURI(s, null, null);
+}
+
+// Just throw if we ever get an update or download error.
+function handleError(aEvent) {
+ do_throw("We didn't download or update correctly: " + aEvent);
+}
+
+add_test(function test_update() {
+ let streamUpdater = Cc["@mozilla.org/url-classifier/streamupdater;1"]
+ .getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Load up some update chunks for the safebrowsing server to serve.
+ registerTableUpdate("goog-downloadwhite-digest256", "data/digest1.chunk");
+ registerTableUpdate("goog-downloadwhite-digest256", "data/digest2.chunk");
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccess(aEvent) {
+ // Timeout of n:1000 is constructed in processUpdateRequest above and
+ // passed back in the callback in nsIUrlClassifierStreamUpdater on success.
+ do_check_eq("1000", aEvent);
+ do_print("All data processed");
+ run_next_test();
+ }
+ streamUpdater.downloadUpdates(
+ "goog-downloadwhite-digest256",
+ "goog-downloadwhite-digest256;\n",
+ true,
+ "http://localhost:4444/downloads",
+ updateSuccess, handleError, handleError);
+});
+
+add_test(function test_url_not_whitelisted() {
+ let uri = createURI("http://example.com");
+ let principal = gSecMan.createCodebasePrincipal(uri, {});
+ gDbService.lookup(principal, "goog-downloadwhite-digest256",
+ function handleEvent(aEvent) {
+ // This URI is not on any lists.
+ do_check_eq("", aEvent);
+ run_next_test();
+ });
+});
+
+add_test(function test_url_whitelisted() {
+ // Hash of "whitelisted.com/" (canonicalized URL) is:
+ // 93CA5F48E15E9861CD37C2D95DB43D23CC6E6DE5C3F8FA6E8BE66F97CC518907
+ let uri = createURI("http://whitelisted.com");
+ let principal = gSecMan.createCodebasePrincipal(uri, {});
+ gDbService.lookup(principal, "goog-downloadwhite-digest256",
+ function handleEvent(aEvent) {
+ do_check_eq("goog-downloadwhite-digest256", aEvent);
+ run_next_test();
+ });
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js b/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
new file mode 100644
index 000000000..40fafd923
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
@@ -0,0 +1,403 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// This test ensures that the nsIUrlClassifierHashCompleter works as expected
+// and simulates an HTTP server to provide completions.
+//
+// In order to test completions, each group of completions sent as one request
+// to the HTTP server is called a completion set. This test does not currently
+// support sending multiple requests to the server at once. It makes a request
+// for each element of |completionSets|, waits for a response, and then moves
+// on to the next element.
+// Each element of |completionSets| is an array of completions, and each
+// completion is an object with the properties:
+// hash: complete hash for the completion. Automatically right-padded
+// to be COMPLETE_LENGTH.
+// expectCompletion: boolean indicating whether the server should respond
+// with a full hash.
+// forceServerError: boolean indicating whether the server should respond
+// with a 503.
+// table: name of the table that the hash corresponds to. Only needs to be set
+// if a completion is expected.
+// chunkId: positive integer corresponding to the chunk that the hash belongs
+// to. Only needs to be set if a completion is expected.
+// multipleCompletions: boolean indicating whether the server should respond
+// with more than one full hash. If this is set to true
+// then |expectCompletion| must also be set to true and
+// |hash| must have the same prefix as all |completions|.
+// completions: an array of completions (objects with a hash, table and
+// chunkId property as described above). This property is only
+// used when |multipleCompletions| is set to true.
+
+// Basic prefix set: 2 of the 3 entries expect completions.
+var basicCompletionSet = [
+ {
+ hash: "abcdefgh",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1234,
+ },
+ {
+ hash: "1234",
+ expectCompletion: false,
+ },
+ {
+ hash: "\u0000\u0000\u000012312",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1234,
+ }
+];
+
+// 3 prefixes with 0 completions to test HashCompleter handling a 204 status.
+var falseCompletionSet = [
+ {
+ hash: "1234",
+ expectCompletion: false,
+ },
+ {
+ hash: "",
+ expectCompletion: false,
+ },
+ {
+ hash: "abc",
+ expectCompletion: false,
+ }
+];
+
+// The current implementation (as of Mar 2011) sometimes sends duplicate
+// entries to HashCompleter and even expects responses for duplicated entries.
+var dupedCompletionSet = [
+ {
+ hash: "1234",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1,
+ },
+ {
+ hash: "5678",
+ expectCompletion: false,
+ table: "test2",
+ chunkId: 2,
+ },
+ {
+ hash: "1234",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1,
+ },
+ {
+ hash: "5678",
+ expectCompletion: false,
+ table: "test2",
+ chunkId: 2
+ }
+];
+
+// It is possible for a hash completion request to return with multiple
+// completions, the HashCompleter should return all of these.
+var multipleResponsesCompletionSet = [
+ {
+ hash: "1234",
+ expectCompletion: true,
+ multipleCompletions: true,
+ completions: [
+ {
+ hash: "123456",
+ table: "test1",
+ chunkId: 3,
+ },
+ {
+ hash: "123478",
+ table: "test2",
+ chunkId: 4,
+ }
+ ],
+ }
+];
+
+function buildCompletionRequest(aCompletionSet) {
+ let prefixes = [];
+ let prefixSet = new Set();
+ aCompletionSet.forEach(s => {
+ let prefix = s.hash.substring(0, 4);
+ if (prefixSet.has(prefix)) {
+ return;
+ }
+ prefixSet.add(prefix);
+ prefixes.push(prefix);
+ });
+ return 4 + ":" + (4 * prefixes.length) + "\n" + prefixes.join("");
+}
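+
+// For example, for the prefixes "abcd" and "1234" the request body is
+// "4:8\nabcd1234": the partial length (4), a colon, the payload length
+// (2 prefixes * 4 bytes), a newline, then the raw prefixes back to back.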
+
+function parseCompletionRequest(aRequest) {
+ // Format: [partial_length]:[num_of_prefix * partial_length]\n[prefixes_data]
+
+ let tokens = /(\d):(\d+)/.exec(aRequest);
+ if (!tokens || tokens.length < 3) {
+ dump("Request format error.");
+ return null;
+ }
+
+ let partialLength = parseInt(tokens[1]);
+ let payloadLength = parseInt(tokens[2]);
+
+ let payloadStart = tokens[1].length + // partial length
+ 1 + // ':'
+ tokens[2].length + // payload length
+ 1; // '\n'
+
+ let prefixSet = [];
+ for (let i = payloadStart; i < aRequest.length; i += partialLength) {
+ let prefix = aRequest.substr(i, partialLength);
+ if (prefix.length !== partialLength) {
+ dump("Header info not correct: " + aRequest.substr(0, payloadStart));
+ return null;
+ }
+ prefixSet.push(prefix);
+ }
+ prefixSet.sort();
+
+ return prefixSet;
+}
+
+// Compare the requests in string format.
+function compareCompletionRequest(aRequest1, aRequest2) {
+ let prefixSet1 = parseCompletionRequest(aRequest1);
+ let prefixSet2 = parseCompletionRequest(aRequest2);
+
+ return equal(JSON.stringify(prefixSet1), JSON.stringify(prefixSet2));
+}
+
+// The fifth completion set is added at runtime by getRandomCompletionSet.
+// Each completion in the set only has one response and its purpose is to
+// provide an easy way to test the HashCompleter handling an arbitrarily large
+// completion set (determined by SIZE_OF_RANDOM_SET).
+const SIZE_OF_RANDOM_SET = 16;
+function getRandomCompletionSet(forceServerError) {
+ let completionSet = [];
+ let hashPrefixes = [];
+
+ let seed = Math.floor(Math.random() * Math.pow(2, 32));
+ dump("Using seed of " + seed + " for random completion set.\n");
+ let rand = new LFSRgenerator(seed);
+
+ for (let i = 0; i < SIZE_OF_RANDOM_SET; i++) {
+ let completion = { expectCompletion: false, forceServerError: false, _finished: false };
+
+ // Generate a random hash of 1-32 characters (right-padded to the full
+ // 256-bit COMPLETE_LENGTH in run_test()). First we get random numbers
+ // and then convert them to a string.
+ let hash;
+ let prefix;
+ do {
+ hash = "";
+ let length = 1 + rand.nextNum(5);
+ for (let i = 0; i < length; i++)
+ hash += String.fromCharCode(rand.nextNum(8));
+ prefix = hash.substring(0,4);
+ } while (hashPrefixes.indexOf(prefix) != -1);
+
+ hashPrefixes.push(prefix);
+ completion.hash = hash;
+
+ if (!forceServerError) {
+ completion.expectCompletion = rand.nextNum(1) == 1;
+ } else {
+ completion.forceServerError = true;
+ }
+ if (completion.expectCompletion) {
+ // Generate a random alpha-numeric string of length at most 6 for the
+ // table name.
+ completion.table = (rand.nextNum(31)).toString(36);
+
+ completion.chunkId = rand.nextNum(16);
+ }
+ completionSet.push(completion);
+ }
+
+ return completionSet;
+}
+
+var completionSets = [basicCompletionSet, falseCompletionSet,
+ dupedCompletionSet, multipleResponsesCompletionSet];
+var currentCompletionSet = -1;
+var finishedCompletions = 0;
+
+const SERVER_PORT = 8080;
+const SERVER_PATH = "/hash-completer";
+var server;
+
+// Completion hashes are automatically right-padded with null chars to have a
+// length of COMPLETE_LENGTH.
+// Taken from nsUrlClassifierDBService.h
+const COMPLETE_LENGTH = 32;
+
+var completer = Cc["@mozilla.org/url-classifier/hashcompleter;1"].
+ getService(Ci.nsIUrlClassifierHashCompleter);
+
+var gethashUrl;
+
+// Expected highest completion set for which the server sends a response.
+var expectedMaxServerCompletionSet = 0;
+var maxServerCompletionSet = 0;
+
+function run_test() {
+ // Generate a random completion set that returns successful responses.
+ completionSets.push(getRandomCompletionSet(false));
+ // We back off after receiving an error, so requests shouldn't reach the
+ // server after that.
+ expectedMaxServerCompletionSet = completionSets.length;
+ // Generate some completion sets that return 503s.
+ for (let j = 0; j < 10; ++j) {
+ completionSets.push(getRandomCompletionSet(true));
+ }
+
+ // Fix up the completions before running the test.
+ for (let completionSet of completionSets) {
+ for (let completion of completionSet) {
+ // Pad the right of each |hash| so that the length is COMPLETE_LENGTH.
+ if (completion.multipleCompletions) {
+ for (let responseCompletion of completion.completions) {
+ let numChars = COMPLETE_LENGTH - responseCompletion.hash.length;
+ responseCompletion.hash += (new Array(numChars + 1)).join("\u0000");
+ }
+ }
+ else {
+ let numChars = COMPLETE_LENGTH - completion.hash.length;
+ completion.hash += (new Array(numChars + 1)).join("\u0000");
+ }
+ }
+ }
+ do_test_pending();
+
+ server = new HttpServer();
+ server.registerPathHandler(SERVER_PATH, hashCompleterServer);
+
+ server.start(-1);
+ const SERVER_PORT = server.identity.primaryPort;
+
+ gethashUrl = "http://localhost:" + SERVER_PORT + SERVER_PATH;
+
+ runNextCompletion();
+}
+
+function runNextCompletion() {
+ // The server relies on currentCompletionSet to send the correct response, so
+ // don't increment it until we start the new set of callbacks.
+ currentCompletionSet++;
+ if (currentCompletionSet >= completionSets.length) {
+ finish();
+ return;
+ }
+
+ dump("Now on completion set index " + currentCompletionSet + ", length " +
+ completionSets[currentCompletionSet].length + "\n");
+ // Number of finished completions for this set.
+ finishedCompletions = 0;
+ for (let completion of completionSets[currentCompletionSet]) {
+ completer.complete(completion.hash.substring(0,4), gethashUrl,
+ (new callback(completion)));
+ }
+}
+
+function hashCompleterServer(aRequest, aResponse) {
+ let stream = aRequest.bodyInputStream;
+ let wrapperStream = Cc["@mozilla.org/binaryinputstream;1"].
+ createInstance(Ci.nsIBinaryInputStream);
+ wrapperStream.setInputStream(stream);
+
+ let len = stream.available();
+ let data = wrapperStream.readBytes(len);
+
+ // Check if we got the expected completion request.
+ let expectedRequest = buildCompletionRequest(completionSets[currentCompletionSet]);
+ compareCompletionRequest(data, expectedRequest);
+
+ // To avoid a response with duplicate hash completions, we keep track of all
+ // completed hash prefixes so far.
+ let completedHashes = [];
+ let responseText = "";
+
+ function responseForCompletion(x) {
+ return x.table + ":" + x.chunkId + ":" + x.hash.length + "\n" + x.hash;
+ }
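+ // For example, a completion for table "test", chunk 1234 and a 32-byte
+ // hash H would serialize as "test:1234:32\n" followed by the raw bytes
+ // of H.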
+ // As per the spec, a server should respond with a 204 if there are no
+ // full-length hashes that match the prefixes.
+ let httpStatus = 204;
+ for (let completion of completionSets[currentCompletionSet]) {
+ if (completion.expectCompletion &&
+ (completedHashes.indexOf(completion.hash) == -1)) {
+ completedHashes.push(completion.hash);
+
+ if (completion.multipleCompletions)
+ responseText += completion.completions.map(responseForCompletion).join("");
+ else
+ responseText += responseForCompletion(completion);
+ }
+ if (completion.forceServerError) {
+ httpStatus = 503;
+ }
+ }
+
+ dump("Server sending response for " + currentCompletionSet + "\n");
+ maxServerCompletionSet = currentCompletionSet;
+ if (responseText && httpStatus != 503) {
+ aResponse.write(responseText);
+ } else {
+ aResponse.setStatusLine(null, httpStatus, null);
+ }
+}
+
+
+function callback(completion) {
+ this._completion = completion;
+}
+
+callback.prototype = {
+ completion: function completion(hash, table, chunkId, trusted) {
+ do_check_true(this._completion.expectCompletion);
+ if (this._completion.multipleCompletions) {
+ for (let completion of this._completion.completions) {
+ if (completion.hash == hash) {
+ do_check_eq(JSON.stringify(hash), JSON.stringify(completion.hash));
+ do_check_eq(table, completion.table);
+ do_check_eq(chunkId, completion.chunkId);
+
+ completion._completed = true;
+
+ if (this._completion.completions.every(x => x._completed))
+ this._completed = true;
+
+ break;
+ }
+ }
+ }
+ else {
+ // Hashes are not actually strings and can contain arbitrary data.
+ do_check_eq(JSON.stringify(hash), JSON.stringify(this._completion.hash));
+ do_check_eq(table, this._completion.table);
+ do_check_eq(chunkId, this._completion.chunkId);
+
+ this._completed = true;
+ }
+ },
+
+ completionFinished: function completionFinished(status) {
+ finishedCompletions++;
+ do_check_eq(!!this._completion.expectCompletion, !!this._completed);
+ this._completion._finished = true;
+
+ // currentCompletionSet can mutate before all of the callbacks are complete.
+ if (currentCompletionSet < completionSets.length &&
+ finishedCompletions == completionSets[currentCompletionSet].length) {
+ runNextCompletion();
+ }
+ },
+};
+
+function finish() {
+ do_check_eq(expectedMaxServerCompletionSet, maxServerCompletionSet);
+ server.stop(function() {
+ do_test_finished();
+ });
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_listmanager.js b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
new file mode 100644
index 000000000..ba11d930e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
@@ -0,0 +1,376 @@
+Cu.import("resource://gre/modules/XPCOMUtils.jsm");
+Cu.import("resource://gre/modules/Services.jsm");
+
+XPCOMUtils.defineLazyModuleGetter(this, "NetUtil",
+ "resource://gre/modules/NetUtil.jsm");
+
+// These tables share the same updateURL.
+const TEST_TABLE_DATA_LIST = [
+ // 0:
+ {
+ tableName: "test-listmanager0-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash0",
+ },
+
+ // 1:
+ {
+ tableName: "test-listmanager1-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash1",
+ },
+
+ // 2:
+ {
+ tableName: "test-listmanager2-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash2",
+ }
+];
+
+// These tables have a different update URL (for v4).
+const TEST_TABLE_DATA_V4 = {
+ tableName: "test-phish-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
+};
+const TEST_TABLE_DATA_V4_DISABLED = {
+ tableName: "test-unwanted-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
+};
+
+const PREF_NEXTUPDATETIME = "browser.safebrowsing.provider.google.nextupdatetime";
+const PREF_NEXTUPDATETIME_V4 = "browser.safebrowsing.provider.google4.nextupdatetime";
+
+let gListManager = Cc["@mozilla.org/url-classifier/listmanager;1"]
+ .getService(Ci.nsIUrlListManager);
+
+let gUrlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
+ .getService(Ci.nsIUrlClassifierUtils);
+
+// Global test server for serving safebrowsing updates.
+let gHttpServ = null;
+let gUpdateResponse = "";
+let gExpectedUpdateRequest = "";
+let gExpectedQueryV4 = "";
+
+// Handles request for TEST_TABLE_DATA_V4.
+let gHttpServV4 = null;
+
+// These two variables are used to synchronize the last two racing updates
+// (in terms of "update URL") in test_update_all_tables().
+let gUpdatedCntForTableData = 0; // For TEST_TABLE_DATA_LIST.
+let gIsV4Updated = false; // For TEST_TABLE_DATA_V4.
+
+const NEW_CLIENT_STATE = 'sta\0te';
+const CHECKSUM = '\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78';
+
+prefBranch.setBoolPref("browser.safebrowsing.debug", true);
+
+// The "\xFF\xFF" is to generate a base64 string with "/".
+prefBranch.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");
+
+// Register tables.
+TEST_TABLE_DATA_LIST.forEach(function(t) {
+ gListManager.registerTable(t.tableName,
+ t.providerName,
+ t.updateUrl,
+ t.gethashUrl);
+});
+
+gListManager.registerTable(TEST_TABLE_DATA_V4.tableName,
+ TEST_TABLE_DATA_V4.providerName,
+ TEST_TABLE_DATA_V4.updateUrl,
+ TEST_TABLE_DATA_V4.gethashUrl);
+
+// To test Bug 1302044.
+gListManager.registerTable(TEST_TABLE_DATA_V4_DISABLED.tableName,
+ TEST_TABLE_DATA_V4_DISABLED.providerName,
+ TEST_TABLE_DATA_V4_DISABLED.updateUrl,
+ TEST_TABLE_DATA_V4_DISABLED.gethashUrl);
+
+const SERVER_INVOLVED_TEST_CASE_LIST = [
+ // - Do table0 update.
+ // - Server would respond "a:5:32:32\n[DATA]".
+ function test_update_table0() {
+ disableAllUpdates();
+
+ gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
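+ // (A v2 request body is one line per table, of the form
+ // "<tableName>;[a:<add-chunks>][:s:<sub-chunks>]\n"; the chunk lists are
+ // empty here because nothing has been stored yet.)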
+ gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";\n";
+
+ gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
+ gUpdateResponse += readFileToString("data/digest2.chunk");
+
+ forceTableUpdate();
+ },
+
+ // - Do table0 update again. Since chunk 5 was added to table0 in the last
+ // update, the expected request contains "a:5".
+ // - Server would respond "s;2-12\n[DATA]".
+ function test_update_table0_with_existing_chunks() {
+ disableAllUpdates();
+
+ gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
+ gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";a:5\n";
+
+ gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
+ gUpdateResponse += readFileToString("data/digest1.chunk");
+
+ forceTableUpdate();
+ },
+
+ // - Do all-table update.
+ // - The server responds with no chunk control.
+ //
+ // Note that this test MUST be the last one in the array, since we rely on
+ // the number of server-involved test cases to synchronize the last two
+ // racing updates for the different update URLs.
+ function test_update_all_tables() {
+ disableAllUpdates();
+
+ // Enable all tables including TEST_TABLE_DATA_V4!
+ TEST_TABLE_DATA_LIST.forEach(function(t) {
+ gListManager.enableUpdate(t.tableName);
+ });
+
+ // We register two v4 tables but only enable one of them
+ // to verify that the disabled tables are not updated.
+ // See Bug 1302044.
+ gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
+ gListManager.disableUpdate(TEST_TABLE_DATA_V4_DISABLED.tableName);
+
+ // Expected results for v2.
+ gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";a:5:s:2-12\n" +
+ TEST_TABLE_DATA_LIST[1].tableName + ";\n" +
+ TEST_TABLE_DATA_LIST[2].tableName + ";\n";
+ gUpdateResponse = "n:1000\n";
+
+ // We test the request against the query string, since the v4 request is
+ // appended to the query string. The request is generated by the protobuf
+ // API (binary) and then base64-encoded.
+ let requestV4 = gUrlUtils.makeUpdateRequestV4([TEST_TABLE_DATA_V4.tableName],
+ [""],
+ 1);
+ gExpectedQueryV4 = "&$req=" + requestV4;
+
+ forceTableUpdate();
+ },
+
+];
+
+SERVER_INVOLVED_TEST_CASE_LIST.forEach(t => add_test(t));
+
+add_test(function test_partialUpdateV4() {
+ disableAllUpdates();
+
+ gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
+
+ // Since the new client state has been responded and saved in
+ // test_update_all_tables, this update request should send
+ // a partial update to the server.
+ let requestV4 = gUrlUtils.makeUpdateRequestV4([TEST_TABLE_DATA_V4.tableName],
+ [btoa(NEW_CLIENT_STATE)],
+ 1);
+ gExpectedQueryV4 = "&$req=" + requestV4;
+
+ forceTableUpdate();
+});
+
+// Tests nsIUrlListManager.getGethashUrl.
+add_test(function test_getGethashUrl() {
+ TEST_TABLE_DATA_LIST.forEach(function (t) {
+ equal(gListManager.getGethashUrl(t.tableName), t.gethashUrl);
+ });
+ equal(gListManager.getGethashUrl(TEST_TABLE_DATA_V4.tableName),
+ TEST_TABLE_DATA_V4.gethashUrl);
+ run_next_test();
+});
+
+function run_test() {
+ // Setup primary testing server.
+ gHttpServ = new HttpServer();
+ gHttpServ.registerDirectory("/", do_get_cwd());
+
+ gHttpServ.registerPathHandler("/safebrowsing/update", function(request, response) {
+ let body = NetUtil.readInputStreamToString(request.bodyInputStream,
+ request.bodyInputStream.available());
+
+ // Verify if the request is as expected.
+ equal(body, gExpectedUpdateRequest);
+
+ // Respond with the update, which is controlled by the test case.
+ response.setHeader("Content-Type",
+ "application/vnd.google.safebrowsing-update", false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(gUpdateResponse, gUpdateResponse.length);
+
+ gUpdatedCntForTableData++;
+
+ if (gUpdatedCntForTableData !== SERVER_INVOLVED_TEST_CASE_LIST.length) {
+ // This is not the last test case, so run the next one upon update
+ // success.
+ waitForUpdateSuccess(run_next_test);
+ return;
+ }
+
+ if (gIsV4Updated) {
+ run_next_test(); // All tests are done. Just finish.
+ return;
+ }
+
+ do_print("Waiting for TEST_TABLE_DATA_V4 to be tested ...");
+ });
+
+ gHttpServ.start(4444);
+
+ // Setup v4 testing server for the different update URL.
+ gHttpServV4 = new HttpServer();
+ gHttpServV4.registerDirectory("/", do_get_cwd());
+
+ gHttpServV4.registerPathHandler("/safebrowsing/update", function(request, response) {
+ // V4 update request body should be empty.
+ equal(request.bodyInputStream.available(), 0);
+
+ // Not in the spec; found in the Chromium source code...
+ equal(request.getHeader("X-HTTP-Method-Override"), "POST");
+
+ // V4 update request uses GET.
+ equal(request.method, "GET");
+
+ // V4 appends the base64-encoded request to the query string.
+ equal(request.queryString, gExpectedQueryV4);
+ equal(request.queryString.indexOf('+'), -1);
+ equal(request.queryString.indexOf('/'), -1);
+
+ // Respond with V2-compatible content for now. In the future we can
+ // send a meaningful response to test Bug 1284178, to see if the
+ // update is successfully stored to the database.
+ response.setHeader("Content-Type",
+ "application/vnd.google.safebrowsing-update", false);
+ response.setStatusLine(request.httpVersion, 200, "OK");
+
+ // The protobuf binary representation of the response:
+ //
+ // [
+ // {
+ // 'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
+ // 'response_type': 2, // FULL_UPDATE
+ // 'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
+ // 'checksum': { "sha256": CHECKSUM }, // CHECKSUM
+ // 'additions': { 'compression_type': RAW,
+ // 'prefix_size': 4,
+ // 'raw_hashes': "00000001000000020000000300000004"}
+ // }
+ // ]
+ //
+ let content = "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";
+
+ response.bodyOutputStream.write(content, content.length);
+
+ if (gIsV4Updated) {
+ // This is the case where test_partialUpdateV4 is running. We have
+ // already verified that this update request contains the client state
+ // saved by the previous response.
+ run_next_test();
+ return;
+ }
+
+ waitUntilMetaDataSaved(NEW_CLIENT_STATE, CHECKSUM, () => {
+ gIsV4Updated = true;
+
+ if (gUpdatedCntForTableData === SERVER_INVOLVED_TEST_CASE_LIST.length) {
+ // All tests are done!
+ run_next_test();
+ return;
+ }
+
+ do_print("Wait for all sever-involved tests to be done ...");
+ });
+
+ });
+
+ gHttpServV4.start(5555);
+
+ run_next_test();
+}
+
+// A trick to force a table update. Before calling this, we have to call
+// disableAllUpdates() to clean up the updateCheckers in the list manager.
+function forceTableUpdate() {
+ prefBranch.setCharPref(PREF_NEXTUPDATETIME, "1");
+ prefBranch.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
+ gListManager.maybeToggleUpdateChecking();
+}
+
+function disableAllUpdates() {
+ TEST_TABLE_DATA_LIST.forEach(t => gListManager.disableUpdate(t.tableName));
+ gListManager.disableUpdate(TEST_TABLE_DATA_V4.tableName);
+}
+
+// Since there's no public interface on the list manager to observe update
+// success, we can only rely on the refresh of "nextupdatetime".
+function waitForUpdateSuccess(callback) {
+ let nextupdatetime = parseInt(prefBranch.getCharPref(PREF_NEXTUPDATETIME));
+ do_print("nextupdatetime: " + nextupdatetime);
+ if (nextupdatetime !== 1) {
+ callback();
+ return;
+ }
+ do_timeout(1000, waitForUpdateSuccess.bind(null, callback));
+}
+
+// Read a file's contents into a string (used to build update payloads).
+function readFileToString(aFilename) {
+ let f = do_get_file(aFilename);
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"]
+ .createInstance(Ci.nsIFileInputStream);
+ stream.init(f, -1, 0, 0);
+ let buf = NetUtil.readInputStreamToString(stream, stream.available());
+ return buf;
+}
+
+function waitUntilMetaDataSaved(expectedState, expectedChecksum, callback) {
+ let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
+ .getService(Ci.nsIUrlClassifierDBService);
+
+ dbService.getTables(metaData => {
+ do_print("metadata: " + metaData);
+ let didCallback = false;
+ metaData.split("\n").some(line => {
+ // Parse "[tableName];[stateBase64]:[checksumBase64]".
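+ // For the state written by this test, btoa("sta\0te") === "c3RhAHRl".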
+ let p = line.indexOf(";");
+ if (-1 === p) {
+ return false; // continue.
+ }
+ let tableName = line.substring(0, p);
+ let metadata = line.substring(p + 1).split(":");
+ let stateBase64 = metadata[0];
+ let checksumBase64 = metadata[1];
+
+ if (tableName !== 'test-phish-proto') {
+ return false; // continue.
+ }
+
+ if (stateBase64 === btoa(expectedState) &&
+ checksumBase64 === btoa(expectedChecksum)) {
+ do_print('State has been saved to disk!');
+ callback();
+ didCallback = true;
+ }
+
+ return true; // break no matter whether the state is matching.
+ });
+
+ if (!didCallback) {
+ do_timeout(1000, waitUntilMetaDataSaved.bind(null, expectedState,
+ expectedChecksum,
+ callback));
+ }
+ });
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_partial.js b/toolkit/components/url-classifier/tests/unit/test_partial.js
new file mode 100644
index 000000000..83243fb4e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_partial.js
@@ -0,0 +1,825 @@
+
+/**
+ * DummyCompleter() lets tests easily specify the results of a partial
+ * hash completion request.
+ */
+function DummyCompleter() {
+ this.fragments = {};
+ this.queries = [];
+ this.tableName = "test-phish-simple";
+}
+
+DummyCompleter.prototype =
+{
+QueryInterface: function(iid)
+{
+ if (!iid.equals(Ci.nsISupports) &&
+ !iid.equals(Ci.nsIUrlClassifierHashCompleter)) {
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ }
+ return this;
+},
+
+complete: function(partialHash, gethashUrl, cb)
+{
+ this.queries.push(partialHash);
+ var fragments = this.fragments;
+ var self = this;
+ var doCallback = function() {
+ if (self.alwaysFail) {
+ cb.completionFinished(1);
+ return;
+ }
+ if (fragments[partialHash]) {
+ for (var i = 0; i < fragments[partialHash].length; i++) {
+ var chunkId = fragments[partialHash][i][0];
+ var hash = fragments[partialHash][i][1];
+ cb.completion(hash, self.tableName, chunkId);
+ }
+ }
+ cb.completionFinished(0);
+ };
+ var timer = new Timer(0, doCallback);
+},
+
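+// Returns the 32-byte SHA-256 digest of |fragment| as a binary string.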
+getHash: function(fragment)
+{
+ var converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"].
+ createInstance(Ci.nsIScriptableUnicodeConverter);
+ converter.charset = "UTF-8";
+ var data = converter.convertToByteArray(fragment);
+ var ch = Cc["@mozilla.org/security/hash;1"].createInstance(Ci.nsICryptoHash);
+ ch.init(ch.SHA256);
+ ch.update(data, data.length);
+ var hash = ch.finish(false);
+ return hash.slice(0, 32);
+},
+
+addFragment: function(chunkId, fragment)
+{
+ this.addHash(chunkId, this.getHash(fragment));
+},
+
+// This method allows the caller to generate complete hashes that share a
+// real fragment's 4-byte prefix but differ in the remaining 28 bytes.
+addConflict: function(chunkId, fragment)
+{
+ var realHash = this.getHash(fragment);
+ var invalidHash = this.getHash("blah blah blah blah blah");
+ this.addHash(chunkId, realHash.slice(0, 4) + invalidHash.slice(4, 32));
+},
+
+addHash: function(chunkId, hash)
+{
+ var partial = hash.slice(0, 4);
+ if (this.fragments[partial]) {
+ this.fragments[partial].push([chunkId, hash]);
+ } else {
+ this.fragments[partial] = [[chunkId, hash]];
+ }
+},
+
+compareQueries: function(fragments)
+{
+ var expectedQueries = [];
+ for (var i = 0; i < fragments.length; i++) {
+ expectedQueries.push(this.getHash(fragments[i]).slice(0, 4));
+ }
+ do_check_eq(this.queries.length, expectedQueries.length);
+ expectedQueries.sort();
+ this.queries.sort();
+ for (var i = 0; i < this.queries.length; i++) {
+ do_check_eq(this.queries[i], expectedQueries[i]);
+ }
+}
+};
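+
+// A minimal usage sketch of the completer above, using names from this file:
+//
+//   var completer = new DummyCompleter();
+//   completer.addFragment(1, "foo.com/a"); // chunk 1 completes foo.com/a
+//   dbservice.setHashCompleter("test-phish-simple", completer);
+//
+// setupCompleter() below wraps exactly this wiring.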
+
+function setupCompleter(table, hits, conflicts)
+{
+ var completer = new DummyCompleter();
+ completer.tableName = table;
+ for (var i = 0; i < hits.length; i++) {
+ var chunkId = hits[i][0];
+ var fragments = hits[i][1];
+ for (var j = 0; j < fragments.length; j++) {
+ completer.addFragment(chunkId, fragments[j]);
+ }
+ }
+ for (var i = 0; i < conflicts.length; i++) {
+ var chunkId = conflicts[i][0];
+ var fragments = conflicts[i][1];
+ for (var j = 0; j < fragments.length; j++) {
+ completer.addConflict(chunkId, fragments[j]);
+ }
+ }
+
+ dbservice.setHashCompleter(table, completer);
+
+ return completer;
+}
+
+function installCompleter(table, fragments, conflictFragments)
+{
+ return setupCompleter(table, fragments, conflictFragments);
+}
+
+function installFailingCompleter(table) {
+ var completer = setupCompleter(table, [], []);
+ completer.alwaysFail = true;
+ return completer;
+}
+
+// Helper assertion for checking dummy completer queries
+gAssertions.completerQueried = function(data, cb)
+{
+ var completer = data[0];
+ completer.compareQueries(data[1]);
+ cb();
+};
+
+function doTest(updates, assertions)
+{
+ doUpdateTest(updates, assertions, runNextTest, updateError);
+}
+
+// Test an add of two partial urls to a fresh database
+function testPartialAdds() {
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : addUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+
+ doTest([update], assertions);
+}
+
+function testPartialAddsWithConflicts() {
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+ // Each result will have both a real match and a conflict
+ var completer = installCompleter('test-phish-simple',
+ [[1, addUrls]],
+ [[1, addUrls]]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : addUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doTest([update], assertions);
+}
+
+// Test that the fragmenting code does not cause duplicate completions.
+function testFragments() {
+ var addUrls = [ "foo.com/a/b/c", "foo.net/", "foo.com/c/" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : addUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+
+ doTest([update], assertions);
+}
+
+// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
+// section 6.2 example 1
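+// Per the v2 spec, a lookup tries up to 5 host suffixes: the exact host,
+// plus up to 4 formed from its last 5 components. Each is combined with up
+// to 6 path prefixes: the exact path with and without the query string,
+// plus up to 4 built from the leading path components. The addUrls list
+// below enumerates these combinations for the probe URL.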
+function testSpecFragments() {
+ var probeUrls = [ "a.b.c/1/2.html?param=1" ];
+
+ var addUrls = [ "a.b.c/1/2.html",
+ "a.b.c/",
+ "a.b.c/1/",
+ "b.c/1/2.html?param=1",
+ "b.c/1/2.html",
+ "b.c/",
+ "b.c/1/",
+ "a.b.c/1/2.html?param=1" ];
+
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : probeUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doTest([update], assertions);
+
+}
+
+// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
+// section 6.2 example 2
+function testMoreSpecFragments() {
+ var probeUrls = [ "a.b.c.d.e.f.g/1.html" ];
+
+ var addUrls = [ "a.b.c.d.e.f.g/1.html",
+ "a.b.c.d.e.f.g/",
+ "c.d.e.f.g/1.html",
+ "c.d.e.f.g/",
+ "d.e.f.g/1.html",
+ "d.e.f.g/",
+ "e.f.g/1.html",
+ "e.f.g/",
+ "f.g/1.html",
+ "f.g/" ];
+
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : probeUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doTest([update], assertions);
+
+}
+
+function testFalsePositives() {
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+ // Each result will have no matching complete hashes and a non-matching
+ // conflict
+ var completer = installCompleter('test-phish-simple', [], [[1, addUrls]]);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsDontExist" : addUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doTest([update], assertions);
+}
+
+function testEmptyCompleter() {
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+ // Completer will never return full hashes
+ var completer = installCompleter('test-phish-simple', [], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsDontExist" : addUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doTest([update], assertions);
+}
+
+function testCompleterFailure() {
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+ // Completer will never return full hashes
+ var completer = installFailingCompleter('test-phish-simple');
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsDontExist" : addUrls,
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doTest([update], assertions);
+}
+
+function testMixedSizesSameDomain() {
+ var add1Urls = [ "foo.com/a" ];
+ var add2Urls = [ "foo.com/b" ];
+
+ var update1 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : add1Urls }],
+ 4);
+ var update2 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 2,
+ "urls" : add2Urls }],
+ 32);
+
+ // We should only need to complete the partial hashes
+ var completer = installCompleter('test-phish-simple', [[1, add1Urls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-2",
+ // both urls should match...
+ "urlsExist" : add1Urls.concat(add2Urls),
+ // ... but the completer should only be queried for the partial entry
+ "completerQueried" : [completer, add1Urls]
+ };
+
+ doTest([update1, update2], assertions);
+}
+
+function testMixedSizesDifferentDomains() {
+ var add1Urls = [ "foo.com/a" ];
+ var add2Urls = [ "bar.com/b" ];
+
+ var update1 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : add1Urls }],
+ 4);
+ var update2 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 2,
+ "urls" : add2Urls }],
+ 32);
+
+ // We should only need to complete the partial hashes
+ var completer = installCompleter('test-phish-simple', [[1, add1Urls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-2",
+ // both urls should match...
+ "urlsExist" : add1Urls.concat(add2Urls),
+ // ... but the completer should only be queried for the partial entry
+ "completerQueried" : [completer, add1Urls]
+ };
+
+ doTest([update1, update2], assertions);
+}
+
+function testInvalidHashSize()
+{
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 12); // only 4 and 32 are legal hash sizes
+
+ var addUrls2 = [ "zaz.com/a", "xyz.com/b" ];
+ var update2 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 2,
+ "urls" : addUrls2
+ }],
+ 4);
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:2",
+ "urlsDontExist" : addUrls
+ };
+
+ // A successful update will trigger an error
+ doUpdateTest([update2, update], assertions, updateError, runNextTest);
+}
+
+function testWrongTable()
+{
+ var addUrls = [ "foo.com/a" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+ var completer = installCompleter('test-malware-simple', // wrong table
+ [[1, addUrls]], []);
+
+ // The above installCompleter installs the completer for test-malware-simple;
+ // we want it to be used for test-phish-simple too.
+ dbservice.setHashCompleter("test-phish-simple", completer);
+
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ // The urls were added as phishing urls, but the completer is claiming
+ // that they are malware urls, and we trust the completer in this case.
+ // The result will be discarded, so we can only check for non-existence.
+ "urlsDontExist" : addUrls,
+ // Make sure the completer was actually queried.
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doUpdateTest([update], assertions,
+ function() {
+ // Give the dbservice a chance to (not) cache the result.
+ var timer = new Timer(3000, function() {
+ // The miss earlier will have caused a miss to be cached.
+ // Resetting the completer does not count as an update,
+ // so we will not be probed again.
+ var newCompleter = installCompleter('test-malware-simple', [[1, addUrls]], []);
+ dbservice.setHashCompleter("test-phish-simple", newCompleter);
+
+ var assertions = {
+ "urlsDontExist" : addUrls
+ };
+ checkAssertions(assertions, runNextTest);
+ });
+ }, updateError);
+}
+
+function setupCachedResults(addUrls, part2)
+{
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ // Request the add url. This should cause the completion to be cached.
+ "urlsExist" : addUrls,
+ // Make sure the completer was actually queried.
+ "completerQueried" : [completer, addUrls]
+ };
+
+ doUpdateTest([update], assertions,
+ function() {
+ // Give the dbservice a chance to cache the result.
+ var timer = new Timer(3000, part2);
+ }, updateError);
+}
+
+function testCachedResults()
+{
+ setupCachedResults(["foo.com/a"], function(add) {
+ // This is called after setupCachedResults(). Verify that
+ // checking the url again does not cause a completer request.
+
+ // Install a new completer; this one should never be queried.
+ var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
+
+ var assertions = {
+ "urlsExist" : ["foo.com/a"],
+ "completerQueried" : [newCompleter, []]
+ };
+ checkAssertions(assertions, runNextTest);
+ });
+}
+
+function testCachedResultsWithSub() {
+ setupCachedResults(["foo.com/a"], function() {
+ // Install a new completer; this one should never be queried.
+ var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
+
+ var removeUpdate = buildPhishingUpdate(
+ [ { "chunkNum" : 2,
+ "chunkType" : "s",
+ "urls": ["1:foo.com/a"] }],
+ 4);
+
+ var assertions = {
+ "urlsDontExist" : ["foo.com/a"],
+ "completerQueried" : [newCompleter, []]
+ };
+
+ doTest([removeUpdate], assertions);
+ });
+}
+
+function testCachedResultsWithExpire() {
+ setupCachedResults(["foo.com/a"], function() {
+ // Install a new completer; this one should never be queried.
+ var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
+
+ var expireUpdate =
+ "n:1000\n" +
+ "i:test-phish-simple\n" +
+ "ad:1\n";
+
+ var assertions = {
+ "urlsDontExist" : ["foo.com/a"],
+ "completerQueried" : [newCompleter, []]
+ };
+ doTest([expireUpdate], assertions);
+ });
+}
+
+function testCachedResultsUpdate()
+{
+ var existUrls = ["foo.com/a"];
+ setupCachedResults(existUrls, function() {
+ // This is called after setupCachedResults(). Verify that
+ // checking the url again does not cause a completer request.
+
+ // Install a new completer; this one should never be queried.
+ var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
+
+ var assertions = {
+ "urlsExist" : existUrls,
+ "completerQueried" : [newCompleter, []]
+ };
+
+ var addUrls = ["foobar.org/a"];
+
+ var update2 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 2,
+ "urls" : addUrls
+ }],
+ 4);
+
+ checkAssertions(assertions, function () {
+ // Apply the update. The cached completes should be gone.
+ doStreamUpdate(update2, function() {
+ // Now the completer gets queried again.
+ var newCompleter2 = installCompleter('test-phish-simple', [[1, existUrls]], []);
+ var assertions2 = {
+ "tableData" : "test-phish-simple;a:1-2",
+ "urlsExist" : existUrls,
+ "completerQueried" : [newCompleter2, existUrls]
+ };
+ checkAssertions(assertions2, runNextTest);
+ }, updateError);
+ });
+ });
+}
+
+function testCachedResultsFailure()
+{
+ var existUrls = ["foo.com/a"];
+ setupCachedResults(existUrls, function() {
+ // This is called after setupCachedResults(). Verify that
+ // checking the url again does not cause a completer request.
+
+ // Install a new completer; this one should never be queried.
+ var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
+
+ var assertions = {
+ "urlsExist" : existUrls,
+ "completerQueried" : [newCompleter, []]
+ };
+
+ var addUrls = ["foobar.org/a"];
+
+ var update2 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 2,
+ "urls" : addUrls
+ }],
+ 4);
+
+ checkAssertions(assertions, function() {
+ // Apply the update. The cached completes should be gone.
+ doErrorUpdate("test-phish-simple,test-malware-simple", function() {
+ // Now the completer gets queried again.
+ var newCompleter2 = installCompleter('test-phish-simple', [[1, existUrls]], []);
+ var assertions2 = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : existUrls,
+ "completerQueried" : [newCompleter2, existUrls]
+ };
+ checkAssertions(assertions2, runNextTest);
+ }, updateError);
+ });
+ });
+}
+
+function testErrorList()
+{
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 4);
+ // The update failure will kill the completes, so the above must be a
+ // prefix to get any hit at all past the update failure.
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : addUrls,
+ // These are complete urls, and will only be completed if the
+ // list is stale.
+ "completerQueried" : [completer, addUrls]
+ };
+
+ // Apply the update.
+ doStreamUpdate(update, function() {
+ // Now the test-phish-simple and test-malware-simple tables are marked
+ // as fresh. Fake an update failure to mark them stale.
+ doErrorUpdate("test-phish-simple,test-malware-simple", function() {
+ // Now the lists should be marked stale. Check assertions.
+ checkAssertions(assertions, runNextTest);
+ }, updateError);
+ }, updateError);
+}
+
+
+function testStaleList()
+{
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 32);
+
+ var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : addUrls,
+ // These are complete urls, and will only be completed if the
+ // list is stale.
+ "completerQueried" : [completer, addUrls]
+ };
+
+ // Consider a match stale after one second.
+ prefBranch.setIntPref("urlclassifier.max-complete-age", 1);
+
+ // Apply the update.
+ doStreamUpdate(update, function() {
+ // Now the test-phish-simple and test-malware-simple tables are marked
+ // as fresh. Wait three seconds to make sure the list is marked stale.
+ new Timer(3000, function() {
+ // Now the lists should be marked stale. Check assertions.
+ checkAssertions(assertions, function() {
+ prefBranch.setIntPref("urlclassifier.max-complete-age", 2700);
+ runNextTest();
+ });
+ }, updateError);
+ }, updateError);
+}
+
+// Same as testStaleList, but verifies that an empty response still
+// unconfirms the entry.
+function testStaleListEmpty()
+{
+ var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls
+ }],
+ 32);
+
+ var completer = installCompleter('test-phish-simple', [], []);
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ // None of these should match, because they won't be completed
+ "urlsDontExist" : addUrls,
+ // These are complete urls, and will only be completed if the
+ // list is stale.
+ "completerQueried" : [completer, addUrls]
+ };
+
+ // Consider a match stale after one second.
+ prefBranch.setIntPref("urlclassifier.max-complete-age", 1);
+
+ // Apply the update.
+ doStreamUpdate(update, function() {
+ // Now the test-phish-simple and test-malware-simple tables are marked
+ // as fresh. Wait three seconds to make sure the list is marked stale.
+ new Timer(3000, function() {
+ // Now the lists should be marked stale. Check assertions.
+ checkAssertions(assertions, function() {
+ prefBranch.setIntPref("urlclassifier.max-complete-age", 2700);
+ runNextTest();
+ });
+ }, updateError);
+ }, updateError);
+}
+
+
+// Verify that different lists (test-phish-simple,
+// test-malware-simple) maintain their freshness separately.
+function testErrorListIndependent()
+{
+ var phishUrls = [ "phish.com/a" ];
+ var malwareUrls = [ "attack.com/a" ];
+ var update = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : phishUrls
+ }],
+ 4);
+ // These have to persist past the update failure, so they must be prefixes,
+ // not completes.
+
+ update += buildMalwareUpdate(
+ [
+ { "chunkNum" : 2,
+ "urls" : malwareUrls
+ }],
+ 32);
+
+ var completer = installCompleter('test-phish-simple', [[1, phishUrls]], []);
+
+ var assertions = {
+ "tableData" : "test-malware-simple;a:2\ntest-phish-simple;a:1",
+ "urlsExist" : phishUrls,
+ "malwareUrlsExist" : malwareUrls,
+ // Only the phishing urls should be completed, because only the phishing
+ // urls will be stale.
+ "completerQueried" : [completer, phishUrls]
+ };
+
+ // Apply the update.
+ doStreamUpdate(update, function() {
+ // Now the test-phish-simple and test-malware-simple tables are
+ // marked as fresh. Fake an update failure to mark *just*
+ // phishing data as stale.
+ doErrorUpdate("test-phish-simple", function() {
+ // Now the lists should be marked stale. Check assertions.
+ checkAssertions(assertions, runNextTest);
+ }, updateError);
+ }, updateError);
+}
+
+function run_test()
+{
+ runTests([
+ testPartialAdds,
+ testPartialAddsWithConflicts,
+ testFragments,
+ testSpecFragments,
+ testMoreSpecFragments,
+ testFalsePositives,
+ testEmptyCompleter,
+ testCompleterFailure,
+ testMixedSizesSameDomain,
+ testMixedSizesDifferentDomains,
+ testInvalidHashSize,
+ testWrongTable,
+ testCachedResults,
+ testCachedResultsWithSub,
+ testCachedResultsWithExpire,
+ testCachedResultsUpdate,
+ testCachedResultsFailure,
+ testStaleList,
+ testStaleListEmpty,
+ testErrorList,
+ testErrorListIndependent
+ ]);
+}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_pref.js b/toolkit/components/url-classifier/tests/unit/test_pref.js
new file mode 100644
index 000000000..68030a246
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_pref.js
@@ -0,0 +1,14 @@
+function run_test() {
+ let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
+ .getService(Ci.nsIUrlClassifierUtils);
+
+ // The google protocol version should be "2.2" until we enable SB v4
+ // by default.
+ equal(urlUtils.getProtocolVersion("google"), "2.2");
+
+ // Mozilla protocol version will stick to "2.2".
+ equal(urlUtils.getProtocolVersion("mozilla"), "2.2");
+
+ // An unknown provider's version defaults to "2.2".
+ equal(urlUtils.getProtocolVersion("unknown-provider"), "2.2");
+} \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/test_prefixset.js b/toolkit/components/url-classifier/tests/unit/test_prefixset.js
new file mode 100644
index 000000000..f2ecc9c2b
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_prefixset.js
@@ -0,0 +1,232 @@
+// newPset: returns an empty nsIUrlClassifierPrefixSet.
+function newPset() {
+ let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
+ .createInstance(Ci.nsIUrlClassifierPrefixSet);
+ pset.init("all");
+ return pset;
+}
+
+// arrContains: returns true if |arr| contains the element |target|. Uses binary
+// search and requires |arr| to be sorted.
+function arrContains(arr, target) {
+ let start = 0;
+ let end = arr.length - 1;
+ let i = 0;
+
+ while (end > start) {
+ i = start + (end - start >> 1);
+ let value = arr[i];
+
+ if (value < target)
+ start = i+1;
+ else if (value > target)
+ end = i-1;
+ else
+ break;
+ }
+ if (start == end)
+ i = start;
+
+ return (!(i < 0 || i >= arr.length) && arr[i] == target);
+}
+
+// checkContents: Check whether the PrefixSet pset contains
+// the prefixes in the passed array.
+function checkContents(pset, prefixes) {
+ var outcount = {}, outset = {};
+ outset = pset.getPrefixes(outcount);
+ let inset = prefixes;
+ do_check_eq(inset.length, outset.length);
+ inset.sort((x,y) => x - y);
+ for (let i = 0; i < inset.length; i++) {
+ do_check_eq(inset[i], outset[i]);
+ }
+}
+
+function wrappedProbe(pset, prefix) {
+ return pset.contains(prefix);
+}
+
+// doRandomLookups: we use this to test for false membership with random input
+// over the range of prefixes (unsigned 32-bits integers).
+// pset: a nsIUrlClassifierPrefixSet to test.
+// prefixes: an array of prefixes supposed to make up the prefix set.
+// N: number of random lookups to make.
+function doRandomLookups(pset, prefixes, N) {
+ for (let i = 0; i < N; i++) {
+ let randInt = prefixes[0];
+ while (arrContains(prefixes, randInt))
+ randInt = Math.floor(Math.random() * Math.pow(2, 32));
+
+ do_check_false(wrappedProbe(pset, randInt));
+ }
+}
+
+// doExpectedLookups: we use this to test expected membership.
+// pset: a nsIUrlClassifierPrefixSet to test.
+// prefixes: an array of prefixes expected to be in the prefix set.
+// N: number of times to repeat the expected lookups.
+function doExpectedLookups(pset, prefixes, N) {
+ for (let i = 0; i < N; i++) {
+ prefixes.forEach(function (x) {
+ dump("Checking " + x + "\n");
+ do_check_true(wrappedProbe(pset, x));
+ });
+ }
+}
+
+// testBasicPset: A very basic test of the prefix set to make sure that it
+// exists and to give a basic example of its use.
+function testBasicPset() {
+ let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
+ .createInstance(Ci.nsIUrlClassifierPrefixSet);
+ let prefixes = [2,50,100,2000,78000,1593203];
+ pset.setPrefixes(prefixes, prefixes.length);
+
+ do_check_true(wrappedProbe(pset, 100));
+ do_check_false(wrappedProbe(pset, 100000));
+ do_check_true(wrappedProbe(pset, 1593203));
+ do_check_false(wrappedProbe(pset, 999));
+ do_check_false(wrappedProbe(pset, 0));
+
+
+ checkContents(pset, prefixes);
+}
+
+function testDuplicates() {
+ let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
+ .createInstance(Ci.nsIUrlClassifierPrefixSet);
+ let prefixes = [1,1,2,2,2,3,3,3,3,3,3,5,6,6,7,7,9,9,9];
+ pset.setPrefixes(prefixes, prefixes.length);
+
+ do_check_true(wrappedProbe(pset, 1));
+ do_check_true(wrappedProbe(pset, 2));
+ do_check_true(wrappedProbe(pset, 5));
+ do_check_true(wrappedProbe(pset, 9));
+ do_check_false(wrappedProbe(pset, 4));
+ do_check_false(wrappedProbe(pset, 8));
+
+
+ checkContents(pset, prefixes);
+}
+
+function testSimplePset() {
+ let pset = newPset();
+ let prefixes = [1,2,100,400,123456789];
+ pset.setPrefixes(prefixes, prefixes.length);
+
+ doRandomLookups(pset, prefixes, 100);
+ doExpectedLookups(pset, prefixes, 1);
+
+
+ checkContents(pset, prefixes);
+}
+
+function testReSetPrefixes() {
+ let pset = newPset();
+ let prefixes = [1, 5, 100, 1000, 150000];
+ pset.setPrefixes(prefixes, prefixes.length);
+
+ doExpectedLookups(pset, prefixes, 1);
+
+ let secondPrefixes = [12, 50, 300, 2000, 5000, 200000];
+ pset.setPrefixes(secondPrefixes, secondPrefixes.length);
+
+ doExpectedLookups(pset, secondPrefixes, 1);
+ for (let i = 0; i < prefixes.length; i++) {
+ do_check_false(wrappedProbe(pset, prefixes[i]));
+ }
+
+
+ checkContents(pset, secondPrefixes);
+}
+
+function testLoadSaveLargeSet() {
+ let N = 1000;
+ let arr = [];
+
+ for (let i = 0; i < N; i++) {
+ let randInt = Math.floor(Math.random() * Math.pow(2, 32));
+ arr.push(randInt);
+ }
+
+ arr.sort((x,y) => x - y);
+
+ let pset = newPset();
+ pset.setPrefixes(arr, arr.length);
+
+ doExpectedLookups(pset, arr, 1);
+ doRandomLookups(pset, arr, 1000);
+
+ checkContents(pset, arr);
+
+ // Now try to save, restore, and redo the lookups
+ var file = dirSvc.get('ProfLD', Ci.nsIFile);
+ file.append("testLarge.pset");
+
+ pset.storeToFile(file);
+
+ let psetLoaded = newPset();
+ psetLoaded.loadFromFile(file);
+
+ doExpectedLookups(psetLoaded, arr, 1);
+ doRandomLookups(psetLoaded, arr, 1000);
+
+ checkContents(psetLoaded, arr);
+}
+
+function testTinySet() {
+ let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
+ .createInstance(Ci.nsIUrlClassifierPrefixSet);
+ let prefixes = [1];
+ pset.setPrefixes(prefixes, prefixes.length);
+
+ do_check_true(wrappedProbe(pset, 1));
+ do_check_false(wrappedProbe(pset, 100000));
+ checkContents(pset, prefixes);
+
+ prefixes = [];
+ pset.setPrefixes(prefixes, prefixes.length);
+ do_check_false(wrappedProbe(pset, 1));
+ checkContents(pset, prefixes);
+}
+
+function testLoadSaveNoDelta() {
+ let N = 100;
+ let arr = [];
+
+ for (let i = 0; i < N; i++) {
+ // construct a tree without deltas by making the distance
+ // between entries larger than 16 bits
+ arr.push(((1 << 16) + 1) * i);
+ }
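+ // (The prefix set encodes sorted prefixes as 16-bit deltas from periodic
+ // 32-bit index entries, so gaps wider than 16 bits defeat delta coding.)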
+
+ let pset = newPset();
+ pset.setPrefixes(arr, arr.length);
+
+ doExpectedLookups(pset, arr, 1);
+
+ var file = dirSvc.get('ProfLD', Ci.nsIFile);
+ file.append("testNoDelta.pset");
+
+ pset.storeToFile(file);
+ pset.loadFromFile(file);
+
+ doExpectedLookups(pset, arr, 1);
+}
+
+var tests = [testBasicPset,
+ testSimplePset,
+ testReSetPrefixes,
+ testLoadSaveLargeSet,
+ testDuplicates,
+ testTinySet,
+ testLoadSaveNoDelta];
+
+function run_test() {
+ // None of the tests use |executeSoon| or any sort of callbacks, so we can
+ // just run them in succession.
+ for (let i = 0; i < tests.length; i++) {
+ dump("Running " + tests[i].name + "\n");
+ tests[i]();
+ }
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_provider_url.js b/toolkit/components/url-classifier/tests/unit/test_provider_url.js
new file mode 100644
index 000000000..9a946dc3f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_provider_url.js
@@ -0,0 +1,34 @@
+Cu.import("resource://testing-common/AppInfo.jsm", this);
+Cu.import("resource://gre/modules/Services.jsm");
+
+function updateVersion(version) {
+ updateAppInfo({ version });
+}
+
+add_test(function test_provider_url() {
+ let urls = [
+ "browser.safebrowsing.provider.google.updateURL",
+ "browser.safebrowsing.provider.google.gethashURL",
+ "browser.safebrowsing.provider.mozilla.updateURL",
+ "browser.safebrowsing.provider.mozilla.gethashURL"
+ ];
+
+ let versions = [
+ "49.0",
+ "49.0.1",
+ "49.0a1",
+ "49.0b1",
+ "49.0esr",
+ "49.0.1esr"
+ ];
+
+ for (let version of versions) {
+ for (let url of urls) {
+ updateVersion(version);
+ let value = Services.urlFormatter.formatURLPref(url);
+ Assert.notEqual(value.indexOf("&appver=49.0&"), -1);
+ }
+ }
+
+ run_next_test();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
new file mode 100644
index 000000000..45309ba54
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
@@ -0,0 +1,23 @@
+function run_test() {
+ let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
+ .getService(Ci.nsIUrlClassifierUtils);
+
+ // No list at all.
+ let requestNoList = urlUtils.makeUpdateRequestV4([], [], 0);
+
+ // Only one valid list name.
+ let requestOneValid =
+ urlUtils.makeUpdateRequestV4(["goog-phish-proto"], ["AAAAAA"], 1);
+
+ // Only one invalid list name.
+ let requestOneInvalid =
+ urlUtils.makeUpdateRequestV4(["bad-list-name"], ["AAAAAA"], 1);
+
+ // One valid and one invalid list name.
+ let requestOneInvalidOneValid =
+ urlUtils.makeUpdateRequestV4(["goog-phish-proto", "bad-list-name"],
+ ["AAAAAA", "AAAAAA"], 2);
+
+ equal(requestNoList, requestOneInvalid);
+ equal(requestOneValid, requestOneInvalidOneValid);
+} \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/test_streamupdater.js b/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
new file mode 100644
index 000000000..e5abc4e91
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
@@ -0,0 +1,288 @@
+function doTest(updates, assertions, expectError)
+{
+ if (expectError) {
+ doUpdateTest(updates, assertions, updateError, runNextTest);
+ } else {
+ doUpdateTest(updates, assertions, runNextTest, updateError);
+ }
+}
+
+// Never use the same URLs for multiple tests, because we aren't guaranteed
+// to reset the database between tests.
+function testFillDb() {
+ var add1Urls = [ "zaz.com/a", "yxz.com/c" ];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1",
+ "urlsExist" : add1Urls
+ };
+
+ doTest([update], assertions, false);
+}
+
+function testSimpleForward() {
+ var add1Urls = [ "foo-simple.com/a", "bar-simple.com/c" ];
+ var add2Urls = [ "foo-simple.com/b" ];
+ var add3Urls = [ "bar-simple.com/d" ];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate(
+ [{ "chunkNum" : 2,
+ "urls" : add2Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ var update3 = buildBareUpdate(
+ [{ "chunkNum" : 3,
+ "urls" : add3Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ var assertions = {
+ "tableData" : "test-phish-simple;a:1-3",
+ "urlsExist" : add1Urls.concat(add2Urls).concat(add3Urls)
+ };
+
+ doTest([update], assertions, false);
+}
+
+// Make sure that a nested forward (a forward within a forward) causes
+// the update to fail.
+function testNestedForward() {
+ var add1Urls = [ "foo-nested.com/a", "bar-nested.com/c" ];
+ var add2Urls = [ "foo-nested.com/b" ];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate(
+ [{ "chunkNum" : 2 }]);
+ var update3 = buildBareUpdate(
+ [{ "chunkNum" : 3,
+ "urls" : add1Urls }]);
+
+ update2 += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ var assertions = {
+ "tableData" : "",
+ "urlsDontExist" : add1Urls.concat(add2Urls)
+ };
+
+ doTest([update], assertions, true);
+}
+
+// An invalid URL forward causes the update to fail.
+function testInvalidUrlForward() {
+ var add1Urls = [ "foo-invalid.com/a", "bar-invalid.com/c" ];
+
+ var update = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls }]);
+ update += "u:asdf://blah/blah\n"; // invalid URL scheme
+
+ // add1Urls is present, but that is an artifact of the way we do the test.
+ var assertions = {
+ "tableData" : "",
+ "urlsExist" : add1Urls
+ };
+
+ doTest([update], assertions, true);
+}
+
+// A failed network request causes the update to fail.
+function testErrorUrlForward() {
+ var add1Urls = [ "foo-forward.com/a", "bar-forward.com/c" ];
+
+ var update = buildPhishingUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls }]);
+ update += "u:http://test.invalid/asdf/asdf\n"; // invalid URL scheme
+
+ // add1Urls is present, but that is an artifact of the way we do the test.
+ var assertions = {
+ "tableData" : "",
+ "urlsExist" : add1Urls
+ };
+
+ doTest([update], assertions, true);
+}
+
+function testMultipleTables() {
+ var add1Urls = [ "foo-multiple.com/a", "bar-multiple.com/c" ];
+ var add2Urls = [ "foo-multiple.com/b" ];
+ var add3Urls = [ "bar-multiple.com/d" ];
+ var add4Urls = [ "bar-multiple.com/e" ];
+ var add6Urls = [ "bar-multiple.com/g" ];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate(
+ [{ "chunkNum" : 2,
+ "urls" : add2Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ update += "i:test-malware-simple\n";
+
+ var update3 = buildBareUpdate(
+ [{ "chunkNum" : 3,
+ "urls" : add3Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ update += "i:test-unwanted-simple\n";
+ var update4 = buildBareUpdate(
+ [{ "chunkNum" : 4,
+ "urls" : add4Urls }]);
+ update += "u:data:," + encodeURIComponent(update4) + "\n";
+
+ update += "i:test-block-simple\n";
+ var update6 = buildBareUpdate(
+ [{ "chunkNum" : 6,
+ "urls" : add6Urls }]);
+ update += "u:data:," + encodeURIComponent(update6) + "\n";
+
+ var assertions = {
+ "tableData" : "test-block-simple;a:6\ntest-malware-simple;a:3\ntest-phish-simple;a:1-2\ntest-unwanted-simple;a:4",
+ "urlsExist" : add1Urls.concat(add2Urls),
+ "malwareUrlsExist" : add3Urls,
+ "unwantedUrlsExist" : add4Urls,
+ "blockedUrlsExist" : add6Urls
+ };
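+ // ("tableData" compares against the dbservice's table snapshot: one
+ // "<table>;a:<chunk-ranges>" line per table, newline-joined in table-name
+ // order, as the expected string above assumes.)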
+
+ doTest([update], assertions, false);
+}
+
+function testUrlInMultipleTables() {
+ var add1Urls = [ "foo-forward.com/a" ];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate(
+ [{ "chunkNum" : 1,
+ "urls" : add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ update += "i:test-malware-simple\n";
+ var update2 = buildBareUpdate(
+ [{ "chunkNum" : 2,
+ "urls" : add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ update += "i:test-unwanted-simple\n";
+ var update3 = buildBareUpdate(
+ [{ "chunkNum" : 3,
+ "urls" : add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ var assertions = {
+ "tableData" : "test-malware-simple;a:2\ntest-phish-simple;a:1\ntest-unwanted-simple;a:3",
+ "urlExistInMultipleTables" : { url: add1Urls,
+ tables: "test-malware-simple,test-phish-simple,test-unwanted-simple" }
+ };
+
+ doTest([update], assertions, false);
+}
+
+function Observer(callback) {
+ this.observe = callback;
+}
+
+Observer.prototype =
+{
+QueryInterface: function(iid)
+{
+ if (!iid.equals(Ci.nsISupports) &&
+ !iid.equals(Ci.nsIObserver)) {
+ throw Cr.NS_ERROR_NO_INTERFACE;
+ }
+ return this;
+}
+};
+
+// Tests a database reset request.
+function testReset() {
+ // The moz-phish-simple table is populated by its own, separate update
+ // request, so it should not be reset when we run the updates later in
+ // this function.
+ var mozAddUrls = [ "moz-reset.com/a" ];
+ var mozUpdate = buildMozPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : mozAddUrls
+ }]);
+
+ var dataUpdate = "data:," + encodeURIComponent(mozUpdate);
+
+ streamUpdater.downloadUpdates(mozTables, "", true,
+ dataUpdate, () => {}, updateError, updateError);
+
+ var addUrls1 = [ "foo-reset.com/a", "foo-reset.com/b" ];
+ var update1 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 1,
+ "urls" : addUrls1
+ }]);
+
+ var update2 = "n:1000\nr:pleasereset\n";
+
+ var addUrls3 = [ "bar-reset.com/a", "bar-reset.com/b" ];
+ var update3 = buildPhishingUpdate(
+ [
+ { "chunkNum" : 3,
+ "urls" : addUrls3
+ }]);
+
+ var assertions = {
+ "tableData" : "moz-phish-simple;a:1\ntest-phish-simple;a:3", // tables that should still be there.
+ "mozPhishingUrlsExist" : mozAddUrls, // mozAddUrls added prior to the reset
+ // but it should still exist after reset.
+ "urlsExist" : addUrls3, // addUrls3 added after the reset.
+ "urlsDontExist" : addUrls1 // addUrls1 added prior to the reset
+ };
+
+ // Use these update responses in order. The update request only
+ // contains test-*-simple tables so the reset will only apply to these.
+ doTest([update1, update2, update3], assertions, false);
+}
+
+
+function run_test()
+{
+ runTests([
+ testSimpleForward,
+ testNestedForward,
+ testInvalidUrlForward,
+ testErrorUrlForward,
+ testMultipleTables,
+ testUrlInMultipleTables,
+ testReset
+ ]);
+}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js b/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
new file mode 100644
index 000000000..f7c51b956
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
@@ -0,0 +1,37 @@
+function run_test() {
+ let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
+ .getService(Ci.nsIUrlClassifierUtils);
+
+ // Test list name to threat type conversion.
+
+ equal(urlUtils.convertListNameToThreatType("goog-malware-proto"), 1);
+ equal(urlUtils.convertListNameToThreatType("googpub-phish-proto"), 2);
+ equal(urlUtils.convertListNameToThreatType("goog-unwanted-proto"), 3);
+ equal(urlUtils.convertListNameToThreatType("goog-phish-proto"), 5);
+
+ try {
+ urlUtils.convertListNameToThreatType("bad-list-name");
+ ok(false, "Bad list name should lead to exception.");
+ } catch (e) {}
+
+ // Test threat type to list name conversion.
+ equal(urlUtils.convertThreatTypeToListNames(1), "goog-malware-proto");
+ equal(urlUtils.convertThreatTypeToListNames(2), "googpub-phish-proto,test-phish-proto");
+ equal(urlUtils.convertThreatTypeToListNames(3), "goog-unwanted-proto,test-unwanted-proto");
+ equal(urlUtils.convertThreatTypeToListNames(5), "goog-phish-proto");
+
+ try {
+ urlUtils.convertThreatTypeToListNames(0);
+ ok(false, "Bad threat type should lead to exception.");
+ } catch (e) {}
+
+ try {
+ urlUtils.convertThreatTypeToListNames(100);
+ ok(false, "Bad threat type should lead to exception.");
+ } catch (e) {}
+} \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/xpcshell.ini b/toolkit/components/url-classifier/tests/unit/xpcshell.ini
new file mode 100644
index 000000000..c34d575c6
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/xpcshell.ini
@@ -0,0 +1,24 @@
+[DEFAULT]
+head = head_urlclassifier.js
+tail = tail_urlclassifier.js
+skip-if = toolkit == 'android'
+support-files =
+ data/digest1.chunk
+ data/digest2.chunk
+
+[test_addsub.js]
+[test_bug1274685_unowned_list.js]
+[test_backoff.js]
+[test_dbservice.js]
+[test_hashcompleter.js]
+# Bug 752243: Profile cleanup frequently fails
+#skip-if = os == "mac" || os == "linux"
+[test_partial.js]
+[test_prefixset.js]
+[test_threat_type_conversion.js]
+[test_provider_url.js]
+[test_streamupdater.js]
+[test_digest256.js]
+[test_listmanager.js]
+[test_pref.js]
+[test_safebrowsing_protobuf.js]