path: root/toolkit/components/url-classifier
author     Matt A. Tobin <email@mattatobin.com>  2020-02-25 15:07:00 -0500
committer  Matt A. Tobin <email@mattatobin.com>  2020-02-25 15:07:00 -0500
commit     0ddd00f1959c78ce37c14fef3c83401408fca3bf (patch)
tree       d408e02767c86cf8aac3acbb86722b03c77ede6f /toolkit/components/url-classifier
parent     20f0905b33cbb18d1caa80c55e2f552c2e18957b (diff)
Issue #439 - Remove tests from toolkit/
Diffstat (limited to 'toolkit/components/url-classifier')
-rw-r--r--  toolkit/components/url-classifier/moz.build | 2
-rw-r--r--  toolkit/components/url-classifier/tests/UrlClassifierTestUtils.jsm | 98
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/Common.cpp | 78
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/Common.h | 26
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp | 279
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp | 97
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp | 88
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp | 98
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp | 159
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp | 165
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp | 24
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp | 52
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestTable.cpp | 47
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp | 755
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp | 276
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp | 559
-rw-r--r--  toolkit/components/url-classifier/tests/gtest/moz.build | 27
-rw-r--r--  toolkit/components/url-classifier/tests/jar.mn | 2
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/.eslintrc.js | 8
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html | 144
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/bad.css | 1
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/bad.css^headers^ | 1
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/basic.vtt | 27
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^ | 1
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/bug_1281083.html | 35
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/chrome.ini | 23
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html | 213
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html | 24
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/classifierCommon.js | 112
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/classifierFrame.html | 57
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/classifierHelper.js | 201
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/cleanWorker.js | 10
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/dnt.html | 31
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/dnt.sjs | 9
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/evil.css | 1
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/evil.css^headers^ | 1
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/evil.js | 1
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/evil.js^headers^ | 2
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/evilWorker.js | 3
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/gethash.sjs | 130
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/gethashFrame.html | 62
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/good.js | 1
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/import.css | 3
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/mochitest.ini | 39
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/ping.sjs | 16
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/raptor.jpg | bin 49629 -> 0 bytes
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/seek.webm | bin 215529 -> 0 bytes
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html | 56
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html | 305
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html | 50
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_classifier.html | 65
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html | 149
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html | 76
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html | 121
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_classify_track.html | 162
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_donottrack.html | 150
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_gethash.html | 157
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_lookup_system_principal.html | 29
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html | 154
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html | 87
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html | 107
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html | 153
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/track.html | 7
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js | 3
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/update.sjs | 114
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/vp9.webm | bin 97465 -> 0 bytes
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html | 15
-rw-r--r--  toolkit/components/url-classifier/tests/mochitest/workerFrame.html | 65
-rw-r--r--  toolkit/components/url-classifier/tests/moz.build | 18
-rw-r--r--  toolkit/components/url-classifier/tests/unit/.eslintrc.js | 7
-rw-r--r--  toolkit/components/url-classifier/tests/unit/data/digest1.chunk | bin 939 -> 0 bytes
-rw-r--r--  toolkit/components/url-classifier/tests/unit/data/digest2.chunk | 2
-rw-r--r--  toolkit/components/url-classifier/tests/unit/head_urlclassifier.js | 429
-rw-r--r--  toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js | 1
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_addsub.js | 488
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_backoff.js | 89
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js | 32
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_dbservice.js | 314
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_digest256.js | 147
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_hashcompleter.js | 403
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_listmanager.js | 376
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_partial.js | 825
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_pref.js | 14
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_prefixset.js | 232
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_provider_url.js | 34
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js | 23
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_streamupdater.js | 288
-rw-r--r--  toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js | 37
-rw-r--r--  toolkit/components/url-classifier/tests/unit/xpcshell.ini | 24
-rw-r--r--  toolkit/components/url-classifier/tests/unittests.xul | 188
90 files changed, 0 insertions, 9984 deletions
diff --git a/toolkit/components/url-classifier/moz.build b/toolkit/components/url-classifier/moz.build
index d8856ee4a..b6e630abd 100644
--- a/toolkit/components/url-classifier/moz.build
+++ b/toolkit/components/url-classifier/moz.build
@@ -4,8 +4,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-TEST_DIRS += ['tests']
-
XPIDL_SOURCES += [
'nsIUrlClassifierDBService.idl',
'nsIUrlClassifierHashCompleter.idl',
diff --git a/toolkit/components/url-classifier/tests/UrlClassifierTestUtils.jsm b/toolkit/components/url-classifier/tests/UrlClassifierTestUtils.jsm
deleted file mode 100644
index 615769473..000000000
--- a/toolkit/components/url-classifier/tests/UrlClassifierTestUtils.jsm
+++ /dev/null
@@ -1,98 +0,0 @@
-"use strict";
-
-this.EXPORTED_SYMBOLS = ["UrlClassifierTestUtils"];
-
-const {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
-
-const TRACKING_TABLE_NAME = "mochitest-track-simple";
-const TRACKING_TABLE_PREF = "urlclassifier.trackingTable";
-const WHITELIST_TABLE_NAME = "mochitest-trackwhite-simple";
-const WHITELIST_TABLE_PREF = "urlclassifier.trackingWhitelistTable";
-
-Cu.import("resource://gre/modules/Services.jsm");
-
-this.UrlClassifierTestUtils = {
-
- addTestTrackers() {
- // Add some URLs to the tracking databases
- let trackingURL1 = "tracking.example.com/";
- let trackingURL2 = "itisatracker.org/";
- let trackingURL3 = "trackertest.org/";
- let whitelistedURL = "itisatrap.org/?resource=itisatracker.org";
-
- let trackingUpdate =
- "n:1000\ni:" + TRACKING_TABLE_NAME + "\nad:3\n" +
- "a:1:32:" + trackingURL1.length + "\n" +
- trackingURL1 + "\n" +
- "a:2:32:" + trackingURL2.length + "\n" +
- trackingURL2 + "\n" +
- "a:3:32:" + trackingURL3.length + "\n" +
- trackingURL3 + "\n";
- let whitelistUpdate =
- "n:1000\ni:" + WHITELIST_TABLE_NAME + "\nad:1\n" +
- "a:1:32:" + whitelistedURL.length + "\n" +
- whitelistedURL + "\n";
-
- var tables = [
- {
- pref: TRACKING_TABLE_PREF,
- name: TRACKING_TABLE_NAME,
- update: trackingUpdate
- },
- {
- pref: WHITELIST_TABLE_PREF,
- name: WHITELIST_TABLE_NAME,
- update: whitelistUpdate
- }
- ];
-
- return this.useTestDatabase(tables);
- },
-
- cleanupTestTrackers() {
- Services.prefs.clearUserPref(TRACKING_TABLE_PREF);
- Services.prefs.clearUserPref(WHITELIST_TABLE_PREF);
- },
-
- /**
-  * Adds some entries to a test tracking protection database, and resets
- * back to the default database after the test ends.
- *
- * @return {Promise}
- */
- useTestDatabase(tables) {
- for (var table of tables) {
- Services.prefs.setCharPref(table.pref, table.name);
- }
-
- return new Promise((resolve, reject) => {
- let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"].
- getService(Ci.nsIUrlClassifierDBService);
- let listener = {
- QueryInterface: iid => {
- if (iid.equals(Ci.nsISupports) ||
- iid.equals(Ci.nsIUrlClassifierUpdateObserver))
- return listener;
-
- throw Cr.NS_ERROR_NO_INTERFACE;
- },
- updateUrlRequested: url => { },
- streamFinished: status => { },
- updateError: errorCode => {
- reject("Couldn't update classifier.");
- },
- updateSuccess: requestedTimeout => {
- resolve();
- }
- };
-
- for (var table of tables) {
- dbService.beginUpdate(listener, table.name, "");
- dbService.beginStream("", "");
- dbService.updateStream(table.update);
- dbService.finishStream();
- dbService.finishUpdate();
- }
- });
- },
-};
diff --git a/toolkit/components/url-classifier/tests/gtest/Common.cpp b/toolkit/components/url-classifier/tests/gtest/Common.cpp
deleted file mode 100644
index b5f024b38..000000000
--- a/toolkit/components/url-classifier/tests/gtest/Common.cpp
+++ /dev/null
@@ -1,78 +0,0 @@
-#include "Common.h"
-#include "HashStore.h"
-#include "Classifier.h"
-#include "nsAppDirectoryServiceDefs.h"
-#include "nsTArray.h"
-#include "nsIThread.h"
-#include "nsThreadUtils.h"
-#include "nsUrlClassifierUtils.h"
-
-using namespace mozilla;
-using namespace mozilla::safebrowsing;
-
-template<typename Function>
-void RunTestInNewThread(Function&& aFunction) {
- nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(mozilla::Forward<Function>(aFunction));
- nsCOMPtr<nsIThread> testingThread;
- nsresult rv = NS_NewThread(getter_AddRefs(testingThread), r);
- ASSERT_EQ(rv, NS_OK);
- testingThread->Shutdown();
-}
-
-already_AddRefed<nsIFile>
-GetFile(const nsTArray<nsString>& path)
-{
- nsCOMPtr<nsIFile> file;
- nsresult rv = NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return nullptr;
- }
-
- for (uint32_t i = 0; i < path.Length(); i++) {
- file->Append(path[i]);
- }
- return file.forget();
-}
-
-void ApplyUpdate(nsTArray<TableUpdate*>& updates)
-{
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-
- UniquePtr<Classifier> classifier(new Classifier());
- classifier->Open(*file);
-
- {
- // Force nsIUrlClassifierUtils loading on main thread
- // because nsIUrlClassifierDBService will not run in advance
- // in gtest.
- nsresult rv;
- nsCOMPtr<nsIUrlClassifierUtils> dummy =
- do_GetService(NS_URLCLASSIFIERUTILS_CONTRACTID, &rv);
- ASSERT_TRUE(NS_SUCCEEDED(rv));
- }
-
- RunTestInNewThread([&] () -> void {
- classifier->ApplyUpdates(&updates);
- });
-}
-
-void ApplyUpdate(TableUpdate* update)
-{
- nsTArray<TableUpdate*> updates = { update };
- ApplyUpdate(updates);
-}
-
-void
-PrefixArrayToPrefixStringMap(const nsTArray<nsCString>& prefixArray,
- PrefixStringMap& out)
-{
- out.Clear();
-
- for (uint32_t i = 0; i < prefixArray.Length(); i++) {
- const nsCString& prefix = prefixArray[i];
- nsCString* prefixString = out.LookupOrAdd(prefix.Length());
- prefixString->Append(prefix.BeginReading(), prefix.Length());
- }
-}
-
diff --git a/toolkit/components/url-classifier/tests/gtest/Common.h b/toolkit/components/url-classifier/tests/gtest/Common.h
deleted file mode 100644
index c9a9cdf7e..000000000
--- a/toolkit/components/url-classifier/tests/gtest/Common.h
+++ /dev/null
@@ -1,26 +0,0 @@
-#include "HashStore.h"
-#include "nsIFile.h"
-#include "nsTArray.h"
-#include "gtest/gtest.h"
-
-using namespace mozilla;
-using namespace mozilla::safebrowsing;
-
-template<typename Function>
-void RunTestInNewThread(Function&& aFunction);
-
-// Return nsIFile with root directory - NS_APP_USER_PROFILE_50_DIR
-// Sub-directories are passed in path argument.
-already_AddRefed<nsIFile>
-GetFile(const nsTArray<nsString>& path);
-
-// ApplyUpdate will call |ApplyUpdates| of Classifier within a new thread
-void ApplyUpdate(nsTArray<TableUpdate*>& updates);
-
-void ApplyUpdate(TableUpdate* update);
-
-// This function converts lexicographically-sorted prefixes to a hashtable
-// whose key is the prefix size and whose value is the concatenated prefix string.
-void PrefixArrayToPrefixStringMap(const nsTArray<nsCString>& prefixArray,
- PrefixStringMap& out);
-
diff --git a/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp b/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp
deleted file mode 100644
index dba2fc2c1..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp
+++ /dev/null
@@ -1,279 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <set>
-
-#include "gtest/gtest.h"
-#include "ChunkSet.h"
-#include "mozilla/ArrayUtils.h"
-
-TEST(UrlClassifierChunkSet, Empty)
-{
- mozilla::safebrowsing::ChunkSet chunkSet;
- mozilla::safebrowsing::ChunkSet removeSet;
-
- removeSet.Set(0);
-
- ASSERT_FALSE(chunkSet.Has(0));
- ASSERT_FALSE(chunkSet.Has(1));
- ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK);
- ASSERT_TRUE(chunkSet.Length() == 0);
-
- chunkSet.Set(0);
-
- ASSERT_TRUE(chunkSet.Has(0));
- ASSERT_TRUE(chunkSet.Length() == 1);
- ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK);
- ASSERT_FALSE(chunkSet.Has(0));
- ASSERT_TRUE(chunkSet.Length() == 0);
-}
-
-TEST(UrlClassifierChunkSet, Main)
-{
- static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
-
- mozilla::safebrowsing::ChunkSet chunkSet;
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- chunkSet.Set(testVals[i]);
- }
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- ASSERT_TRUE(chunkSet.Has(testVals[i]));
- }
-
- ASSERT_FALSE(chunkSet.Has(3));
- ASSERT_FALSE(chunkSet.Has(4));
- ASSERT_FALSE(chunkSet.Has(9));
- ASSERT_FALSE(chunkSet.Has(11));
-
- ASSERT_TRUE(chunkSet.Length() == MOZ_ARRAY_LENGTH(testVals));
-}
-
-TEST(UrlClassifierChunkSet, Merge)
-{
- static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
- static int mergeVals[] = {9, 3, 4, 20, 14, 16};
-
- mozilla::safebrowsing::ChunkSet chunkSet;
- mozilla::safebrowsing::ChunkSet mergeSet;
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- chunkSet.Set(testVals[i]);
- }
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- mergeSet.Set(mergeVals[i]);
- }
-
- chunkSet.Merge(mergeSet);
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- ASSERT_TRUE(chunkSet.Has(testVals[i]));
- }
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- ASSERT_TRUE(chunkSet.Has(mergeVals[i]));
- }
-
- // -1 because 14 is duplicated in both sets
- ASSERT_TRUE(chunkSet.Length() ==
- MOZ_ARRAY_LENGTH(testVals) + MOZ_ARRAY_LENGTH(mergeVals) - 1);
-
- ASSERT_FALSE(chunkSet.Has(11));
- ASSERT_FALSE(chunkSet.Has(15));
- ASSERT_FALSE(chunkSet.Has(17));
- ASSERT_FALSE(chunkSet.Has(18));
- ASSERT_FALSE(chunkSet.Has(19));
-}
-
-TEST(UrlClassifierChunkSet, Merge2)
-{
- static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
- static int mergeVals[] = {9, 3, 4, 20, 14, 16};
- static int mergeVals2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
-
- mozilla::safebrowsing::ChunkSet chunkSet;
- mozilla::safebrowsing::ChunkSet mergeSet;
- mozilla::safebrowsing::ChunkSet mergeSet2;
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- chunkSet.Set(testVals[i]);
- }
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- mergeSet.Set(mergeVals[i]);
- }
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals2); i++) {
- mergeSet2.Set(mergeVals2[i]);
- }
-
- chunkSet.Merge(mergeSet);
- chunkSet.Merge(mergeSet2);
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- ASSERT_TRUE(chunkSet.Has(testVals[i]));
- }
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- ASSERT_TRUE(chunkSet.Has(mergeVals[i]));
- }
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals2); i++) {
- ASSERT_TRUE(chunkSet.Has(mergeVals2[i]));
- }
-
- ASSERT_FALSE(chunkSet.Has(15));
- ASSERT_FALSE(chunkSet.Has(17));
- ASSERT_FALSE(chunkSet.Has(18));
- ASSERT_FALSE(chunkSet.Has(19));
-}
-
-TEST(UrlClassifierChunkSet, Stress)
-{
- mozilla::safebrowsing::ChunkSet chunkSet;
- mozilla::safebrowsing::ChunkSet mergeSet;
- std::set<int> refSet;
- std::set<int> refMergeSet;
- static const int TEST_ITERS = 7000;
- static const int REMOVE_ITERS = 3000;
- static const int TEST_RANGE = 10000;
-
- // Construction by Set
- for (int i = 0; i < TEST_ITERS; i++) {
- int chunk = rand() % TEST_RANGE;
- chunkSet.Set(chunk);
- refSet.insert(chunk);
- }
-
- // Same elements as reference set
- for (auto it = refSet.begin(); it != refSet.end(); ++it) {
- ASSERT_TRUE(chunkSet.Has(*it));
- }
-
- // Hole punching via Remove
- for (int i = 0; i < REMOVE_ITERS; i++) {
- int chunk = rand() % TEST_RANGE;
- mozilla::safebrowsing::ChunkSet helpChunk;
- helpChunk.Set(chunk);
-
- chunkSet.Remove(helpChunk);
- refSet.erase(chunk);
-
- ASSERT_FALSE(chunkSet.Has(chunk));
- }
-
- // Should have chunks present in reference set
- // Should not have chunks absent in reference set
- for (int it = 0; it < TEST_RANGE; ++it) {
- auto found = refSet.find(it);
- if (chunkSet.Has(it)) {
- ASSERT_FALSE(found == refSet.end());
- } else {
- ASSERT_TRUE(found == refSet.end());
- }
- }
-
- // Construct set to merge with
- for (int i = 0; i < TEST_ITERS; i++) {
- int chunk = rand() % TEST_RANGE;
- mergeSet.Set(chunk);
- refMergeSet.insert(chunk);
- }
-
- // Merge set constructed correctly
- for (auto it = refMergeSet.begin(); it != refMergeSet.end(); ++it) {
- ASSERT_TRUE(mergeSet.Has(*it));
- }
-
- mozilla::safebrowsing::ChunkSet origSet;
- origSet = chunkSet;
-
- chunkSet.Merge(mergeSet);
- refSet.insert(refMergeSet.begin(), refMergeSet.end());
-
- // Check for presence of elements from both sources
- // Should not have chunks absent in reference set
- for (int it = 0; it < TEST_RANGE; ++it) {
- auto found = refSet.find(it);
- if (chunkSet.Has(it)) {
- ASSERT_FALSE(found == refSet.end());
- } else {
- ASSERT_TRUE(found == refSet.end());
- }
- }
-
- // Unmerge
- chunkSet.Remove(origSet);
- for (int it = 0; it < TEST_RANGE; ++it) {
- if (origSet.Has(it)) {
- ASSERT_FALSE(chunkSet.Has(it));
- } else if (mergeSet.Has(it)) {
- ASSERT_TRUE(chunkSet.Has(it));
- }
- }
-}
-
-TEST(UrlClassifierChunkSet, RemoveClear)
-{
- static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
- static int mergeVals[] = {3, 4, 9, 16, 20};
-
- mozilla::safebrowsing::ChunkSet chunkSet;
- mozilla::safebrowsing::ChunkSet mergeSet;
- mozilla::safebrowsing::ChunkSet removeSet;
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- chunkSet.Set(testVals[i]);
- removeSet.Set(testVals[i]);
- }
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- mergeSet.Set(mergeVals[i]);
- }
-
- ASSERT_TRUE(chunkSet.Merge(mergeSet) == NS_OK);
- ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK);
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- ASSERT_TRUE(chunkSet.Has(mergeVals[i]));
- }
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- ASSERT_FALSE(chunkSet.Has(testVals[i]));
- }
-
- chunkSet.Clear();
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- ASSERT_FALSE(chunkSet.Has(mergeVals[i]));
- }
-}
-
-TEST(UrlClassifierChunkSet, Serialize)
-{
- static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
- static int mergeVals[] = {3, 4, 9, 16, 20};
-
- mozilla::safebrowsing::ChunkSet chunkSet;
- mozilla::safebrowsing::ChunkSet mergeSet;
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
- chunkSet.Set(testVals[i]);
- }
-
- for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
- mergeSet.Set(mergeVals[i]);
- }
-
- chunkSet.Merge(mergeSet);
-
- nsAutoCString mergeResult;
- chunkSet.Serialize(mergeResult);
-
- printf("mergeResult: %s\n", mergeResult.get());
-
- nsAutoCString expected(NS_LITERAL_CSTRING("1-10,12-14,16,20"));
-
- ASSERT_TRUE(mergeResult.Equals(expected));
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp b/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp
deleted file mode 100644
index bdb9eebb0..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp
+++ /dev/null
@@ -1,97 +0,0 @@
-#include "HashStore.h"
-#include "nsPrintfCString.h"
-#include "string.h"
-#include "gtest/gtest.h"
-#include "mozilla/Unused.h"
-
-using namespace mozilla;
-using namespace mozilla::safebrowsing;
-
-static const char* kFilesInV2[] = {".pset", ".sbstore"};
-static const char* kFilesInV4[] = {".pset", ".metadata"};
-
-#define V2_TABLE "gtest-malware-simple"
-#define V4_TABLE1 "goog-malware-proto"
-#define V4_TABLE2 "goog-phish-proto"
-
-#define ROOT_DIR NS_LITERAL_STRING("safebrowsing")
-#define SB_FILE(x, y) NS_ConvertUTF8toUTF16(nsPrintfCString("%s%s",x, y))
-
-template<typename T, size_t N>
-void CheckFileExist(const char* table, const T (&files)[N], bool expectExists)
-{
- for (uint32_t i = 0; i < N; i++) {
- // This is just a quick way to know whether this is a V4 table
- NS_ConvertUTF8toUTF16 SUB_DIR(strstr(table, "-proto") ? "google4" : "");
- nsCOMPtr<nsIFile> file =
- GetFile(nsTArray<nsString> { ROOT_DIR, SUB_DIR, SB_FILE(table, files[i]) });
-
- bool exists;
- file->Exists(&exists);
-
- nsAutoCString path;
- file->GetNativePath(path);
- ASSERT_EQ(expectExists, exists) << path.get();
- }
-}
-
-TEST(FailUpdate, CheckTableReset)
-{
- const bool FULL_UPDATE = true;
- const bool PARTIAL_UPDATE = false;
-
- // Apply V2 update
- {
- auto update = new TableUpdateV2(NS_LITERAL_CSTRING(V2_TABLE));
- Unused << update->NewAddChunk(1);
-
- ApplyUpdate(update);
-
- // A successful V2 update should create .pset & .sbstore files
- CheckFileExist(V2_TABLE, kFilesInV2, true);
- }
-
- // Helper function to generate table update data
- auto func = [](TableUpdateV4* update, bool full, const char* str) {
- update->SetFullUpdate(full);
- std::string prefix(str);
- update->NewPrefixes(prefix.length(), prefix);
- };
-
- // Apply V4 update for table1
- {
- auto update = new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE1));
- func(update, FULL_UPDATE, "test_prefix");
-
- ApplyUpdate(update);
-
- // A successful V4 update should create .pset & .metadata files
- CheckFileExist(V4_TABLE1, kFilesInV4, true);
- }
-
- // Apply V4 update for table2
- {
- auto update = new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE2));
- func(update, FULL_UPDATE, "test_prefix");
-
- ApplyUpdate(update);
-
- CheckFileExist(V4_TABLE2, kFilesInV4, true);
- }
-
- // Apply a V4 update with the same prefix as in the previous full update.
- // This should cause an update error.
- {
- auto update = new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE1));
- func(update, PARTIAL_UPDATE, "test_prefix");
-
- ApplyUpdate(update);
-
- // A failed update should remove files for that table
- CheckFileExist(V4_TABLE1, kFilesInV4, false);
-
- // A failed update should NOT remove files for the other tables
- CheckFileExist(V2_TABLE, kFilesInV2, true);
- CheckFileExist(V4_TABLE2, kFilesInV4, true);
- }
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp b/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp
deleted file mode 100644
index 00525f704..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp
+++ /dev/null
@@ -1,88 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "LookupCacheV4.h"
-#include "Common.h"
-
-#define GTEST_SAFEBROWSING_DIR NS_LITERAL_CSTRING("safebrowsing")
-#define GTEST_TABLE NS_LITERAL_CSTRING("gtest-malware-proto")
-
-typedef nsCString _Fragment;
-typedef nsTArray<nsCString> _PrefixArray;
-
-// Generate a hash prefix from string
-static const nsCString
-GeneratePrefix(const _Fragment& aFragment, uint8_t aLength)
-{
- Completion complete;
- nsCOMPtr<nsICryptoHash> cryptoHash = do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID);
- complete.FromPlaintext(aFragment, cryptoHash);
-
- nsCString hash;
- hash.Assign((const char *)complete.buf, aLength);
- return hash;
-}
-
-static UniquePtr<LookupCacheV4>
-SetupLookupCacheV4(const _PrefixArray& prefixArray)
-{
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-
- file->AppendNative(GTEST_SAFEBROWSING_DIR);
-
- UniquePtr<LookupCacheV4> cache = MakeUnique<LookupCacheV4>(GTEST_TABLE, EmptyCString(), file);
- nsresult rv = cache->Init();
- EXPECT_EQ(rv, NS_OK);
-
- PrefixStringMap map;
- PrefixArrayToPrefixStringMap(prefixArray, map);
- rv = cache->Build(map);
- EXPECT_EQ(rv, NS_OK);
-
- return Move(cache);
-}
-
-void
-TestHasPrefix(const _Fragment& aFragment, bool aExpectedHas, bool aExpectedComplete)
-{
- _PrefixArray array = { GeneratePrefix(_Fragment("bravo.com/"), 32),
- GeneratePrefix(_Fragment("browsing.com/"), 8),
- GeneratePrefix(_Fragment("gound.com/"), 5),
- GeneratePrefix(_Fragment("small.com/"), 4)
- };
-
- RunTestInNewThread([&] () -> void {
- UniquePtr<LookupCache> cache = SetupLookupCacheV4(array);
-
- Completion lookupHash;
- nsCOMPtr<nsICryptoHash> cryptoHash = do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID);
- lookupHash.FromPlaintext(aFragment, cryptoHash);
-
- bool has, complete;
- nsresult rv = cache->Has(lookupHash, &has, &complete);
-
- EXPECT_EQ(rv, NS_OK);
- EXPECT_EQ(has, aExpectedHas);
- EXPECT_EQ(complete, aExpectedComplete);
-
- cache->ClearAll();
- });
-
-}
-
-TEST(LookupCacheV4, HasComplete)
-{
- TestHasPrefix(_Fragment("bravo.com/"), true, true);
-}
-
-TEST(LookupCacheV4, HasPrefix)
-{
- TestHasPrefix(_Fragment("browsing.com/"), true, false);
-}
-
-TEST(LookupCacheV4, Nomatch)
-{
- TestHasPrefix(_Fragment("nomatch.com/"), false, false);
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp b/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp
deleted file mode 100644
index 72ff08a1e..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp
+++ /dev/null
@@ -1,98 +0,0 @@
-#include "LookupCache.h"
-#include "LookupCacheV4.h"
-#include "HashStore.h"
-#include "gtest/gtest.h"
-#include "nsAppDirectoryServiceDefs.h"
-
-namespace mozilla {
-namespace safebrowsing {
-
-class PerProviderDirectoryTestUtils {
-public:
- template<typename T>
- static nsIFile* InspectStoreDirectory(const T& aT)
- {
- return aT.mStoreDirectory;
- }
-};
-
-} // end of namespace safebrowsing
-} // end of namespace mozilla
-
-using namespace mozilla;
-using namespace mozilla::safebrowsing;
-
-template<typename T>
-void VerifyPrivateStorePath(const char* aTableName,
- const char* aProvider,
- nsIFile* aRootDir,
- bool aUsePerProviderStore)
-{
- nsString rootStorePath;
- nsresult rv = aRootDir->GetPath(rootStorePath);
- EXPECT_EQ(rv, NS_OK);
-
- T target(nsCString(aTableName), nsCString(aProvider), aRootDir);
-
- nsIFile* privateStoreDirectory =
- PerProviderDirectoryTestUtils::InspectStoreDirectory(target);
-
- nsString privateStorePath;
- rv = privateStoreDirectory->GetPath(privateStorePath);
- ASSERT_EQ(rv, NS_OK);
-
- nsString expectedPrivateStorePath = rootStorePath;
-
- if (aUsePerProviderStore) {
- // Use the API to append "provider" to the root directory path
- nsCOMPtr<nsIFile> expectedPrivateStoreDir;
- rv = aRootDir->Clone(getter_AddRefs(expectedPrivateStoreDir));
- ASSERT_EQ(rv, NS_OK);
-
- expectedPrivateStoreDir->AppendNative(nsCString(aProvider));
- rv = expectedPrivateStoreDir->GetPath(expectedPrivateStorePath);
- ASSERT_EQ(rv, NS_OK);
- }
-
- printf("table: %s\nprovider: %s\nroot path: %s\nprivate path: %s\n\n",
- aTableName,
- aProvider,
- NS_ConvertUTF16toUTF8(rootStorePath).get(),
- NS_ConvertUTF16toUTF8(privateStorePath).get());
-
- ASSERT_TRUE(privateStorePath == expectedPrivateStorePath);
-}
-
-TEST(PerProviderDirectory, LookupCache)
-{
- RunTestInNewThread([] () -> void {
- nsCOMPtr<nsIFile> rootDir;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir));
-
- // For V2 tables (NOT ending with '-proto'), root directory should be
- // used as the private store.
- VerifyPrivateStorePath<LookupCacheV2>("goog-phish-shavar", "google", rootDir, false);
-
- // For V4 tables, if provider is found, use per-provider subdirectory;
- // If not found, use root directory.
- VerifyPrivateStorePath<LookupCacheV4>("goog-noprovider-proto", "", rootDir, false);
- VerifyPrivateStorePath<LookupCacheV4>("goog-phish-proto", "google4", rootDir, true);
- });
-}
-
-TEST(PerProviderDirectory, HashStore)
-{
- RunTestInNewThread([] () -> void {
- nsCOMPtr<nsIFile> rootDir;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir));
-
- // For V2 tables (NOT ending with '-proto'), root directory should be
- // used as the private store.
- VerifyPrivateStorePath<HashStore>("goog-phish-shavar", "google", rootDir, false);
-
- // For V4 tables, if provider is found, use per-provider subdirectory;
- // If not found, use root directory.
- VerifyPrivateStorePath<HashStore>("goog-noprovider-proto", "", rootDir, false);
- VerifyPrivateStorePath<HashStore>("goog-phish-proto", "google4", rootDir, true);
- });
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp b/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp
deleted file mode 100644
index ea6ffb5e6..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp
+++ /dev/null
@@ -1,159 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-#include "gtest/gtest.h"
-#include "ProtocolParser.h"
-#include "mozilla/EndianUtils.h"
-
-using namespace mozilla;
-using namespace mozilla::safebrowsing;
-
-typedef FetchThreatListUpdatesResponse_ListUpdateResponse ListUpdateResponse;
-
-static bool
-InitUpdateResponse(ListUpdateResponse* aUpdateResponse,
- ThreatType aThreatType,
- const nsACString& aState,
- const nsACString& aChecksum,
- bool isFullUpdate,
- const nsTArray<uint32_t>& aFixedLengthPrefixes,
- bool aDoPrefixEncoding);
-
-static void
-DumpBinary(const nsACString& aBinary);
-
-TEST(ProtocolParser, UpdateWait)
-{
- // Top-level response which contains a list of update responses
- // for different lists.
- FetchThreatListUpdatesResponse response;
-
- auto r = response.mutable_list_update_responses()->Add();
- InitUpdateResponse(r, SOCIAL_ENGINEERING_PUBLIC,
- nsCString("sta\x00te", 6),
- nsCString("check\x0sum", 9),
- true,
- {0, 1, 2, 3},
- false /* aDoPrefixEncoding */ );
-
- // Set min wait duration.
- auto minWaitDuration = response.mutable_minimum_wait_duration();
- minWaitDuration->set_seconds(8);
- minWaitDuration->set_nanos(1 * 1000000000);
-
- std::string s;
- response.SerializeToString(&s);
-
- DumpBinary(nsCString(s.c_str(), s.length()));
-
- ProtocolParser* p = new ProtocolParserProtobuf();
- p->AppendStream(nsCString(s.c_str(), s.length()));
- p->End();
- ASSERT_EQ(p->UpdateWaitSec(), 9u);
- delete p;
-}
-
-TEST(ProtocolParser, SingleValueEncoding)
-{
- // Top-level response which contains a list of update responses
- // for different lists.
- FetchThreatListUpdatesResponse response;
-
- auto r = response.mutable_list_update_responses()->Add();
-
- const char* expectedPrefix = "\x00\x01\x02\x00";
- if (!InitUpdateResponse(r, SOCIAL_ENGINEERING_PUBLIC,
- nsCString("sta\x00te", 6),
- nsCString("check\x0sum", 9),
- true,
- // As per spec, we should interpret the prefix as uint32
- // in little endian before encoding.
- {LittleEndian::readUint32(expectedPrefix)},
- true /* aDoPrefixEncoding */ )) {
- printf("Failed to initialize update response.");
- ASSERT_TRUE(false);
- return;
- }
-
- // Set min wait duration.
- auto minWaitDuration = response.mutable_minimum_wait_duration();
- minWaitDuration->set_seconds(8);
- minWaitDuration->set_nanos(1 * 1000000000);
-
- std::string s;
- response.SerializeToString(&s);
-
- // Feed data to the protocol parser.
- ProtocolParser* p = new ProtocolParserProtobuf();
- p->SetRequestedTables({ nsCString("googpub-phish-proto") });
- p->AppendStream(nsCString(s.c_str(), s.length()));
- p->End();
-
- auto& tus = p->GetTableUpdates();
- auto tuv4 = TableUpdate::Cast<TableUpdateV4>(tus[0]);
- auto& prefixMap = tuv4->Prefixes();
- for (auto iter = prefixMap.Iter(); !iter.Done(); iter.Next()) {
- // This prefix map should contain only a single 4-byte prefix.
- ASSERT_EQ(iter.Key(), 4u);
-
- // The fixed-length prefix string from ProtocolParser should
- // exactly match the expected prefix string.
- auto& prefix = iter.Data()->GetPrefixString();
- ASSERT_TRUE(prefix.Equals(nsCString(expectedPrefix, 4)));
- }
-
- delete p;
-}
-
-static bool
-InitUpdateResponse(ListUpdateResponse* aUpdateResponse,
- ThreatType aThreatType,
- const nsACString& aState,
- const nsACString& aChecksum,
- bool isFullUpdate,
- const nsTArray<uint32_t>& aFixedLengthPrefixes,
- bool aDoPrefixEncoding)
-{
- aUpdateResponse->set_threat_type(aThreatType);
- aUpdateResponse->set_new_client_state(aState.BeginReading(), aState.Length());
- aUpdateResponse->mutable_checksum()->set_sha256(aChecksum.BeginReading(), aChecksum.Length());
- aUpdateResponse->set_response_type(isFullUpdate ? ListUpdateResponse::FULL_UPDATE
- : ListUpdateResponse::PARTIAL_UPDATE);
-
- auto additions = aUpdateResponse->mutable_additions()->Add();
-
- if (!aDoPrefixEncoding) {
- additions->set_compression_type(RAW);
- auto rawHashes = additions->mutable_raw_hashes();
- rawHashes->set_prefix_size(4);
- auto prefixes = rawHashes->mutable_raw_hashes();
- for (auto p : aFixedLengthPrefixes) {
- char buffer[4];
- NativeEndian::copyAndSwapToBigEndian(buffer, &p, 1);
- prefixes->append(buffer, 4);
- }
- return true;
- }
-
- if (1 != aFixedLengthPrefixes.Length()) {
- printf("This function only supports single value encoding.\n");
- return false;
- }
-
- uint32_t firstValue = aFixedLengthPrefixes[0];
- additions->set_compression_type(RICE);
- auto riceHashes = additions->mutable_rice_hashes();
- riceHashes->set_first_value(firstValue);
- riceHashes->set_num_entries(0);
-
- return true;
-}
-
-static void DumpBinary(const nsACString& aBinary)
-{
- nsCString s;
- for (size_t i = 0; i < aBinary.Length(); i++) {
- s.AppendPrintf("\\x%.2X", (uint8_t)aBinary[i]);
- }
- printf("%s\n", s.get());
-}
\ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp b/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp
deleted file mode 100644
index f03d27358..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp
+++ /dev/null
@@ -1,165 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-#include "gtest/gtest.h"
-#include "RiceDeltaDecoder.h"
-#include "mozilla/ArrayUtils.h"
-
-using namespace mozilla;
-using namespace mozilla::safebrowsing;
-
-struct TestingData {
- std::vector<uint32_t> mExpectedDecoded;
- std::vector<uint8_t> mEncoded;
- uint32_t mRiceParameter;
-};
-
-static bool runOneTest(TestingData& aData);
-
-TEST(RiceDeltaDecoder, SingleEncodedValue) {
- TestingData td = { { 99 }, { 99 }, 0 };
-
- ASSERT_TRUE(runOneTest(td));
-}
-
-// In this batch of tests, the encoded data would be like
-// what we originally receive from the network. See comment
-// in |runOneTest| for more detail.
-TEST(RiceDeltaDecoder, Empty) {
-
- // The following structure and testing data is copied from Chromium source code:
- //
- // https://chromium.googlesource.com/chromium/src.git/+/950f9975599768b6a08c7146cb4befa161be87aa/components/safe_browsing_db/v4_rice_unittest.cc#75
- //
- // and will be translated to our own testing format.
-
- struct RiceDecodingTestInfo {
- uint32_t mRiceParameter;
- std::vector<uint32_t> mDeltas;
- std::string mEncoded;
-
- RiceDecodingTestInfo(uint32_t aRiceParameter,
- const std::vector<uint32_t>& aDeltas,
- const std::string& aEncoded)
- : mRiceParameter(aRiceParameter)
- , mDeltas(aDeltas)
- , mEncoded(aEncoded)
- {
- }
- };
-
- // Copyright 2016 The Chromium Authors. All rights reserved.
- // Use of this source code is governed by a BSD-style license that can be
- // found in the media/webrtc/trunk/webrtc/LICENSE.
-
- // ----- Start of Chromium test code ----
- const std::vector<RiceDecodingTestInfo> TESTING_DATA_CHROMIUM = {
- RiceDecodingTestInfo(2, {15, 9}, "\xf7\x2"),
- RiceDecodingTestInfo(
- 28, {1777762129, 2093280223, 924369848},
- "\xbf\xa8\x3f\xfb\xfc\xfb\x5e\x27\xe6\xc3\x1d\xc6\x38"),
- RiceDecodingTestInfo(
- 28, {62763050, 1046523781, 192522171, 1800511020, 4442775, 582142548},
- "\x54\x60\x7b\xe7\x0a\x5f\xc1\xdc\xee\x69\xde"
- "\xfe\x58\x3c\xa3\xd6\xa5\xf2\x10\x8c\x4a\x59"
- "\x56\x00"),
- RiceDecodingTestInfo(
- 28, {26067715, 344823336, 8420095, 399843890, 95029378, 731622412,
- 35811335, 1047558127, 1117722715, 78698892},
- "\x06\x86\x1b\x23\x14\xcb\x46\xf2\xaf\x07\x08\xc9\x88\x54\x1f\x41\x04"
- "\xd5\x1a\x03\xeb\xe6\x3a\x80\x13\x91\x7b\xbf\x83\xf3\xb7\x85\xf1\x29"
- "\x18\xb3\x61\x09"),
- RiceDecodingTestInfo(
- 27, {225846818, 328287420, 166748623, 29117720, 552397365, 350353215,
- 558267528, 4738273, 567093445, 28563065, 55077698, 73091685,
- 339246010, 98242620, 38060941, 63917830, 206319759, 137700744},
- "\x89\x98\xd8\x75\xbc\x44\x91\xeb\x39\x0c\x3e\x30\x9a\x78\xf3\x6a\xd4"
- "\xd9\xb1\x9f\xfb\x70\x3e\x44\x3e\xa3\x08\x67\x42\xc2\x2b\x46\x69\x8e"
- "\x3c\xeb\xd9\x10\x5a\x43\x9a\x32\xa5\x2d\x4e\x77\x0f\x87\x78\x20\xb6"
- "\xab\x71\x98\x48\x0c\x9e\x9e\xd7\x23\x0c\x13\x43\x2c\xa9\x01"),
- RiceDecodingTestInfo(
- 28, {339784008, 263128563, 63871877, 69723256, 826001074, 797300228,
- 671166008, 207712688},
- std::string("\x21\xc5\x02\x91\xf9\x82\xd7\x57\xb8\xe9\x3c\xf0\xc8\x4f"
- "\xe8\x64\x8d\x77\x62\x04\xd6\x85\x3f\x1c\x97\x00\x04\x1b"
- "\x17\xc6",
- 30)),
- RiceDecodingTestInfo(
- 28, {471820069, 196333855, 855579133, 122737976, 203433838, 85354544,
- 1307949392, 165938578, 195134475, 553930435, 49231136},
- "\x95\x9c\x7d\xb0\x8f\xe8\xd9\xbd\xfe\x8c\x7f\x81\x53\x0d\x75\xdc\x4e"
- "\x40\x18\x0c\x9a\x45\x3d\xa8\xdc\xfa\x26\x59\x40\x9e\x16\x08\x43\x77"
- "\xc3\x4e\x04\x01\xa4\xe6\x5d\x00"),
- RiceDecodingTestInfo(
- 27, {87336845, 129291033, 30906211, 433549264, 30899891, 53207875,
- 11959529, 354827862, 82919275, 489637251, 53561020, 336722992,
- 408117728, 204506246, 188216092, 9047110, 479817359, 230317256},
- "\x1a\x4f\x69\x2a\x63\x9a\xf6\xc6\x2e\xaf\x73\xd0\x6f\xd7\x31\xeb\x77"
- "\x1d\x43\xe3\x2b\x93\xce\x67\x8b\x59\xf9\x98\xd4\xda\x4f\x3c\x6f\xb0"
- "\xe8\xa5\x78\x8d\x62\x36\x18\xfe\x08\x1e\x78\xd8\x14\x32\x24\x84\x61"
- "\x1c\xf3\x37\x63\xc4\xa0\x88\x7b\x74\xcb\x64\xc8\x5c\xba\x05"),
- RiceDecodingTestInfo(
- 28, {297968956, 19709657, 259702329, 76998112, 1023176123, 29296013,
- 1602741145, 393745181, 177326295, 55225536, 75194472},
- "\xf1\x94\x0a\x87\x6c\x5f\x96\x90\xe3\xab\xf7\xc0\xcb\x2d\xe9\x76\xdb"
- "\xf8\x59\x63\xc1\x6f\x7c\x99\xe3\x87\x5f\xc7\x04\xde\xb9\x46\x8e\x54"
- "\xc0\xac\x4a\x03\x0d\x6c\x8f\x00"),
- RiceDecodingTestInfo(
- 28, {532220688, 780594691, 436816483, 163436269, 573044456, 1069604,
- 39629436, 211410997, 227714491, 381562898, 75610008, 196754597,
- 40310339, 15204118, 99010842},
- "\x41\x2c\xe4\xfe\x06\xdc\x0d\xbd\x31\xa5\x04\xd5\x6e\xdd\x9b\x43\xb7"
- "\x3f\x11\x24\x52\x10\x80\x4f\x96\x4b\xd4\x80\x67\xb2\xdd\x52\xc9\x4e"
- "\x02\xc6\xd7\x60\xde\x06\x92\x52\x1e\xdd\x35\x64\x71\x26\x2c\xfe\xcf"
- "\x81\x46\xb2\x79\x01"),
- RiceDecodingTestInfo(
- 28, {219354713, 389598618, 750263679, 554684211, 87381124, 4523497,
- 287633354, 801308671, 424169435, 372520475, 277287849},
- "\xb2\x2c\x26\x3a\xcd\x66\x9c\xdb\x5f\x07\x2e\x6f\xe6\xf9\x21\x10\x52"
- "\xd5\x94\xf4\x82\x22\x48\xf9\x9d\x24\xf6\xff\x2f\xfc\x6d\x3f\x21\x65"
- "\x1b\x36\x34\x56\xea\xc4\x21\x00"),
- };
-
- // ----- End of Chromium test code ----
-
- for (auto tdc : TESTING_DATA_CHROMIUM) {
- // Populate chromium testing data to our native testing data struct.
- TestingData d;
-
- d.mRiceParameter = tdc.mRiceParameter; // Populate rice parameter.
-
- // Populate encoded data from std::string to vector<uint8>.
- d.mEncoded.resize(tdc.mEncoded.size());
- memcpy(&d.mEncoded[0], tdc.mEncoded.c_str(), tdc.mEncoded.size());
-
- // Populate deltas into the expected decoded data. The first value is just
- // set to an arbitrary value, say 7, to avoid any assumption about the
- // first value in the implementation.
- d.mExpectedDecoded.resize(tdc.mDeltas.size() + 1);
- for (size_t i = 0; i < d.mExpectedDecoded.size(); i++) {
- if (0 == i) {
- d.mExpectedDecoded[i] = 7; // "7" is an arbitrary starting value
- } else {
- d.mExpectedDecoded[i] = d.mExpectedDecoded[i - 1] + tdc.mDeltas[i - 1];
- }
- }
-
- ASSERT_TRUE(runOneTest(d));
- }
-}
-
-static bool
-runOneTest(TestingData& aData)
-{
- RiceDeltaDecoder decoder(&aData.mEncoded[0], aData.mEncoded.size());
-
- std::vector<uint32_t> decoded(aData.mExpectedDecoded.size());
-
- uint32_t firstValue = aData.mExpectedDecoded[0];
- bool rv = decoder.Decode(aData.mRiceParameter,
- firstValue,
- decoded.size() - 1, // # of entries (first value not included).
- &decoded[0]);
-
- return rv && decoded == aData.mExpectedDecoded;
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp b/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp
deleted file mode 100644
index fe6f28960..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp
+++ /dev/null
@@ -1,24 +0,0 @@
-#include "safebrowsing.pb.h"
-#include "gtest/gtest.h"
-
-TEST(SafeBrowsingProtobuf, Empty)
-{
- using namespace mozilla::safebrowsing;
-
- const std::string CLIENT_ID = "firefox";
-
- // Construct a simple update request.
- FetchThreatListUpdatesRequest r;
- r.set_allocated_client(new ClientInfo());
- r.mutable_client()->set_client_id(CLIENT_ID);
-
- // Then serialize.
- std::string s;
- r.SerializeToString(&s);
-
- // De-serialize.
- FetchThreatListUpdatesRequest r2;
- r2.ParseFromString(s);
-
- ASSERT_EQ(r2.client().client_id(), CLIENT_ID);
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp b/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp
deleted file mode 100644
index 89ed74be6..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp
+++ /dev/null
@@ -1,52 +0,0 @@
-#include "Entries.h"
-#include "mozilla/EndianUtils.h"
-
-TEST(SafebrowsingHash, ToFromUint32)
-{
- using namespace mozilla::safebrowsing;
-
- // typedef SafebrowsingHash<PREFIX_SIZE, PrefixComparator> Prefix;
- // typedef nsTArray<Prefix> PrefixArray;
-
- const char PREFIX_RAW[4] = { 0x1, 0x2, 0x3, 0x4 };
- uint32_t PREFIX_UINT32;
- memcpy(&PREFIX_UINT32, PREFIX_RAW, 4);
-
- Prefix p;
- p.Assign(nsCString(PREFIX_RAW, 4));
- ASSERT_EQ(p.ToUint32(), PREFIX_UINT32);
-
- p.FromUint32(PREFIX_UINT32);
- ASSERT_EQ(memcmp(PREFIX_RAW, p.buf, 4), 0);
-}
-
-TEST(SafebrowsingHash, Compare)
-{
- using namespace mozilla;
- using namespace mozilla::safebrowsing;
-
- Prefix p1, p2, p3;
-
- // The order of p1,p2,p3 is "p1 == p3 < p2"
-#if MOZ_LITTLE_ENDIAN
- p1.Assign(nsCString("\x01\x00\x00\x00", 4));
- p2.Assign(nsCString("\x00\x00\x00\x01", 4));
- p3.Assign(nsCString("\x01\x00\x00\x00", 4));
-#else
- p1.Assign(nsCString("\x00\x00\x00\x01", 4));
- p2.Assign(nsCString("\x01\x00\x00\x00", 4));
- p3.Assign(nsCString("\x00\x00\x00\x01", 4));
-#endif
-
- // Make sure "p1 == p3 < p2" is true
- // on both little and big endian machine.
-
- ASSERT_EQ(p1.Compare(p2), -1);
- ASSERT_EQ(p1.Compare(p1), 0);
- ASSERT_EQ(p2.Compare(p1), 1);
- ASSERT_EQ(p1.Compare(p3), 0);
-
- ASSERT_TRUE(p1 < p2);
- ASSERT_TRUE(p1 == p1);
- ASSERT_TRUE(p1 == p3);
-}
\ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/gtest/TestTable.cpp b/toolkit/components/url-classifier/tests/gtest/TestTable.cpp
deleted file mode 100644
index 307587459..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestTable.cpp
+++ /dev/null
@@ -1,47 +0,0 @@
-#include "gtest/gtest.h"
-#include "nsUrlClassifierDBService.h"
-
-using namespace mozilla::safebrowsing;
-
-void
-TestResponseCode(const char* table, nsresult result)
-{
- nsCString tableName(table);
- ASSERT_EQ(TablesToResponse(tableName), result);
-}
-
-TEST(UrlClassifierTable, ResponseCode)
-{
- // malware URIs.
- TestResponseCode("goog-malware-shavar", NS_ERROR_MALWARE_URI);
- TestResponseCode("test-malware-simple", NS_ERROR_MALWARE_URI);
- TestResponseCode("goog-phish-shavar,test-malware-simple", NS_ERROR_MALWARE_URI);
- TestResponseCode("test-malware-simple,mozstd-track-digest256,mozplugin-block-digest256", NS_ERROR_MALWARE_URI);
-
- // phish URIs.
- TestResponseCode("goog-phish-shavar", NS_ERROR_PHISHING_URI);
- TestResponseCode("test-phish-simple", NS_ERROR_PHISHING_URI);
- TestResponseCode("test-phish-simple,mozplugin-block-digest256", NS_ERROR_PHISHING_URI);
- TestResponseCode("mozstd-track-digest256,test-phish-simple,goog-unwanted-shavar", NS_ERROR_PHISHING_URI);
-
- // unwanted URIs.
- TestResponseCode("goog-unwanted-shavar", NS_ERROR_UNWANTED_URI);
- TestResponseCode("test-unwanted-simple", NS_ERROR_UNWANTED_URI);
- TestResponseCode("mozplugin-unwanted-digest256,mozfull-track-digest256", NS_ERROR_UNWANTED_URI);
- TestResponseCode("test-block-simple,mozfull-track-digest256,test-unwanted-simple", NS_ERROR_UNWANTED_URI);
-
- // track URIs.
- TestResponseCode("test-track-simple", NS_ERROR_TRACKING_URI);
- TestResponseCode("mozstd-track-digest256", NS_ERROR_TRACKING_URI);
- TestResponseCode("test-block-simple,mozstd-track-digest256", NS_ERROR_TRACKING_URI);
-
- // block URIs
- TestResponseCode("test-block-simple", NS_ERROR_BLOCKED_URI);
- TestResponseCode("mozplugin-block-digest256", NS_ERROR_BLOCKED_URI);
- TestResponseCode("mozplugin2-block-digest256", NS_ERROR_BLOCKED_URI);
-
- TestResponseCode("test-trackwhite-simple", NS_OK);
- TestResponseCode("mozstd-trackwhite-digest256", NS_OK);
- TestResponseCode("goog-badbinurl-shavar", NS_OK);
- TestResponseCode("goog-downloadwhite-digest256", NS_OK);
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
deleted file mode 100644
index 470a88ba2..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
+++ /dev/null
@@ -1,755 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-#include "Common.h"
-#include "Classifier.h"
-#include "HashStore.h"
-#include "nsAppDirectoryServiceDefs.h"
-#include "nsIFile.h"
-#include "nsIThread.h"
-#include "string.h"
-#include "gtest/gtest.h"
-#include "nsThreadUtils.h"
-
-using namespace mozilla;
-using namespace mozilla::safebrowsing;
-
-typedef nsCString _Prefix;
-typedef nsTArray<_Prefix> _PrefixArray;
-
-#define GTEST_SAFEBROWSING_DIR NS_LITERAL_CSTRING("safebrowsing")
-#define GTEST_TABLE NS_LITERAL_CSTRING("gtest-malware-proto")
-#define GTEST_PREFIXFILE NS_LITERAL_CSTRING("gtest-malware-proto.pset")
-
-// This function removes common elements of inArray and outArray from
-// outArray. This is used by the partial update testcase to ensure partial update
-// data won't contain prefixes we already have.
-static void
-RemoveIntersection(const _PrefixArray& inArray, _PrefixArray& outArray)
-{
- for (uint32_t i = 0; i < inArray.Length(); i++) {
- int32_t idx = outArray.BinaryIndexOf(inArray[i]);
- if (idx >= 0) {
- outArray.RemoveElementAt(idx);
- }
- }
-}
-
-// This function removes elements from outArray at the indices specified in
-// the removal array.
-static void
-RemoveElements(const nsTArray<uint32_t>& removal, _PrefixArray& outArray)
-{
- for (int32_t i = removal.Length() - 1; i >= 0; i--) {
- outArray.RemoveElementAt(removal[i]);
- }
-}
-
-static void
-MergeAndSortArray(const _PrefixArray& array1,
- const _PrefixArray& array2,
- _PrefixArray& output)
-{
- output.Clear();
- output.AppendElements(array1);
- output.AppendElements(array2);
- output.Sort();
-}
-
-static void
-CalculateCheckSum(_PrefixArray& prefixArray, nsCString& checksum)
-{
- prefixArray.Sort();
-
- nsresult rv;
- nsCOMPtr<nsICryptoHash> cryptoHash =
- do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID, &rv);
-
- cryptoHash->Init(nsICryptoHash::SHA256);
- for (uint32_t i = 0; i < prefixArray.Length(); i++) {
- const _Prefix& prefix = prefixArray[i];
- cryptoHash->Update(reinterpret_cast<uint8_t*>(
- const_cast<char*>(prefix.get())), prefix.Length());
- }
- cryptoHash->Finish(false, checksum);
-}
-
-// N: Number of prefixes, MIN/MAX: minimum/maximum prefix size
-// This function will append generated prefixes to outArray.
-static void
-CreateRandomSortedPrefixArray(uint32_t N,
- uint32_t MIN,
- uint32_t MAX,
- _PrefixArray& outArray)
-{
- outArray.SetCapacity(outArray.Length() + N);
-
- const uint32_t range = (MAX - MIN + 1);
-
- for (uint32_t i = 0; i < N; i++) {
- uint32_t prefixSize = (rand() % range) + MIN;
- _Prefix prefix;
- prefix.SetLength(prefixSize);
-
- while (true) {
- char* dst = prefix.BeginWriting();
- for (uint32_t j = 0; j < prefixSize; j++) {
- dst[j] = rand() % 256;
- }
-
- if (!outArray.Contains(prefix)) {
- outArray.AppendElement(prefix);
- break;
- }
- }
- }
-
- outArray.Sort();
-}
-
-// N: Number of removal indices, MAX: maximum index
-static void
-CreateRandomRemovalIndices(uint32_t N,
- uint32_t MAX,
- nsTArray<uint32_t>& outArray)
-{
- for (uint32_t i = 0; i < N; i++) {
- uint32_t idx = rand() % MAX;
- if (!outArray.Contains(idx)) {
- outArray.InsertElementSorted(idx);
- }
- }
-}
-
-// Function to generate TableUpdateV4.
-static void
-GenerateUpdateData(bool fullUpdate,
- PrefixStringMap& add,
- nsTArray<uint32_t>* removal,
- nsCString* checksum,
- nsTArray<TableUpdate*>& tableUpdates)
-{
- TableUpdateV4* tableUpdate = new TableUpdateV4(GTEST_TABLE);
- tableUpdate->SetFullUpdate(fullUpdate);
-
- for (auto iter = add.ConstIter(); !iter.Done(); iter.Next()) {
- nsCString* pstring = iter.Data();
- std::string str(pstring->BeginReading(), pstring->Length());
-
- tableUpdate->NewPrefixes(iter.Key(), str);
- }
-
- if (removal) {
- tableUpdate->NewRemovalIndices(removal->Elements(), removal->Length());
- }
-
- if (checksum) {
- std::string stdChecksum;
- stdChecksum.assign(const_cast<char*>(checksum->BeginReading()), checksum->Length());
-
- tableUpdate->NewChecksum(stdChecksum);
- }
-
- tableUpdates.AppendElement(tableUpdate);
-}
-
-static void
-VerifyPrefixSet(PrefixStringMap& expected)
-{
- // Verify the prefix set is written to disk.
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-
- file->AppendNative(GTEST_SAFEBROWSING_DIR);
- file->AppendNative(GTEST_PREFIXFILE);
-
- RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
- load->Init(GTEST_TABLE);
-
- PrefixStringMap prefixesInFile;
- load->LoadFromFile(file);
- load->GetPrefixes(prefixesInFile);
-
- for (auto iter = expected.ConstIter(); !iter.Done(); iter.Next()) {
- nsCString* expectedPrefix = iter.Data();
- nsCString* resultPrefix = prefixesInFile.Get(iter.Key());
-
- ASSERT_TRUE(*resultPrefix == *expectedPrefix);
- }
-}
-
-static void
-Clear()
-{
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-
- UniquePtr<Classifier> classifier(new Classifier());
- classifier->Open(*file);
- classifier->Reset();
-}
-
-static void
-testUpdateFail(nsTArray<TableUpdate*>& tableUpdates)
-{
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-
- UniquePtr<Classifier> classifier(new Classifier());
- classifier->Open(*file);
-
- RunTestInNewThread([&] () -> void {
- nsresult rv = classifier->ApplyUpdates(&tableUpdates);
- ASSERT_TRUE(NS_FAILED(rv));
- });
-}
-
-static void
-testUpdate(nsTArray<TableUpdate*>& tableUpdates,
- PrefixStringMap& expected)
-{
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
-
- UniquePtr<Classifier> classifier(new Classifier());
- classifier->Open(*file);
-
- RunTestInNewThread([&] () -> void {
- nsresult rv = classifier->ApplyUpdates(&tableUpdates);
- ASSERT_TRUE(rv == NS_OK);
-
- VerifyPrefixSet(expected);
- });
-}
-
-static void
-testFullUpdate(PrefixStringMap& add, nsCString* checksum)
-{
- nsTArray<TableUpdate*> tableUpdates;
-
- GenerateUpdateData(true, add, nullptr, checksum, tableUpdates);
-
- testUpdate(tableUpdates, add);
-}
-
-static void
-testPartialUpdate(PrefixStringMap& add,
- nsTArray<uint32_t>* removal,
- nsCString* checksum,
- PrefixStringMap& expected)
-{
- nsTArray<TableUpdate*> tableUpdates;
- GenerateUpdateData(false, add, removal, checksum, tableUpdates);
-
- testUpdate(tableUpdates, expected);
-}
-
-static void
-testOpenLookupCache()
-{
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
- file->AppendNative(GTEST_SAFEBROWSING_DIR);
-
- RunTestInNewThread([&] () -> void {
- LookupCacheV4 cache(nsCString(GTEST_TABLE), EmptyCString(), file);
- nsresult rv = cache.Init();
- ASSERT_EQ(rv, NS_OK);
-
- rv = cache.Open();
- ASSERT_EQ(rv, NS_OK);
- });
-}
-
-// Tests start from here.
-TEST(UrlClassifierTableUpdateV4, FixLenghtPSetFullUpdate)
-{
- srand(time(NULL));
-
- _PrefixArray array;
- PrefixStringMap map;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(5000, 4, 4, array);
- PrefixArrayToPrefixStringMap(array, map);
- CalculateCheckSum(array, checksum);
-
- testFullUpdate(map, &checksum);
-
- Clear();
-}
-
-TEST(UrlClassifierTableUpdateV4, VariableLenghtPSetFullUpdate)
-{
- _PrefixArray array;
- PrefixStringMap map;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(5000, 5, 32, array);
- PrefixArrayToPrefixStringMap(array, map);
- CalculateCheckSum(array, checksum);
-
- testFullUpdate(map, &checksum);
-
- Clear();
-}
-
-// This test contains both a variable-length prefix set and a fixed-length prefix set
-TEST(UrlClassifierTableUpdateV4, MixedPSetFullUpdate)
-{
- _PrefixArray array;
- PrefixStringMap map;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(5000, 4, 4, array);
- CreateRandomSortedPrefixArray(1000, 5, 32, array);
- PrefixArrayToPrefixStringMap(array, map);
- CalculateCheckSum(array, checksum);
-
- testFullUpdate(map, &checksum);
-
- Clear();
-}
-
-TEST(UrlClassifierTableUpdateV4, PartialUpdateWithRemoval)
-{
- _PrefixArray fArray;
-
- // Apply a full update first.
- {
- PrefixStringMap fMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
- CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
- }
-
- // Apply a partial update with removal.
- {
- _PrefixArray pArray, mergedArray;
- PrefixStringMap pMap, mergedMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
- RemoveIntersection(fArray, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- // Remove 1/5 of the elements of the original prefix set.
- nsTArray<uint32_t> removal;
- CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
- RemoveElements(removal, fArray);
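- // The removal indices refer to positions in the prefix set already on disk
- // (the full update), so trimming fArray here models the post-removal state.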
-
- // Calculate the expected prefix map.
- MergeAndSortArray(fArray, pArray, mergedArray);
- PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
- CalculateCheckSum(mergedArray, checksum);
-
- testPartialUpdate(pMap, &removal, &checksum, mergedMap);
- }
-
- Clear();
-}
-
-TEST(UrlClassifierTableUpdateV4, PartialUpdateWithoutRemoval)
-{
- _PrefixArray fArray;
-
- // Apply a full update first.
- {
- PrefixStringMap fMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
- CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
- }
-
- // Apply a partial update without removal
- {
- _PrefixArray pArray, mergedArray;
- PrefixStringMap pMap, mergedMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
- RemoveIntersection(fArray, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- // Calculate the expected prefix map.
- MergeAndSortArray(fArray, pArray, mergedArray);
- PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
- CalculateCheckSum(mergedArray, checksum);
-
- testPartialUpdate(pMap, nullptr, &checksum, mergedMap);
- }
-
- Clear();
-}
-
-// Expect failure because the partial update contains a prefix that already
-// exists in the old prefix set.
-TEST(UrlClassifierTableUpdateV4, PartialUpdatePrefixAlreadyExist)
-{
- _PrefixArray fArray;
-
- // Apply a full update first.
- {
- PrefixStringMap fMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
- }
-
- // Apply a partial update which contains a prefix from the previous full update.
- // This should cause an update error.
- {
- _PrefixArray pArray;
- PrefixStringMap pMap;
- nsTArray<TableUpdate*> tableUpdates;
-
- // Pick one prefix from the full update and add it to the partial update.
- // This should result in a failure when ApplyUpdates is called.
- pArray.AppendElement(fArray[rand() % fArray.Length()]);
- CreateRandomSortedPrefixArray(200, 4, 32, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- GenerateUpdateData(false, pMap, nullptr, nullptr, tableUpdates);
- testUpdateFail(tableUpdates);
- }
-
- Clear();
-}
-
-// Test applying a partial update directly without applying a full update first.
-TEST(UrlClassifierTableUpdateV4, OnlyPartialUpdate)
-{
- _PrefixArray pArray;
- PrefixStringMap pMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
- CalculateCheckSum(pArray, checksum);
-
- testPartialUpdate(pMap, nullptr, &checksum, pMap);
-
- Clear();
-}
-
-// Test partial update without any ADD prefixes, only removalIndices.
-TEST(UrlClassifierTableUpdateV4, PartialUpdateOnlyRemoval)
-{
- _PrefixArray fArray;
-
- // Apply a full update first.
- {
- PrefixStringMap fMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(5000, 4, 4, fArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
- }
-
- // Apply a partial update with no ADD prefixes, containing only removal indices.
- {
- _PrefixArray pArray;
- PrefixStringMap pMap, mergedMap;
- nsCString checksum;
-
- // Remove 1/5 of the elements of the original prefix set.
- nsTArray<uint32_t> removal;
- CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
- RemoveElements(removal, fArray);
-
- PrefixArrayToPrefixStringMap(fArray, mergedMap);
- CalculateCheckSum(fArray, checksum);
-
- testPartialUpdate(pMap, &removal, &checksum, mergedMap);
- }
-
- Clear();
-}
-
-// Test one tableupdate array that contains a full update and multiple partial updates.
-TEST(UrlClassifierTableUpdateV4, MultipleTableUpdates)
-{
- _PrefixArray fArray, pArray, mergedArray;
- PrefixStringMap fMap, pMap, mergedMap;
- nsCString checksum;
-
- nsTArray<TableUpdate*> tableUpdates;
-
- // Generate the first full update
- CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
- CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- GenerateUpdateData(true, fMap, nullptr, &checksum, tableUpdates);
-
- // Generate second partial update
- CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
- RemoveIntersection(fArray, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- MergeAndSortArray(fArray, pArray, mergedArray);
- CalculateCheckSum(mergedArray, checksum);
-
- GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates);
-
- // Generate the third partial update
- fArray.AppendElements(pArray);
- fArray.Sort();
- pArray.Clear();
- CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
- RemoveIntersection(fArray, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- // Remove 1/5 of the elements of the original prefix set.
- nsTArray<uint32_t> removal;
- CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
- RemoveElements(removal, fArray);
-
- MergeAndSortArray(fArray, pArray, mergedArray);
- PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
- CalculateCheckSum(mergedArray, checksum);
-
- GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
-
- testUpdate(tableUpdates, mergedMap);
-
- Clear();
-}
-
-// Test applying a full update first, and then applying multiple partial updates
-// in one tableupdate array.
-TEST(UrlClassifierTableUpdateV4, MultiplePartialUpdateTableUpdates)
-{
- _PrefixArray fArray;
-
- // Apply a full update first
- {
- PrefixStringMap fMap;
- nsCString checksum;
-
- // Generate the first full update
- CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
- CreateRandomSortedPrefixArray(3000, 5, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
- }
-
- // Apply multiple partial updates in one table update
- {
- _PrefixArray pArray, mergedArray;
- PrefixStringMap pMap, mergedMap;
- nsCString checksum;
- nsTArray<uint32_t> removal;
- nsTArray<TableUpdate*> tableUpdates;
-
- // Generate first partial update
- CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
- RemoveIntersection(fArray, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- // Remove 1/5 of the elements of the original prefix set.
- CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
- RemoveElements(removal, fArray);
-
- MergeAndSortArray(fArray, pArray, mergedArray);
- CalculateCheckSum(mergedArray, checksum);
-
- GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
-
- fArray.AppendElements(pArray);
- fArray.Sort();
- pArray.Clear();
- removal.Clear();
-
- // Generate second partial update.
- CreateRandomSortedPrefixArray(2000, 4, 4, pArray);
- CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
- RemoveIntersection(fArray, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- // Remove 1/5 of the elements of the original prefix set.
- CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
- RemoveElements(removal, fArray);
-
- MergeAndSortArray(fArray, pArray, mergedArray);
- PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
- CalculateCheckSum(mergedArray, checksum);
-
- GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
-
- testUpdate(tableUpdates, mergedMap);
- }
-
- Clear();
-}
-
-// Test removal indices that exceed the size of the original prefix set.
-TEST(UrlClassifierTableUpdateV4, RemovalIndexTooLarge)
-{
- _PrefixArray fArray;
-
- // Apply a full update first
- {
- PrefixStringMap fMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
- }
-
- // Apply a partial update with a removal index array larger than the
- // old prefix set (fArray). This should cause an error.
- {
- _PrefixArray pArray;
- PrefixStringMap pMap;
- nsTArray<uint32_t> removal;
- nsTArray<TableUpdate*> tableUpdates;
-
- CreateRandomSortedPrefixArray(200, 4, 32, pArray);
- RemoveIntersection(fArray, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- for (uint32_t i = 0; i < fArray.Length() + 1; i++) {
- removal.AppendElement(i);
- }
-
- GenerateUpdateData(false, pMap, &removal, nullptr, tableUpdates);
- testUpdateFail(tableUpdates);
- }
-
- Clear();
-}
-
-TEST(UrlClassifierTableUpdateV4, ChecksumMismatch)
-{
- // Apply a full update first
- {
- _PrefixArray fArray;
- PrefixStringMap fMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
- }
-
- // Apply a partial update with incorrect checksum
- {
- _PrefixArray pArray;
- PrefixStringMap pMap;
- nsCString checksum;
- nsTArray<TableUpdate*> tableUpdates;
-
- CreateRandomSortedPrefixArray(200, 4, 32, pArray);
- PrefixArrayToPrefixStringMap(pArray, pMap);
-
- // The checksum should be calculated over both the old prefix set and the add
- // prefix set; here we calculate it over the add prefix set only, to check that
- // ApplyUpdates returns a failure.
- CalculateCheckSum(pArray, checksum);
-
- GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates);
- testUpdateFail(tableUpdates);
- }
-
- Clear();
-}
-
-TEST(UrlClassifierTableUpdateV4, ApplyUpdateThenLoad)
-{
- // Apply update with checksum
- {
- _PrefixArray fArray;
- PrefixStringMap fMap;
- nsCString checksum;
-
- CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
- CalculateCheckSum(fArray, checksum);
-
- testFullUpdate(fMap, &checksum);
-
- // Opening the lookup cache will load the prefix set and verify the checksum
- testOpenLookupCache();
- }
-
- Clear();
-
- // Apply update without checksum
- {
- _PrefixArray fArray;
- PrefixStringMap fMap;
-
- CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
- PrefixArrayToPrefixStringMap(fArray, fMap);
-
- testFullUpdate(fMap, nullptr);
-
- testOpenLookupCache();
- }
-
- Clear();
-}
-
-// This test is used to avoid an error from nsICryptoHash
-TEST(UrlClassifierTableUpdateV4, ApplyUpdateWithFixedChecksum)
-{
- _PrefixArray fArray = { _Prefix("enus"), _Prefix("apollo"), _Prefix("mars"),
- _Prefix("Hecatonchires cyclopes"),
- _Prefix("vesta"), _Prefix("neptunus"), _Prefix("jupiter"),
- _Prefix("diana"), _Prefix("minerva"), _Prefix("ceres"),
- _Prefix("Aidos,Adephagia,Adikia,Aletheia"),
- _Prefix("hecatonchires"), _Prefix("alcyoneus"), _Prefix("hades"),
- _Prefix("vulcanus"), _Prefix("juno"), _Prefix("mercury"),
- _Prefix("Stheno, Euryale and Medusa")
- };
- fArray.Sort();
-
- PrefixStringMap fMap;
- PrefixArrayToPrefixStringMap(fArray, fMap);
-
- nsCString checksum("\xae\x18\x94\xd7\xd0\x83\x5f\xc1"
- "\x58\x59\x5c\x2c\x72\xb9\x6e\x5e"
- "\xf4\xe8\x0a\x6b\xff\x5e\x6b\x81"
- "\x65\x34\x06\x16\x06\x59\xa0\x67");
-
- testFullUpdate(fMap, &checksum);
-
- // Opening the lookup cache will load the prefix set and verify the checksum
- testOpenLookupCache();
-
- Clear();
-}
-
diff --git a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp
deleted file mode 100644
index fa5ce4f56..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp
+++ /dev/null
@@ -1,276 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include <stdio.h>
-#include <ctype.h>
-
-#include <mozilla/RefPtr.h>
-#include "nsString.h"
-#include "nsEscape.h"
-#include "nsUrlClassifierUtils.h"
-#include "stdlib.h"
-#include "gtest/gtest.h"
-
-static char int_to_hex_digit(int32_t i) {
- NS_ASSERTION((i >= 0) && (i <= 15), "int too big in int_to_hex_digit");
- return static_cast<char>(((i < 10) ? (i + '0') : ((i - 10) + 'A')));
-}
-
-static void CheckEquals(nsCString& expected, nsCString& actual)
-{
- ASSERT_TRUE((expected).Equals((actual)));
-}
-
-void TestUnescapeHelper(const char* in, const char* expected)
-{
- nsCString out, strIn(in), strExp(expected);
-
- NS_UnescapeURL(strIn.get(), strIn.Length(), esc_AlwaysCopy, out);
- CheckEquals(strExp, out);
-}
-
-// Make sure NS_UnescapeURL from nsEscape.h does what the server does.
-TEST(UrlClassifierUtils, Unescape)
-{
- // test empty string
- TestUnescapeHelper("\0", "\0");
-
- // Test decoding of all characters.
- nsCString allCharsEncoded, allCharsEncodedLowercase, allCharsAsString;
- for (int32_t i = 1; i < 256; ++i) {
- allCharsEncoded.Append('%');
- allCharsEncoded.Append(int_to_hex_digit(i / 16));
- allCharsEncoded.Append((int_to_hex_digit(i % 16)));
-
- allCharsEncodedLowercase.Append('%');
- allCharsEncodedLowercase.Append(tolower(int_to_hex_digit(i / 16)));
- allCharsEncodedLowercase.Append(tolower(int_to_hex_digit(i % 16)));
-
- allCharsAsString.Append(static_cast<char>(i));
- }
-
- nsCString out;
- NS_UnescapeURL(allCharsEncoded.get(),
- allCharsEncoded.Length(),
- esc_AlwaysCopy,
- out);
-
- CheckEquals(allCharsAsString, out);
-
- out.Truncate();
- NS_UnescapeURL(allCharsEncodedLowercase.get(),
- allCharsEncodedLowercase.Length(),
- esc_AlwaysCopy,
- out);
- CheckEquals(allCharsAsString, out);
-
- // Test %-related edge cases
- TestUnescapeHelper("%", "%");
- TestUnescapeHelper("%xx", "%xx");
- TestUnescapeHelper("%%", "%%");
- TestUnescapeHelper("%%%", "%%%");
- TestUnescapeHelper("%%%%", "%%%%");
- TestUnescapeHelper("%1", "%1");
- TestUnescapeHelper("%1z", "%1z");
- TestUnescapeHelper("a%1z", "a%1z");
- TestUnescapeHelper("abc%d%e%fg%hij%klmno%", "abc%d%e%fg%hij%klmno%");
-
- // A few more tests
- TestUnescapeHelper("%25", "%");
- TestUnescapeHelper("%25%32%35", "%25");
-}
-
-void TestEncodeHelper(const char* in, const char* expected)
-{
- nsCString out, strIn(in), strExp(expected);
- RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils;
- utils->Init();
-
- utils->SpecialEncode(strIn, true, out);
- CheckEquals(strExp, out);
-}
-
-TEST(UrlClassifierUtils, Enc)
-{
- // Test empty string
- TestEncodeHelper("", "");
-
- // Test that all characters we shouldn't encode ([33-36], [38-126]) are not encoded.
- nsCString noenc;
- for (int32_t i = 33; i < 127; i++) {
- if (i != 37) { // skip %
- noenc.Append(static_cast<char>(i));
- }
- }
- RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils;
- utils->Init();
- nsCString out;
- utils->SpecialEncode(noenc, false, out);
- CheckEquals(noenc, out);
-
- // Test that all the chars we should encode ([1-32], 37, [127-255]) are encoded.
- nsCString yesAsString, yesExpectedString;
- for (int32_t i = 1; i < 256; i++) {
- if (i < 33 || i == 37 || i > 126) {
- yesAsString.Append(static_cast<char>(i));
- yesExpectedString.Append('%');
- yesExpectedString.Append(int_to_hex_digit(i / 16));
- yesExpectedString.Append(int_to_hex_digit(i % 16));
- }
- }
-
- out.Truncate();
- utils->SpecialEncode(yesAsString, false, out);
- CheckEquals(yesExpectedString, out);
-
- TestEncodeHelper("blah//blah", "blah/blah");
-}
-
-void TestCanonicalizeHelper(const char* in, const char* expected)
-{
- nsCString out, strIn(in), strExp(expected);
- RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils;
- utils->Init();
-
- utils->CanonicalizePath(strIn, out);
- CheckEquals(strExp, out);
-}
-
-TEST(UrlClassifierUtils, Canonicalize)
-{
- // Test repeated %-decoding. Note: %25 --> %, %32 --> 2, %35 --> 5
- TestCanonicalizeHelper("%25", "%25");
- TestCanonicalizeHelper("%25%32%35", "%25");
- TestCanonicalizeHelper("asdf%25%32%35asd", "asdf%25asd");
- TestCanonicalizeHelper("%%%25%32%35asd%%", "%25%25%25asd%25%25");
- TestCanonicalizeHelper("%25%32%35%25%32%35%25%32%35", "%25%25%25");
- TestCanonicalizeHelper("%25", "%25");
- TestCanonicalizeHelper("%257Ea%2521b%2540c%2523d%2524e%25f%255E00%252611%252A22%252833%252944_55%252B",
- "~a!b@c#d$e%25f^00&11*22(33)44_55+");
-
- TestCanonicalizeHelper("", "");
- TestCanonicalizeHelper("%31%36%38%2e%31%38%38%2e%39%39%2e%32%36/%2E%73%65%63%75%72%65/%77%77%77%2E%65%62%61%79%2E%63%6F%6D/",
- "168.188.99.26/.secure/www.ebay.com/");
- TestCanonicalizeHelper("195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/",
- "195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/");
- // Added in bug 489455. %00 should no longer be changed to %01.
- TestCanonicalizeHelper("%00", "%00");
-}
-
-void TestParseIPAddressHelper(const char *in, const char *expected)
-{
- nsCString out, strIn(in), strExp(expected);
- RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils;
- utils->Init();
-
- utils->ParseIPAddress(strIn, out);
- CheckEquals(strExp, out);
-}
-
-TEST(UrlClassifierUtils, ParseIPAddress)
-{
- TestParseIPAddressHelper("123.123.0.0.1", "");
- TestParseIPAddressHelper("255.0.0.1", "255.0.0.1");
- TestParseIPAddressHelper("12.0x12.01234", "12.18.2.156");
- TestParseIPAddressHelper("276.2.3", "20.2.0.3");
- TestParseIPAddressHelper("012.034.01.055", "10.28.1.45");
- TestParseIPAddressHelper("0x12.0x43.0x44.0x01", "18.67.68.1");
- TestParseIPAddressHelper("167838211", "10.1.2.3");
- TestParseIPAddressHelper("3279880203", "195.127.0.11");
- TestParseIPAddressHelper("0x12434401", "18.67.68.1");
- TestParseIPAddressHelper("413960661", "24.172.137.213");
- TestParseIPAddressHelper("03053104725", "24.172.137.213");
- TestParseIPAddressHelper("030.0254.0x89d5", "24.172.137.213");
- TestParseIPAddressHelper("1.234.4.0377", "1.234.4.255");
- TestParseIPAddressHelper("1.2.3.00x0", "");
- TestParseIPAddressHelper("10.192.95.89 xy", "10.192.95.89");
- TestParseIPAddressHelper("10.192.95.89 xyz", "");
- TestParseIPAddressHelper("1.2.3.0x0", "1.2.3.0");
- TestParseIPAddressHelper("1.2.3.4", "1.2.3.4");
-}
-
-void TestCanonicalNumHelper(const char *in, uint32_t bytes,
- bool allowOctal, const char *expected)
-{
- nsCString out, strIn(in), strExp(expected);
- RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils;
- utils->Init();
-
- utils->CanonicalNum(strIn, bytes, allowOctal, out);
- CheckEquals(strExp, out);
-}
-
-TEST(UrlClassifierUtils, CanonicalNum)
-{
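- // CanonicalNum re-emits the parsed value as `bytes` dot-separated octets;
- // e.g. "367" with bytes=2 is 0x016F, which becomes "1.111".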
- TestCanonicalNumHelper("", 1, true, "");
- TestCanonicalNumHelper("10", 0, true, "");
- TestCanonicalNumHelper("45", 1, true, "45");
- TestCanonicalNumHelper("0x10", 1, true, "16");
- TestCanonicalNumHelper("367", 2, true, "1.111");
- TestCanonicalNumHelper("012345", 3, true, "0.20.229");
- TestCanonicalNumHelper("0173", 1, true, "123");
- TestCanonicalNumHelper("09", 1, false, "9");
- TestCanonicalNumHelper("0x120x34", 2, true, "");
- TestCanonicalNumHelper("0x12fc", 2, true, "18.252");
- TestCanonicalNumHelper("3279880203", 4, true, "195.127.0.11");
- TestCanonicalNumHelper("0x0000059", 1, true, "89");
- TestCanonicalNumHelper("0x00000059", 1, true, "89");
- TestCanonicalNumHelper("0x0000067", 1, true, "103");
-}
-
-void TestHostnameHelper(const char *in, const char *expected)
-{
- nsCString out, strIn(in), strExp(expected);
- RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils;
- utils->Init();
-
- utils->CanonicalizeHostname(strIn, out);
- CheckEquals(strExp, out);
-}
-
-TEST(UrlClassifierUtils, Hostname)
-{
- TestHostnameHelper("abcd123;[]", "abcd123;[]");
- TestHostnameHelper("abc.123", "abc.123");
- TestHostnameHelper("abc..123", "abc.123");
- TestHostnameHelper("trailing.", "trailing");
- TestHostnameHelper("i love trailing dots....", "i%20love%20trailing%20dots");
- TestHostnameHelper(".leading", "leading");
- TestHostnameHelper("..leading", "leading");
- TestHostnameHelper(".dots.", "dots");
- TestHostnameHelper(".both.", "both");
- TestHostnameHelper(".both..", "both");
- TestHostnameHelper("..both.", "both");
- TestHostnameHelper("..both..", "both");
- TestHostnameHelper("..a.b.c.d..", "a.b.c.d");
- TestHostnameHelper("..127.0.0.1..", "127.0.0.1");
- TestHostnameHelper("asdf!@#$a", "asdf!@#$a");
- TestHostnameHelper("AB CD 12354", "ab%20cd%2012354");
- TestHostnameHelper("\1\2\3\4\112\177", "%01%02%03%04j%7F");
- TestHostnameHelper("<>.AS/-+", "<>.as/-+");
- // Added in bug 489455. %00 should no longer be changed to %01.
- TestHostnameHelper("%00", "%00");
-}
-
-TEST(UrlClassifierUtils, LongHostname)
-{
- static const int kTestSize = 1024 * 150;
- char *str = static_cast<char*>(malloc(kTestSize + 1));
- memset(str, 'x', kTestSize);
- str[kTestSize] = '\0';
-
- RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils;
- utils->Init();
-
- nsAutoCString out;
- nsDependentCString in(str);
- PRIntervalTime clockStart = PR_IntervalNow();
- utils->CanonicalizeHostname(in, out);
- PRIntervalTime clockEnd = PR_IntervalNow();
-
- CheckEquals(in, out);
-
- printf("CanonicalizeHostname on long string (%dms)\n",
- PR_IntervalToMilliseconds(clockEnd - clockStart));
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp b/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp
deleted file mode 100644
index 9e380a9d3..000000000
--- a/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp
+++ /dev/null
@@ -1,559 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include <mozilla/RefPtr.h>
-#include "nsString.h"
-#include "nsTArray.h"
-#include "nsClassHashtable.h"
-#include "VariableLengthPrefixSet.h"
-#include "nsAppDirectoryServiceDefs.h"
-#include "nsIFile.h"
-#include "gtest/gtest.h"
-
-using namespace mozilla::safebrowsing;
-
-typedef nsCString _Prefix;
-typedef nsTArray<_Prefix> _PrefixArray;
-
-// Create a 32-byte fullhash by appending random characters.
-static nsCString* CreateFullHash(const nsACString& in)
-{
- nsCString* out = new nsCString(in);
- out->SetLength(32);
- for (size_t i = in.Length(); i < 32; i++) {
- out->SetCharAt(char(rand() % 256), i);
- }
-
- return out;
-}
-
-// This function generates N prefixes with sizes between MIN and MAX.
-// The output array is not cleared; the random prefixes are appended to it.
-static void RandomPrefixes(uint32_t N, uint32_t MIN, uint32_t MAX, _PrefixArray& array)
-{
- array.SetCapacity(array.Length() + N);
-
- uint32_t range = (MAX - MIN + 1);
-
- for (uint32_t i = 0; i < N; i++) {
- uint32_t prefixSize = (rand() % range) + MIN;
- _Prefix prefix;
- prefix.SetLength(prefixSize);
-
- bool added = false;
- while(!added) {
- char* dst = prefix.BeginWriting();
- for (uint32_t j = 0; j < prefixSize; j++) {
- dst[j] = rand() % 256;
- }
-
- if (!array.Contains(prefix)) {
- array.AppendElement(prefix);
- added = true;
- }
- }
- }
-}
-
-static void CheckContent(VariableLengthPrefixSet* pset,
- PrefixStringMap& expected)
-{
- PrefixStringMap vlPSetMap;
- pset->GetPrefixes(vlPSetMap);
-
- for (auto iter = vlPSetMap.Iter(); !iter.Done(); iter.Next()) {
- nsCString* expectedPrefix = expected.Get(iter.Key());
- nsCString* resultPrefix = iter.Data();
-
- ASSERT_TRUE(resultPrefix->Equals(*expectedPrefix));
- }
-}
-
-// This helper loops through all the prefixes and converts each prefix to a
-// fullhash by appending random characters; each converted fullhash
-// should match at least its original prefix length in the prefix set.
-static void DoExpectedLookup(VariableLengthPrefixSet* pset,
- _PrefixArray& array)
-{
- uint32_t matchLength = 0;
- for (uint32_t i = 0; i < array.Length(); i++) {
- const nsCString& prefix = array[i];
- UniquePtr<nsCString> fullhash(CreateFullHash(prefix));
-
- // Find match for prefix-generated full hash
- pset->Matches(*fullhash, &matchLength);
- MOZ_ASSERT(matchLength != 0);
-
- if (matchLength != prefix.Length()) {
- // The returned match size is not the same as the prefix size.
- // This can happen when the generated fullhash matches another
- // prefix; check whether such a prefix exists.
- bool found = false;
-
- for (uint32_t j = 0; j < array.Length(); j++) {
- if (array[j].Length() != matchLength) {
- continue;
- }
-
- if (0 == memcmp(fullhash->BeginReading(),
- array[j].BeginReading(),
- matchLength)) {
- found = true;
- break;
- }
- }
- ASSERT_TRUE(found);
- }
- }
-}
-
-static void DoRandomLookup(VariableLengthPrefixSet* pset,
- uint32_t N,
- _PrefixArray& array)
-{
- for (uint32_t i = 0; i < N; i++) {
- // Random 32-byte test fullhash
- char buf[32];
- for (uint32_t j = 0; j < 32; j++) {
- buf[j] = (char)(rand() % 256);
- }
-
- // Get the expected result.
- nsTArray<uint32_t> expected;
- for (uint32_t j = 0; j < array.Length(); j++) {
- const nsACString& str = array[j];
- if (0 == memcmp(buf, str.BeginReading(), str.Length())) {
- expected.AppendElement(str.Length());
- }
- }
-
- uint32_t matchLength = 0;
- pset->Matches(nsDependentCSubstring(buf, 32), &matchLength);
-
- ASSERT_TRUE(expected.IsEmpty() ? !matchLength : expected.Contains(matchLength));
- }
-}
-
-static void SetupPrefixMap(const _PrefixArray& array,
- PrefixStringMap& map)
-{
- map.Clear();
-
- // Buckets are keyed by prefix length and contain an array of
- // all prefixes of that length.
- nsClassHashtable<nsUint32HashKey, _PrefixArray> table;
-
- for (uint32_t i = 0; i < array.Length(); i++) {
- _PrefixArray* prefixes = table.Get(array[i].Length());
- if (!prefixes) {
- prefixes = new _PrefixArray();
- table.Put(array[i].Length(), prefixes);
- }
-
- prefixes->AppendElement(array[i]);
- }
-
- // The resulting map entries will be a concatenation of all
- // prefix data for the prefixes of a given size.
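- // e.g. { "alph", "brav", "echo!" } becomes { 4 -> "alphbrav", 5 -> "echo!" }.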
- for (auto iter = table.Iter(); !iter.Done(); iter.Next()) {
- uint32_t size = iter.Key();
- uint32_t count = iter.Data()->Length();
-
- _Prefix* str = new _Prefix();
- str->SetLength(size * count);
-
- char* dst = str->BeginWriting();
-
- iter.Data()->Sort();
- for (uint32_t i = 0; i < count; i++) {
- memcpy(dst, iter.Data()->ElementAt(i).get(), size);
- dst += size;
- }
-
- map.Put(size, str);
- }
-}
-
-
-// Test setting a prefix set with only 4-byte prefixes
-TEST(VariableLengthPrefixSet, FixedLengthSet)
-{
- srand(time(nullptr));
-
- RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
- pset->Init(NS_LITERAL_CSTRING("test"));
-
- PrefixStringMap map;
- _PrefixArray array = { _Prefix("alph"), _Prefix("brav"), _Prefix("char"),
- _Prefix("delt"), _Prefix("echo"), _Prefix("foxt"),
- };
-
- SetupPrefixMap(array, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array);
-
- DoRandomLookup(pset, 1000, array);
-
- CheckContent(pset, map);
-
- // Run random test
- array.Clear();
- map.Clear();
-
- RandomPrefixes(1500, 4, 4, array);
-
- SetupPrefixMap(array, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array);
-
- DoRandomLookup(pset, 1000, array);
-
- CheckContent(pset, map);
-}
-
-// Test setting a prefix set with only 5- to 32-byte prefixes
-TEST(VariableLengthPrefixSet, VariableLengthSet)
-{
- RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
- pset->Init(NS_LITERAL_CSTRING("test"));
-
- PrefixStringMap map;
- _PrefixArray array = { _Prefix("bravo"), _Prefix("charlie"), _Prefix("delta"),
- _Prefix("EchoEchoEchoEchoEcho"), _Prefix("foxtrot"),
- _Prefix("GolfGolfGolfGolfGolfGolfGolfGolf"),
- _Prefix("hotel"), _Prefix("november"),
- _Prefix("oscar"), _Prefix("quebec"), _Prefix("romeo"),
- _Prefix("sierrasierrasierrasierrasierra"),
- _Prefix("Tango"), _Prefix("whiskey"), _Prefix("yankee"),
- _Prefix("ZuluZuluZuluZulu")
- };
-
- SetupPrefixMap(array, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array);
-
- DoRandomLookup(pset, 1000, array);
-
- CheckContent(pset, map);
-
- // Run random test
- array.Clear();
- map.Clear();
-
- RandomPrefixes(1500, 5, 32, array);
-
- SetupPrefixMap(array, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array);
-
- DoRandomLookup(pset, 1000, array);
-
- CheckContent(pset, map);
-
-}
-
-// Test setting a prefix set with both 4-byte and 5- to 32-byte prefixes
-TEST(VariableLengthPrefixSet, MixedPrefixSet)
-{
- RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
- pset->Init(NS_LITERAL_CSTRING("test"));
-
- PrefixStringMap map;
- _PrefixArray array = { _Prefix("enus"), _Prefix("apollo"), _Prefix("mars"),
- _Prefix("Hecatonchires cyclopes"),
- _Prefix("vesta"), _Prefix("neptunus"), _Prefix("jupiter"),
- _Prefix("diana"), _Prefix("minerva"), _Prefix("ceres"),
- _Prefix("Aidos,Adephagia,Adikia,Aletheia"),
- _Prefix("hecatonchires"), _Prefix("alcyoneus"), _Prefix("hades"),
- _Prefix("vulcanus"), _Prefix("juno"), _Prefix("mercury"),
- _Prefix("Stheno, Euryale and Medusa")
- };
-
- SetupPrefixMap(array, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array);
-
- DoRandomLookup(pset, 1000, array);
-
- CheckContent(pset, map);
-
- // Run random test
- array.Clear();
- map.Clear();
-
- RandomPrefixes(1500, 4, 32, array);
-
- SetupPrefixMap(array, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array);
-
- DoRandomLookup(pset, 1000, array);
-
- CheckContent(pset, map);
-}
-
-// Test resetting prefix set
-TEST(VariableLengthPrefixSet, ResetPrefix)
-{
- RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
- pset->Init(NS_LITERAL_CSTRING("test"));
-
- // First prefix set
- _PrefixArray array1 = { _Prefix("Iceland"), _Prefix("Peru"), _Prefix("Mexico"),
- _Prefix("Australia"), _Prefix("Japan"), _Prefix("Egypt"),
- _Prefix("America"), _Prefix("Finland"), _Prefix("Germany"),
- _Prefix("Italy"), _Prefix("France"), _Prefix("Taiwan"),
- };
- {
- PrefixStringMap map;
-
- SetupPrefixMap(array1, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array1);
- }
-
- // Second
- _PrefixArray array2 = { _Prefix("Pikachu"), _Prefix("Bulbasaur"), _Prefix("Charmander"),
- _Prefix("Blastoise"), _Prefix("Pidgey"), _Prefix("Mewtwo"),
- _Prefix("Jigglypuff"), _Prefix("Persian"), _Prefix("Tentacool"),
- _Prefix("Onix"), _Prefix("Eevee"), _Prefix("Jynx"),
- };
- {
- PrefixStringMap map;
-
- SetupPrefixMap(array2, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array2);
- }
-
- // Should not match any prefix from the first prefix set
- uint32_t matchLength = 0;
- for (uint32_t i = 0; i < array1.Length(); i++) {
- UniquePtr<nsCString> fullhash(CreateFullHash(array1[i]));
-
- pset->Matches(*fullhash, &matchLength);
- ASSERT_TRUE(matchLength == 0);
- }
-}
-
-// Test setting only one 4-byte prefix and one full-length prefix
-TEST(VariableLengthPrefixSet, TinyPrefixSet)
-{
- RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
- pset->Init(NS_LITERAL_CSTRING("test"));
-
- PrefixStringMap map;
- _PrefixArray array = { _Prefix("AAAA"),
- _Prefix("11112222333344445555666677778888"),
- };
-
- SetupPrefixMap(array, map);
- pset->SetPrefixes(map);
-
- DoExpectedLookup(pset, array);
-
- DoRandomLookup(pset, 1000, array);
-
- CheckContent(pset, map);
-}
-
-// Test empty prefix set and IsEmpty function
-TEST(VariableLengthPrefixSet, EmptyPrefixSet)
-{
- RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
- pset->Init(NS_LITERAL_CSTRING("test"));
-
- bool empty;
- pset->IsEmpty(&empty);
- ASSERT_TRUE(empty);
-
- PrefixStringMap map;
- _PrefixArray array1;
-
- // Lookups against an empty prefix set should never match
- DoRandomLookup(pset, 100, array1);
-
- // Insert a 4-byte prefix; IsEmpty should then return false
- _PrefixArray array2 = { _Prefix("test") };
- SetupPrefixMap(array2, map);
- pset->SetPrefixes(map);
-
- pset->IsEmpty(&empty);
- ASSERT_TRUE(!empty);
-
- _PrefixArray array3 = { _Prefix("test variable length") };
-
- // Insert a 5- to 32-byte prefix; IsEmpty should then return false
- SetupPrefixMap(array3, map);
- pset->SetPrefixes(map);
-
- pset->IsEmpty(&empty);
- ASSERT_TRUE(!empty);
-}
-
-// Test that prefix sizes must be between 4 and 32 bytes
-TEST(VariableLengthPrefixSet, MinMaxPrefixSet)
-{
- RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
- pset->Init(NS_LITERAL_CSTRING("test"));
-
- PrefixStringMap map;
- {
- _PrefixArray array = { _Prefix("1234"),
- _Prefix("ABCDEFGHIJKKMNOP"),
- _Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh") };
-
- SetupPrefixMap(array, map);
- nsresult rv = pset->SetPrefixes(map);
- ASSERT_TRUE(rv == NS_OK);
- }
-
- // A prefix size of less than 4 bytes should fail
- {
- _PrefixArray array = { _Prefix("123") };
-
- SetupPrefixMap(array, map);
- nsresult rv = pset->SetPrefixes(map);
- ASSERT_TRUE(NS_FAILED(rv));
- }
-
- // A prefix size of greater than 32 bytes should fail
- {
- _PrefixArray array = { _Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh9") };
-
- SetupPrefixMap(array, map);
- nsresult rv = pset->SetPrefixes(map);
- ASSERT_TRUE(NS_FAILED(rv));
- }
-}
-
-// Test saving and then loading a prefix set with only 4-byte prefixes
-TEST(VariableLengthPrefixSet, LoadSaveFixedLengthPrefixSet)
-{
- RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet;
- save->Init(NS_LITERAL_CSTRING("test-save"));
-
- _PrefixArray array;
- RandomPrefixes(10000, 4, 4, array);
-
- PrefixStringMap map;
- SetupPrefixMap(array, map);
- save->SetPrefixes(map);
-
- DoExpectedLookup(save, array);
-
- DoRandomLookup(save, 1000, array);
-
- CheckContent(save, map);
-
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
- getter_AddRefs(file));
- file->Append(NS_LITERAL_STRING("test.vlpset"));
-
- save->StoreToFile(file);
-
- RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
- load->Init(NS_LITERAL_CSTRING("test-load"));
-
- load->LoadFromFile(file);
-
- DoExpectedLookup(load, array);
-
- DoRandomLookup(load, 1000, array);
-
- CheckContent(load, map);
-
- file->Remove(false);
-}
-
-// Test saving and then loading a prefix set with only 5- to 32-byte prefixes
-TEST(VariableLengthPrefixSet, LoadSaveVariableLengthPrefixSet)
-{
- RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet;
- save->Init(NS_LITERAL_CSTRING("test-save"));
-
- _PrefixArray array;
- RandomPrefixes(10000, 5, 32, array);
-
- PrefixStringMap map;
- SetupPrefixMap(array, map);
- save->SetPrefixes(map);
-
- DoExpectedLookup(save, array);
-
- DoRandomLookup(save, 1000, array);
-
- CheckContent(save, map);
-
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
- getter_AddRefs(file));
- file->Append(NS_LITERAL_STRING("test.vlpset"));
-
- save->StoreToFile(file);
-
- RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
- load->Init(NS_LITERAL_CSTRING("test-load"));
-
- load->LoadFromFile(file);
-
- DoExpectedLookup(load, array);
-
- DoRandomLookup(load, 1000, array);
-
- CheckContent(load, map);
-
- file->Remove(false);
-}
-
-// Test saving and then loading a prefix set with both 4-byte and 5- to 32-byte prefixes
-TEST(VariableLengthPrefixSet, LoadSavePrefixSet)
-{
- RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet;
- save->Init(NS_LITERAL_CSTRING("test-save"));
-
- // Try to simulate the real-world case where most prefixes are 4 bytes
- _PrefixArray array;
- RandomPrefixes(20000, 4, 4, array);
- RandomPrefixes(1000, 5, 32, array);
-
- PrefixStringMap map;
- SetupPrefixMap(array, map);
- save->SetPrefixes(map);
-
- DoExpectedLookup(save, array);
-
- DoRandomLookup(save, 1000, array);
-
- CheckContent(save, map);
-
- nsCOMPtr<nsIFile> file;
- NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
- getter_AddRefs(file));
- file->Append(NS_LITERAL_STRING("test.vlpset"));
-
- save->StoreToFile(file);
-
- RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
- load->Init(NS_LITERAL_CSTRING("test-load"));
-
- load->LoadFromFile(file);
-
- DoExpectedLookup(load, array);
-
- DoRandomLookup(load, 1000, array);
-
- CheckContent(load, map);
-
- file->Remove(false);
-}
diff --git a/toolkit/components/url-classifier/tests/gtest/moz.build b/toolkit/components/url-classifier/tests/gtest/moz.build
deleted file mode 100644
index e66af9024..000000000
--- a/toolkit/components/url-classifier/tests/gtest/moz.build
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-LOCAL_INCLUDES += [
- '../..',
-]
-
-UNIFIED_SOURCES += [
- 'Common.cpp',
- 'TestChunkSet.cpp',
- 'TestFailUpdate.cpp',
- 'TestLookupCacheV4.cpp',
- 'TestPerProviderDirectory.cpp',
- 'TestProtocolParser.cpp',
- 'TestRiceDeltaDecoder.cpp',
- 'TestSafebrowsingHash.cpp',
- 'TestSafeBrowsingProtobuf.cpp',
- 'TestTable.cpp',
- 'TestUrlClassifierTableUpdateV4.cpp',
- 'TestUrlClassifierUtils.cpp',
- 'TestVariableLengthPrefixSet.cpp',
-]
-
-FINAL_LIBRARY = 'xul-gtest'
diff --git a/toolkit/components/url-classifier/tests/jar.mn b/toolkit/components/url-classifier/tests/jar.mn
deleted file mode 100644
index 2264c2896..000000000
--- a/toolkit/components/url-classifier/tests/jar.mn
+++ /dev/null
@@ -1,2 +0,0 @@
-toolkit.jar:
- content/global/url-classifier/unittests.xul (unittests.xul)
diff --git a/toolkit/components/url-classifier/tests/mochitest/.eslintrc.js b/toolkit/components/url-classifier/tests/mochitest/.eslintrc.js
deleted file mode 100644
index 58b3df4a7..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/.eslintrc.js
+++ /dev/null
@@ -1,8 +0,0 @@
-"use strict";
-
-module.exports = {
- "extends": [
- "../../../../../testing/mochitest/mochitest.eslintrc.js",
- "../../../../../testing/mochitest/chrome.eslintrc.js"
- ]
-};
diff --git a/toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html b/toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html
deleted file mode 100644
index 9aae1b841..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html
+++ /dev/null
@@ -1,144 +0,0 @@
-<html>
-<head>
-<title></title>
-
-<script type="text/javascript">
-
-// Modified by evil.js
-var scriptItem;
-
-var scriptItem1 = "untouched";
-var imageItem1 = "untouched";
-var frameItem1 = "untouched";
-var scriptItem2 = "untouched";
-var imageItem2 = "untouched";
-var frameItem2 = "untouched";
-var xhrItem = "untouched";
-var fetchItem = "untouched";
-var mediaItem1 = "untouched";
-
-function checkLoads() {
- window.parent.is(scriptItem1, "spoiled", "Should not block tracking js 1");
- window.parent.is(scriptItem2, "spoiled", "Should not block tracking js 2");
- window.parent.is(imageItem1, "spoiled", "Should not block tracking img 1");
- window.parent.is(imageItem2, "spoiled", "Should not block tracking img 2");
- window.parent.is(frameItem1, "spoiled", "Should not block tracking iframe 1");
- window.parent.is(frameItem2, "spoiled", "Should not block tracking iframe 2");
- window.parent.is(mediaItem1, "loaded", "Should not block tracking video");
- window.parent.is(xhrItem, "loaded", "Should not block tracking XHR");
- window.parent.is(fetchItem, "loaded", "Should not block fetches from tracking domains");
- window.parent.is(window.document.blockedTrackingNodeCount, 0,
- "No elements should be blocked");
-
- // End (parent) test.
- window.parent.clearPermissions();
- window.parent.SimpleTest.finish();
-}
-
-var onloadCalled = false;
-var xhrFinished = false;
-var fetchFinished = false;
-var videoLoaded = false;
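-// checkLoads() runs only once the onload handler, the XHR, the fetch and the
-// video have all reported back.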
-function loaded(type) {
- if (type === "onload") {
- onloadCalled = true;
- } else if (type === "xhr") {
- xhrFinished = true;
- } else if (type === "fetch") {
- fetchFinished = true;
- } else if (type === "video") {
- videoLoaded = true;
- }
-
- if (onloadCalled && xhrFinished && fetchFinished && videoLoaded) {
- checkLoads();
- }
-}
-</script>
-
-</head>
-
-<body onload="loaded('onload')">
-
-<!-- Try loading from a tracking script URI (1) -->
-<script id="badscript1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="scriptItem1 = 'spoiled';"></script>
-
-<!-- Try loading from a tracking image URI (1) -->
-<img id="badimage1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg" onload="imageItem1 = 'spoiled';"/>
-
-<!-- Try loading from a tracking frame URI (1) -->
-<iframe id="badframe1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html" onload="frameItem1 = 'spoiled';"></iframe>
-
-<!-- Try loading from a tracking video URI -->
-<video id="badmedia1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/vp9.webm"></video>
-
-<script>
-var v = document.getElementById("badmedia1");
-v.addEventListener("loadedmetadata", function() {
- mediaItem1 = "loaded";
- loaded("video");
-}, true);
-v.addEventListener("error", function() {
- mediaItem1 = "error";
- loaded("video");
-}, true);
-
-// Try loading from a tracking script URI (2) - The loader may follow a
-// different path depending on whether the resource is loaded from JS or HTML.
-var newScript = document.createElement("script");
-newScript.id = "badscript2";
-newScript.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js";
-newScript.addEventListener("load", function onload() {scriptItem2 = 'spoiled';});
-document.body.appendChild(newScript);
-
-/// Try loading from a tracking image URI (2)
-var newImage = document.createElement("img");
-newImage.id = "badimage2";
-newImage.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg";
-newImage.addEventListener("load", function onload() {imageItem2 = 'spoiled'});
-document.body.appendChild(newImage);
-
-// Try loading from a tracking iframe URI (2)
-var newFrame = document.createElement("iframe");
-newFrame.id = "badframe2";
-newFrame.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html"
-newFrame.addEventListener("load", function onload() {frameItem2 = 'spoiled'});
-document.body.appendChild(newFrame);
-
-// Try doing an XHR against a tracking domain (bug 1216793)
-function reqListener() {
- xhrItem = "loaded";
- loaded("xhr");
-}
-function transferFailed() {
- xhrItem = "failed";
- loaded("xhr");
-}
-function transferCanceled() {
- xhrItem = "canceled";
- loaded("xhr");
-}
-var oReq = new XMLHttpRequest();
-oReq.addEventListener("load", reqListener);
-oReq.addEventListener("error", transferFailed);
-oReq.addEventListener("abort", transferCanceled);
-oReq.open("GET", "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js");
-oReq.send();
-
-// Fetch from a tracking domain
-fetch("http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js").then(function(response) {
- if(response.ok) {
- fetchItem = "loaded";
- loaded("fetch");
- } else {
- fetchItem = "badresponse";
- loaded("fetch");
- }
- }).catch(function(error) {
- fetchItem = "error";
- loaded("fetch");
-});
-</script>
-</body>
-</html>
-
diff --git a/toolkit/components/url-classifier/tests/mochitest/bad.css b/toolkit/components/url-classifier/tests/mochitest/bad.css
deleted file mode 100644
index f57b36a77..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/bad.css
+++ /dev/null
@@ -1 +0,0 @@
-#styleBad { visibility: hidden; }
diff --git a/toolkit/components/url-classifier/tests/mochitest/bad.css^headers^ b/toolkit/components/url-classifier/tests/mochitest/bad.css^headers^
deleted file mode 100644
index 4030ea1d3..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/bad.css^headers^
+++ /dev/null
@@ -1 +0,0 @@
-Cache-Control: no-store
diff --git a/toolkit/components/url-classifier/tests/mochitest/basic.vtt b/toolkit/components/url-classifier/tests/mochitest/basic.vtt
deleted file mode 100644
index 7781790d0..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/basic.vtt
+++ /dev/null
@@ -1,27 +0,0 @@
-WEBVTT
-Region: id=testOne lines=2 width=30%
-Region: id=testTwo lines=4 width=20%
-
-1
-00:00.500 --> 00:00.700 region:testOne
-This
-
-2
-00:01.200 --> 00:02.400 region:testTwo
-Is
-
-2.5
-00:02.000 --> 00:03.500 region:testOne
-(Over here?!)
-
-3
-00:02.710 --> 00:02.910
-A
-
-4
-00:03.217 --> 00:03.989
-Test
-
-5
-00:03.217 --> 00:03.989
-And more!
diff --git a/toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^ b/toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^
deleted file mode 100644
index 23de552c1..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^
+++ /dev/null
@@ -1 +0,0 @@
-Access-Control-Allow-Origin: * \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/mochitest/bug_1281083.html b/toolkit/components/url-classifier/tests/mochitest/bug_1281083.html
deleted file mode 100644
index cd5770177..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/bug_1281083.html
+++ /dev/null
@@ -1,35 +0,0 @@
-<html>
-<head>
-<title></title>
-
-<script type="text/javascript">
-
-var scriptItem = "untouched";
-
-function checkLoads() {
- // Make sure the javascript did not load.
- window.parent.is(scriptItem, "untouched", "Should not load bad javascript");
-
- // Call parent.loadTestFrame again to test classification metadata in HTTP
- // cache entries.
- if (window.parent.firstLoad) {
- window.parent.info("Reloading from cache...");
- window.parent.firstLoad = false;
- window.parent.loadTestFrame();
- return;
- }
-
- // End (parent) test.
- window.parent.SimpleTest.finish();
-}
-
-</script>
-
-<!-- Try loading from a malware javascript URI -->
-<script type="text/javascript" src="http://bug1281083.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js"></script>
-
-</head>
-
-<body onload="checkLoads()">
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/chrome.ini b/toolkit/components/url-classifier/tests/mochitest/chrome.ini
deleted file mode 100644
index 1652e7421..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/chrome.ini
+++ /dev/null
@@ -1,23 +0,0 @@
-[DEFAULT]
-skip-if = os == 'android'
-support-files =
- allowlistAnnotatedFrame.html
- classifiedAnnotatedFrame.html
- classifiedAnnotatedPBFrame.html
- bug_1281083.html
-
-[test_lookup_system_principal.html]
-[test_classified_annotations.html]
-tags = trackingprotection
-skip-if = os == 'linux' && asan # Bug 1202548
-[test_allowlisted_annotations.html]
-tags = trackingprotection
-[test_privatebrowsing_trackingprotection.html]
-tags = trackingprotection
-[test_trackingprotection_bug1157081.html]
-tags = trackingprotection
-[test_trackingprotection_whitelist.html]
-tags = trackingprotection
-[test_safebrowsing_bug1272239.html]
-[test_donottrack.html]
-[test_classifier_changetablepref.html]
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html b/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html
deleted file mode 100644
index 8aab13dd3..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html
+++ /dev/null
@@ -1,213 +0,0 @@
-<html>
-<head>
-<title></title>
-
-<script type="text/javascript">
-"use strict";
-
-var scriptItem = "untouched";
-var scriptItem1 = "untouched";
-var scriptItem2 = "untouched";
-var imageItem1 = "untouched";
-var imageItem2 = "untouched";
-var frameItem1 = "untouched";
-var frameItem2 = "untouched";
-var xhrItem = "untouched";
-var fetchItem = "untouched";
-var mediaItem1 = "untouched";
-
-var badids = [
- "badscript1",
- "badscript2",
- "badimage1",
- "badimage2",
- "badframe1",
- "badframe2",
- "badmedia1",
- "badcss"
-];
-
-function checkLoads() {
- window.parent.is(
- scriptItem1, "untouched", "Should not load tracking javascript");
- window.parent.is(
- scriptItem2, "untouched", "Should not load tracking javascript (2)");
-
- window.parent.is(
- imageItem1, "untouched", "Should not load tracking images");
- window.parent.is(
- imageItem2, "untouched", "Should not load tracking images (2)");
-
- window.parent.is(
- frameItem1, "untouched", "Should not load tracking iframes");
- window.parent.is(
- frameItem2, "untouched", "Should not load tracking iframes (2)");
- window.parent.is(
- mediaItem1, "error", "Should not load tracking videos");
- window.parent.is(
- xhrItem, "failed", "Should not load tracking XHRs");
- window.parent.is(
- fetchItem, "error", "Should not fetch from tracking URLs");
-
- var elt = document.getElementById("styleCheck");
- var style = document.defaultView.getComputedStyle(elt, "");
- window.parent.isnot(
- style.visibility, "hidden", "Should not load tracking css");
-
- window.parent.is(window.document.blockedTrackingNodeCount, badids.length,
- "Should identify all tracking elements");
-
- var blockedTrackingNodes = window.document.blockedTrackingNodes;
-
- // Make sure that every node in blockedTrackingNodes exists in the tree
- // (that may not always be the case, but we do not expect any nodes to
- // disappear from the tree here).
- var allNodeMatch = true;
- for (var i = 0; i < blockedTrackingNodes.length; i++) {
- var nodeMatch = false;
- for (var j = 0; j < badids.length && !nodeMatch; j++) {
- nodeMatch = nodeMatch ||
- (blockedTrackingNodes[i] == document.getElementById(badids[j]));
- }
-
- allNodeMatch = allNodeMatch && nodeMatch;
- }
- window.parent.ok(allNodeMatch,
- "All annotated nodes are expected in the tree");
-
- // Make sure that every node with a badid (see badids) is found in
- // blockedTrackingNodes. This tells us whether we are neglecting to annotate
- // some nodes.
- allNodeMatch = true;
- for (var j = 0; j < badids.length; j++) {
- var nodeMatch = false;
- for (var i = 0; i < blockedTrackingNodes.length && !nodeMatch; i++) {
- nodeMatch = nodeMatch ||
- (blockedTrackingNodes[i] == document.getElementById(badids[j]));
- }
-
- if (!nodeMatch) {
- console.log(badids[j] + " was not found in blockedTrackingNodes");
- }
- allNodeMatch = allNodeMatch && nodeMatch;
- }
- window.parent.ok(allNodeMatch,
- "All tracking nodes are expected to be annotated as such");
-
- // Unset prefs, etc.
- window.parent.cleanup();
- // End (parent) test.
- window.parent.SimpleTest.finish();
-}
-
-var onloadCalled = false;
-var xhrFinished = false;
-var fetchFinished = false;
-var videoLoaded = false;
-function loaded(type) {
- if (type === "onload") {
- onloadCalled = true;
- } else if (type === "xhr") {
- xhrFinished = true;
- } else if (type === "fetch") {
- fetchFinished = true;
- } else if (type === "video") {
- videoLoaded = true;
- }
- if (onloadCalled && xhrFinished && fetchFinished && videoLoaded) {
- checkLoads();
- }
-}
-</script>
-
-<!-- Try loading from a tracking CSS URI -->
-<link id="badcss" rel="stylesheet" type="text/css" href="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
-
-</head>
-
-<body onload="loaded('onload')">
-
-<!-- Try loading from a tracking script URI (1): evil.js onload will have updated the scriptItem variable -->
-<script id="badscript1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="scriptItem1 = scriptItem;"></script>
-
-<!-- Try loading from a tracking image URI (1) -->
-<img id="badimage1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?reload=true" onload="imageItem1 = 'spoiled';"/>
-
-<!-- Try loading from a tracking frame URI (1) -->
-<iframe id="badframe1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html" onload="frameItem1 = 'spoiled';"></iframe>
-
-<!-- Try loading from a tracking video URI -->
-<video id="badmedia1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/vp9.webm?reload=true"></video>
-
-<script>
-var v = document.getElementById("badmedia1");
-v.addEventListener("loadedmetadata", function() {
- mediaItem1 = "loaded";
- loaded("video");
-}, true);
-v.addEventListener("error", function() {
- mediaItem1 = "error";
- loaded("video");
-}, true);
-
-// Try loading from a tracking script URI (2) - The loader may follow a different path depending on whether the resource is loaded from JS or HTML.
-var newScript = document.createElement("script");
-newScript.id = "badscript2";
-newScript.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js";
-newScript.addEventListener("load", function() {scriptItem2 = scriptItem;});
-document.body.appendChild(newScript);
-
-/// Try loading from a tracking image URI (2)
-var newImage = document.createElement("img");
-newImage.id = "badimage2";
-newImage.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?reload=true";
-newImage.addEventListener("load", function() {imageItem2 = 'spoiled'});
-document.body.appendChild(newImage);
-
-// Try loading from a tracking iframe URI (2)
-var newFrame = document.createElement("iframe");
-newFrame.id = "badframe2";
-newFrame.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html"
-newFrame.addEventListener("load", function() {frameItem2 = 'spoiled'});
-document.body.appendChild(newFrame);
-
-// Try doing an XHR against a tracking domain (bug 1216793)
-function reqListener() {
- xhrItem = "loaded";
- loaded("xhr");
-}
-function transferFailed() {
- xhrItem = "failed";
- loaded("xhr");
-}
-function transferCanceled() {
- xhrItem = "canceled";
- loaded("xhr");
-}
-var oReq = new XMLHttpRequest();
-oReq.addEventListener("load", reqListener);
-oReq.addEventListener("error", transferFailed);
-oReq.addEventListener("abort", transferCanceled);
-oReq.open("GET", "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js");
-oReq.send();
-
-// Fetch from a tracking domain
-fetch("http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js").then(function(response) {
- if(response.ok) {
- fetchItem = "loaded";
- loaded("fetch");
- } else {
- fetchItem = "badresponse";
- loaded("fetch");
- }
- }).catch(function(error) {
- fetchItem = "error";
- loaded("fetch");
-});
-</script>
-
-The following should not be hidden:
-<div id="styleCheck">STYLE TEST</div>
-
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html b/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html
deleted file mode 100644
index f11ec1de3..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html
+++ /dev/null
@@ -1,24 +0,0 @@
-<!DOCTYPE HTML>
-<!-- Any copyright is dedicated to the Public Domain.
- http://creativecommons.org/publicdomain/zero/1.0/ -->
-<html>
-<head>
-<title></title>
-
-<link id="badcss" rel="stylesheet" type="text/css" href="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
-
-</head>
-<body>
-
-<script id="badscript" data-touched="not sure" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
-
-<script id="goodscript" data-touched="not sure" src="http://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/good.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
-
-<!-- The image cache can cache JS handlers, so make sure we use a different URL for raptor.jpg each time -->
-<img id="badimage" data-touched="not sure" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?pbmode=test" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"/>
-
-The following should not be hidden:
-<div id="styleCheck">STYLE TEST</div>
-
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifierCommon.js b/toolkit/components/url-classifier/tests/mochitest/classifierCommon.js
deleted file mode 100644
index 49bda38db..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/classifierCommon.js
+++ /dev/null
@@ -1,112 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-const { classes: Cc, interfaces: Ci, results: Cr } = Components;
-
-var dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
- .getService(Ci.nsIUrlClassifierDBService);
-
-var timer;
-function setTimeout(callback, delay) {
- timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
- timer.initWithCallback({ notify: callback },
- delay,
- Ci.nsITimer.TYPE_ONE_SHOT);
-}
-
-function doUpdate(update) {
- let listener = {
- QueryInterface: function(iid)
- {
- if (iid.equals(Ci.nsISupports) ||
- iid.equals(Ci.nsIUrlClassifierUpdateObserver))
- return this;
-
- throw Cr.NS_ERROR_NO_INTERFACE;
- },
- updateUrlRequested: function(url) { },
- streamFinished: function(status) { },
- updateError: function(errorCode) {
- sendAsyncMessage("updateError", errorCode);
- },
- updateSuccess: function(requestedTimeout) {
- sendAsyncMessage("updateSuccess");
- }
- };
-
- let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
- .getService(Ci.nsIUrlClassifierDBService);
-
- try {
- dbService.beginUpdate(listener, "test-malware-simple,test-unwanted-simple", "");
- dbService.beginStream("", "");
- dbService.updateStream(update);
- dbService.finishStream();
- dbService.finishUpdate();
- } catch(e) {
-    // beginUpdate may fail if there's an existing update in progress;
-    // retry until it succeeds or the testcase times out.
- setTimeout(() => { doUpdate(update); }, 1000);
- }
-}
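-
-// For reference, "update" is a raw update string in the simple-list protocol
-// used by these tests. A minimal illustrative payload for one add chunk (the
-// list name, chunk number and data below are example values only) could be:
-//
-//   "n:1000\n" +
-//   "i:test-malware-simple\n" +
-//   "ad:1\n" +
-//   "a:524:32:20\n" +
-//   "malware.example.com/"
-//
-// classifierHelper.addUrlToDB builds strings of exactly this shape.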
-
-function doReload() {
- dbService.reloadDatabase();
-
- sendAsyncMessage("reloadSuccess");
-}
-
-// SafeBrowsing.jsm is initialized after mozEntries are added. Add an observer
-// to receive the "finished" event. For the case where this function is called
-// after the event has already been notified, we look up the entries to see if
-// they have already been added to the database.
-function waitForInit() {
- let observerService = Cc["@mozilla.org/observer-service;1"]
- .getService(Ci.nsIObserverService);
-
- observerService.addObserver(function() {
- sendAsyncMessage("safeBrowsingInited");
- }, "mozentries-update-finished", false);
-
-  // This url must stay in sync with the table and url used in SafeBrowsing.jsm's addMozEntries.
- const table = "test-phish-simple";
- const url = "http://itisatrap.org/firefox/its-a-trap.html";
-
- let secMan = Cc["@mozilla.org/scriptsecuritymanager;1"]
- .getService(Ci.nsIScriptSecurityManager);
- let iosvc = Cc["@mozilla.org/network/io-service;1"]
- .getService(Ci.nsIIOService);
-
- let principal = secMan.createCodebasePrincipal(
- iosvc.newURI(url, null, null), {});
-
- let listener = {
- QueryInterface: function(iid)
- {
- if (iid.equals(Ci.nsISupports) ||
- iid.equals(Ci.nsIUrlClassifierUpdateObserver))
- return this;
- throw Cr.NS_ERROR_NO_INTERFACE;
- },
-
- handleEvent: function(value)
- {
- if (value === table) {
- sendAsyncMessage("safeBrowsingInited");
- }
- },
- };
- dbService.lookup(principal, table, listener);
-}
-
-addMessageListener("doUpdate", ({ testUpdate }) => {
- doUpdate(testUpdate);
-});
-
-addMessageListener("doReload", () => {
- doReload();
-});
-
-addMessageListener("waitForInit", () => {
- waitForInit();
-});
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifierFrame.html b/toolkit/components/url-classifier/tests/mochitest/classifierFrame.html
deleted file mode 100644
index c7923f448..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/classifierFrame.html
+++ /dev/null
@@ -1,57 +0,0 @@
-<html>
-<head>
-<title></title>
-
-<script type="text/javascript">
-
-var scriptItem = "untouched";
-
-function checkLoads() {
- // Make sure the javascript did not load.
- window.parent.is(scriptItem, "untouched", "Should not load bad javascript");
-
- // Make sure the css did not load.
- var elt = document.getElementById("styleCheck");
- var style = document.defaultView.getComputedStyle(elt, "");
- window.parent.isnot(style.visibility, "hidden", "Should not load bad css");
-
- elt = document.getElementById("styleBad");
- style = document.defaultView.getComputedStyle(elt, "");
- window.parent.isnot(style.visibility, "hidden", "Should not load bad css");
-
- elt = document.getElementById("styleImport");
- style = document.defaultView.getComputedStyle(elt, "");
- window.parent.isnot(style.visibility, "visible", "Should import clean css");
-
- // Call parent.loadTestFrame again to test classification metadata in HTTP
- // cache entries.
- if (window.parent.firstLoad) {
- window.parent.info("Reloading from cache...");
- window.parent.firstLoad = false;
- window.parent.loadTestFrame();
- return;
- }
-
- // End (parent) test.
- window.parent.SimpleTest.finish();
-}
-
-</script>
-
-<!-- Try loading from a malware javascript URI -->
-<script type="text/javascript" src="http://malware.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js"></script>
-
-<!-- Try loading from an unwanted software css URI -->
-<link rel="stylesheet" type="text/css" href="http://unwanted.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
-
-<!-- Try loading a marked-as-malware css through an @import from a clean URI -->
-<link rel="stylesheet" type="text/css" href="import.css"></link>
-</head>
-
-<body onload="checkLoads()">
-The following should not be hidden:
-<div id="styleCheck">STYLE TEST</div>
-<div id="styleBad">STYLE BAD</div>
-<div id="styleImport">STYLE IMPORT</div>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifierHelper.js b/toolkit/components/url-classifier/tests/mochitest/classifierHelper.js
deleted file mode 100644
index 973f0c2c4..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/classifierHelper.js
+++ /dev/null
@@ -1,201 +0,0 @@
-if (typeof(classifierHelper) == "undefined") {
- var classifierHelper = {};
-}
-
-const CLASSIFIER_COMMON_URL = SimpleTest.getTestFileURL("classifierCommon.js");
-var gScript = SpecialPowers.loadChromeScript(CLASSIFIER_COMMON_URL);
-
-const ADD_CHUNKNUM = 524;
-const SUB_CHUNKNUM = 523;
-const HASHLEN = 32;
-
-const PREFS = {
- PROVIDER_LISTS : "browser.safebrowsing.provider.mozilla.lists",
- DISALLOW_COMPLETIONS : "urlclassifier.disallow_completions",
- PROVIDER_GETHASHURL : "browser.safebrowsing.provider.mozilla.gethashURL"
-};
-
-// addUrlToDB & removeUrlFromDB are asynchronous; queue the tasks to ensure
-// the callbacks run in the correct order.
-classifierHelper._updates = [];
-
-// Keep track of the urls added to the database; they should be automatically
-// removed after the test completes.
-classifierHelper._updatesToCleanup = [];
-
-classifierHelper._initsCB = [];
-
-// This function returns a Promise that is resolved once SafeBrowsing.jsm
-// is initialized.
-classifierHelper.waitForInit = function() {
- return new Promise(function(resolve, reject) {
- classifierHelper._initsCB.push(resolve);
- gScript.sendAsyncMessage("waitForInit");
- });
-}
-
-// This function allows completions (gethash requests) for the given lists;
-// some lists, such as "test-malware-simple", are disallowed from completions
-// by default.
-// "lists" is the array of databases to allow completions for.
-// "url" is the completion (gethash) server.
-classifierHelper.allowCompletion = function(lists, url) {
- for (var list of lists) {
- // Add test db to provider
- var pref = SpecialPowers.getCharPref(PREFS.PROVIDER_LISTS);
- pref += "," + list;
- SpecialPowers.setCharPref(PREFS.PROVIDER_LISTS, pref);
-
- // Rename test db so we will not disallow it from completions
- pref = SpecialPowers.getCharPref(PREFS.DISALLOW_COMPLETIONS);
- pref = pref.replace(list, list + "-backup");
- SpecialPowers.setCharPref(PREFS.DISALLOW_COMPLETIONS, pref);
- }
-
- // Set get hash url
- SpecialPowers.setCharPref(PREFS.PROVIDER_GETHASHURL, url);
-}
-
-// Pass an array of { url: ..., db: ... } entries to add urls to the database;
-// the returned promise is resolved/rejected when the update completes.
-classifierHelper.addUrlToDB = function(updateData) {
- return new Promise(function(resolve, reject) {
- var testUpdate = "";
- for (var update of updateData) {
- var LISTNAME = update.db;
- var CHUNKDATA = update.url;
- var CHUNKLEN = CHUNKDATA.length;
- var HASHLEN = update.len ? update.len : 32;
-
- classifierHelper._updatesToCleanup.push(update);
- testUpdate +=
- "n:1000\n" +
- "i:" + LISTNAME + "\n" +
- "ad:1\n" +
- "a:" + ADD_CHUNKNUM + ":" + HASHLEN + ":" + CHUNKLEN + "\n" +
- CHUNKDATA;
- }
-
- classifierHelper._update(testUpdate, resolve, reject);
- });
-}
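-
-// Illustrative usage (the host and list name below mirror the values used by
-// test_classifier.html and are examples, not requirements of this helper):
-//
-//   classifierHelper.addUrlToDB([
-//     { url: "malware.example.com/", db: "test-malware-simple" }
-//   ]).then(() => {
-//     // the url is now classified until the test's cleanup/resetDB runs
-//   });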
-
-// Pass an array of { url: ..., db: ... } entries to remove urls from the
-// database; the returned promise is resolved/rejected when the update completes.
-classifierHelper.removeUrlFromDB = function(updateData) {
- return new Promise(function(resolve, reject) {
- var testUpdate = "";
- for (var update of updateData) {
- var LISTNAME = update.db;
- var CHUNKDATA = ADD_CHUNKNUM + ":" + update.url;
- var CHUNKLEN = CHUNKDATA.length;
- var HASHLEN = update.len ? update.len : 32;
-
- testUpdate +=
- "n:1000\n" +
- "i:" + LISTNAME + "\n" +
- "s:" + SUB_CHUNKNUM + ":" + HASHLEN + ":" + CHUNKLEN + "\n" +
- CHUNKDATA;
- }
-
- classifierHelper._updatesToCleanup =
- classifierHelper._updatesToCleanup.filter((v) => {
- return updateData.indexOf(v) == -1;
- });
-
- classifierHelper._update(testUpdate, resolve, reject);
- });
-};
-
-// This API is used to expire all add/sub chunks we have updated
-// by using addUrlToDB and removeUrlFromDB.
-classifierHelper.resetDB = function() {
- return new Promise(function(resolve, reject) {
- var testUpdate = "";
- for (var update of classifierHelper._updatesToCleanup) {
- if (testUpdate.includes(update.db))
- continue;
-
- testUpdate +=
- "n:1000\n" +
- "i:" + update.db + "\n" +
- "ad:" + ADD_CHUNKNUM + "\n" +
- "sd:" + SUB_CHUNKNUM + "\n"
- }
-
- classifierHelper._update(testUpdate, resolve, reject);
- });
-};
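-
-// For a single list named "test-malware-simple", the expiry update built above
-// would look like the following (524/523 are the ADD_CHUNKNUM/SUB_CHUNKNUM
-// constants defined at the top of this file):
-//
-//   "n:1000\n" +
-//   "i:test-malware-simple\n" +
-//   "ad:524\n" +
-//   "sd:523\n"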
-
-classifierHelper.reloadDatabase = function() {
- return new Promise(function(resolve, reject) {
- gScript.addMessageListener("reloadSuccess", function handler() {
- gScript.removeMessageListener('reloadSuccess', handler);
- resolve();
- });
-
- gScript.sendAsyncMessage("doReload");
- });
-}
-
-classifierHelper._update = function(testUpdate, onsuccess, onerror) {
- // Queue the task if there is still an on-going update
- classifierHelper._updates.push({"data": testUpdate,
- "onsuccess": onsuccess,
- "onerror": onerror});
- if (classifierHelper._updates.length != 1) {
- return;
- }
-
- gScript.sendAsyncMessage("doUpdate", { testUpdate });
-};
-
-classifierHelper._updateSuccess = function() {
- var update = classifierHelper._updates.shift();
- update.onsuccess();
-
- if (classifierHelper._updates.length) {
- var testUpdate = classifierHelper._updates[0].data;
- gScript.sendAsyncMessage("doUpdate", { testUpdate });
- }
-};
-
-classifierHelper._updateError = function(errorCode) {
- var update = classifierHelper._updates.shift();
- update.onerror(errorCode);
-
- if (classifierHelper._updates.length) {
- var testUpdate = classifierHelper._updates[0].data;
- gScript.sendAsyncMessage("doUpdate", { testUpdate });
- }
-};
-
-classifierHelper._inited = function() {
- classifierHelper._initsCB.forEach(function (cb) {
- cb();
- });
- classifierHelper._initsCB = [];
-};
-
-classifierHelper._setup = function() {
- gScript.addMessageListener("updateSuccess", classifierHelper._updateSuccess);
- gScript.addMessageListener("updateError", classifierHelper._updateError);
- gScript.addMessageListener("safeBrowsingInited", classifierHelper._inited);
-
-  // cleanup will be called at the end of each testcase to remove all the urls added to the database.
- SimpleTest.registerCleanupFunction(classifierHelper._cleanup);
-};
-
-classifierHelper._cleanup = function() {
-  // Clear all the preferences the helper may have touched.
- for (var pref in PREFS) {
-    SpecialPowers.clearUserPref(PREFS[pref]);
- }
-
- if (!classifierHelper._updatesToCleanup) {
- return Promise.resolve();
- }
-
- return classifierHelper.resetDB();
-};
-
-classifierHelper._setup();
diff --git a/toolkit/components/url-classifier/tests/mochitest/cleanWorker.js b/toolkit/components/url-classifier/tests/mochitest/cleanWorker.js
deleted file mode 100644
index 685648373..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/cleanWorker.js
+++ /dev/null
@@ -1,10 +0,0 @@
-onmessage = function() {
- try {
- importScripts("evilWorker.js");
- } catch(ex) {
- postMessage("success");
- return;
- }
-
- postMessage("failure");
-};
diff --git a/toolkit/components/url-classifier/tests/mochitest/dnt.html b/toolkit/components/url-classifier/tests/mochitest/dnt.html
deleted file mode 100644
index effc3a4f8..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/dnt.html
+++ /dev/null
@@ -1,31 +0,0 @@
-<html>
-<head>
-<title></title>
-
-<script type="text/javascript">
-
-function makeXHR(url, callback) {
- var xhr = new XMLHttpRequest();
- xhr.open('GET', url, true);
- xhr.onload = function() {
- callback(xhr.response);
- };
- xhr.send();
-}
-
-function loaded(type) {
- window.parent.postMessage("navigator.doNotTrack=" + navigator.doNotTrack, "*");
-
- makeXHR("dnt.sjs", (res) => {
- window.parent.postMessage("DNT=" + res, "*");
- window.parent.postMessage("finish", "*");
- });
-}
-
-</script>
-</head>
-
-<body onload="loaded('onload')">
-</body>
-
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/dnt.sjs b/toolkit/components/url-classifier/tests/mochitest/dnt.sjs
deleted file mode 100644
index bbb836482..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/dnt.sjs
+++ /dev/null
@@ -1,9 +0,0 @@
-function handleRequest(request, response) {
- var dnt = "unspecified";
- if (request.hasHeader("DNT")) {
- dnt = "1";
- }
-
- response.setHeader("Content-Type", "text/plain", false);
- response.write(dnt);
-}
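-
-// Round trip supported by this handler: a request carrying a DNT header gets
-// the body "1" back, while a request without the header gets "unspecified"
-// (see the makeXHR("dnt.sjs", ...) call in dnt.html).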
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.css b/toolkit/components/url-classifier/tests/mochitest/evil.css
deleted file mode 100644
index f6f08d7c5..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/evil.css
+++ /dev/null
@@ -1 +0,0 @@
-#styleCheck { visibility: hidden; } \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.css^headers^ b/toolkit/components/url-classifier/tests/mochitest/evil.css^headers^
deleted file mode 100644
index 4030ea1d3..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/evil.css^headers^
+++ /dev/null
@@ -1 +0,0 @@
-Cache-Control: no-store
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.js b/toolkit/components/url-classifier/tests/mochitest/evil.js
deleted file mode 100644
index 27f2e8c43..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/evil.js
+++ /dev/null
@@ -1 +0,0 @@
-scriptItem = "loaded malware javascript!";
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.js^headers^ b/toolkit/components/url-classifier/tests/mochitest/evil.js^headers^
deleted file mode 100644
index 3eced9614..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/evil.js^headers^
+++ /dev/null
@@ -1,2 +0,0 @@
-Access-Control-Allow-Origin: *
-Cache-Control: no-store
diff --git a/toolkit/components/url-classifier/tests/mochitest/evilWorker.js b/toolkit/components/url-classifier/tests/mochitest/evilWorker.js
deleted file mode 100644
index ac34977d7..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/evilWorker.js
+++ /dev/null
@@ -1,3 +0,0 @@
-onmessage = function() {
- postMessage("loaded bad file");
-}
diff --git a/toolkit/components/url-classifier/tests/mochitest/gethash.sjs b/toolkit/components/url-classifier/tests/mochitest/gethash.sjs
deleted file mode 100644
index 9dcc6e0d5..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/gethash.sjs
+++ /dev/null
@@ -1,130 +0,0 @@
-const CC = Components.Constructor;
-const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
- "nsIBinaryInputStream",
- "setInputStream");
-
-function handleRequest(request, response)
-{
- var query = {};
- request.queryString.split('&').forEach(function (val) {
- var idx = val.indexOf('=');
- query[val.slice(0, idx)] = unescape(val.slice(idx + 1));
- });
-
- var responseBody;
-
-  // Store the fullhash on the server side.
-  if ("list" in query && "fullhash" in query) {
-    // On the server side we store:
-    // 1. All the full hashes for a given list
-    // 2. All the lists we have right now
-    // Entries are separated by '\n'.
- let list = query["list"];
- let hashes = getState(list);
-
- let hash = base64ToString(query["fullhash"]);
- hashes += hash + "\n";
- setState(list, hashes);
-
- let lists = getState("lists");
- if (lists.indexOf(list) == -1) {
- lists += list + "\n";
- setState("lists", lists);
- }
-
- return;
-  // "gethashcount" returns how many gethash requests have been received.
-  // The client uses this to know whether a gethash request was triggered by gecko.
- } else if ("gethashcount" == request.queryString) {
- var counter = getState("counter");
- responseBody = counter == "" ? "0" : counter;
- } else {
- var body = new BinaryInputStream(request.bodyInputStream);
- var avail;
- var bytes = [];
-
- while ((avail = body.available()) > 0) {
- Array.prototype.push.apply(bytes, body.readByteArray(avail));
- }
-
- var counter = getState("counter");
- counter = counter == "" ? "1" : (parseInt(counter) + 1).toString();
- setState("counter", counter);
-
- responseBody = parseV2Request(bytes);
- }
-
- response.setHeader("Content-Type", "text/plain", false);
- response.write(responseBody);
-
-}
-
-function parseV2Request(bytes) {
- var request = String.fromCharCode.apply(this, bytes);
- var [HEADER, PREFIXES] = request.split("\n");
- var [PREFIXSIZE, LENGTH] = HEADER.split(":").map(val => {
- return parseInt(val);
- });
-
- var ret = "";
- for(var start = 0; start < LENGTH; start += PREFIXSIZE) {
- getState("lists").split("\n").forEach(function(list) {
- var completions = getState(list).split("\n");
-
- for (var completion of completions) {
- if (completion.indexOf(PREFIXES.substr(start, PREFIXSIZE)) == 0) {
- ret += list + ":" + "1" + ":" + "32" + "\n";
- ret += completion;
- }
- }
- });
- }
-
- return ret;
-}
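-
-// Illustrative V2 gethash exchange handled above (hypothetical single stored
-// completion for "test-malware-simple"):
-//
-//   request body:  "4:4\n" + <one 4-byte prefix>
-//   response body: "test-malware-simple:1:32\n" + <the stored 32-byte full hash>
-//
-// i.e. the request header is "PREFIXSIZE:LENGTH", and every stored completion
-// starting with a requested prefix is echoed back as "<list>:1:32" plus the hash.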
-
-/* Convert Base64 data to a string */
-const toBinaryTable = [
- -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
- -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
- -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,62, -1,-1,-1,63,
- 52,53,54,55, 56,57,58,59, 60,61,-1,-1, -1, 0,-1,-1,
- -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14,
- 15,16,17,18, 19,20,21,22, 23,24,25,-1, -1,-1,-1,-1,
- -1,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40,
- 41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1
-];
-const base64Pad = '=';
-
-function base64ToString(data) {
- var result = '';
- var leftbits = 0; // number of bits decoded, but yet to be appended
- var leftdata = 0; // bits decoded, but yet to be appended
-
- // Convert one by one.
- for (var i = 0; i < data.length; i++) {
- var c = toBinaryTable[data.charCodeAt(i) & 0x7f];
- var padding = (data[i] == base64Pad);
- // Skip illegal characters and whitespace
- if (c == -1) continue;
-
- // Collect data into leftdata, update bitcount
- leftdata = (leftdata << 6) | c;
- leftbits += 6;
-
- // If we have 8 or more bits, append 8 bits to the result
- if (leftbits >= 8) {
- leftbits -= 8;
- // Append if not padding.
- if (!padding)
- result += String.fromCharCode((leftdata >> leftbits) & 0xff);
- leftdata &= (1 << leftbits) - 1;
- }
- }
-
- // If there are any bits left, the base64 string was corrupted
- if (leftbits)
- throw Components.Exception('Corrupted base64 string');
-
- return result;
-}
diff --git a/toolkit/components/url-classifier/tests/mochitest/gethashFrame.html b/toolkit/components/url-classifier/tests/mochitest/gethashFrame.html
deleted file mode 100644
index 560ddcde6..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/gethashFrame.html
+++ /dev/null
@@ -1,62 +0,0 @@
-<html>
-<head>
-<title></title>
-
-<script type="text/javascript">
-
-var scriptItem = "untouched";
-
-function checkLoads() {
-
- var title = document.getElementById("title");
- title.innerHTML = window.parent.shouldLoad ?
- "The following should be hidden:" :
- "The following should not be hidden:"
-
- if (window.parent.shouldLoad) {
- window.parent.is(scriptItem, "loaded malware javascript!", "Should load bad javascript");
- } else {
- window.parent.is(scriptItem, "untouched", "Should not load bad javascript");
- }
-
- var elt = document.getElementById("styleImport");
- var style = document.defaultView.getComputedStyle(elt, "");
- window.parent.isnot(style.visibility, "visible", "Should load clean css");
-
- // Make sure the css did not load.
- elt = document.getElementById("styleCheck");
- style = document.defaultView.getComputedStyle(elt, "");
- if (window.parent.shouldLoad) {
- window.parent.isnot(style.visibility, "visible", "Should load bad css");
- } else {
- window.parent.isnot(style.visibility, "hidden", "Should not load bad css");
- }
-
- elt = document.getElementById("styleBad");
- style = document.defaultView.getComputedStyle(elt, "");
- if (window.parent.shouldLoad) {
- window.parent.isnot(style.visibility, "visible", "Should import bad css");
- } else {
- window.parent.isnot(style.visibility, "hidden", "Should not import bad css");
- }
-}
-
-</script>
-
-<!-- Try loading from a malware javascript URI -->
-<script type="text/javascript" src="http://malware.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js"></script>
-
-<!-- Try loading from an unwanted software css URI -->
-<link rel="stylesheet" type="text/css" href="http://unwanted.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
-
-<!-- Try loading a marked-as-malware css through an @import from a clean URI -->
-<link rel="stylesheet" type="text/css" href="import.css"></link>
-</head>
-
-<body onload="checkLoads()">
-<div id="title"></div>
-<div id="styleCheck">STYLE EVIL</div>
-<div id="styleBad">STYLE BAD</div>
-<div id="styleImport">STYLE IMPORT</div>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/good.js b/toolkit/components/url-classifier/tests/mochitest/good.js
deleted file mode 100644
index 015b9fe52..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/good.js
+++ /dev/null
@@ -1 +0,0 @@
-scriptItem = "loaded whitelisted javascript!";
diff --git a/toolkit/components/url-classifier/tests/mochitest/import.css b/toolkit/components/url-classifier/tests/mochitest/import.css
deleted file mode 100644
index 9b86c8216..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/import.css
+++ /dev/null
@@ -1,3 +0,0 @@
-/* malware.example.com is in the malware database. */
-@import url("http://malware.example.com/tests/toolkit/components/url-classifier/tests/mochitest/bad.css");
-#styleImport { visibility: hidden; }
diff --git a/toolkit/components/url-classifier/tests/mochitest/mochitest.ini b/toolkit/components/url-classifier/tests/mochitest/mochitest.ini
deleted file mode 100644
index c5679e86b..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/mochitest.ini
+++ /dev/null
@@ -1,39 +0,0 @@
-[DEFAULT]
-support-files =
- classifiedAnnotatedPBFrame.html
- classifierCommon.js
- classifierFrame.html
- classifierHelper.js
- cleanWorker.js
- good.js
- evil.css
- evil.css^headers^
- evil.js
- evil.js^headers^
- evilWorker.js
- import.css
- raptor.jpg
- track.html
- unwantedWorker.js
- vp9.webm
- whitelistFrame.html
- workerFrame.html
- ping.sjs
- basic.vtt
- basic.vtt^headers^
- dnt.html
- dnt.sjs
- update.sjs
- bad.css
- bad.css^headers^
- gethash.sjs
- gethashFrame.html
- seek.webm
-
-[test_classifier.html]
-skip-if = (os == 'linux' && debug) #Bug 1199778
-[test_classifier_worker.html]
-[test_classify_ping.html]
-[test_classify_track.html]
-[test_gethash.html]
-[test_bug1254766.html]
diff --git a/toolkit/components/url-classifier/tests/mochitest/ping.sjs b/toolkit/components/url-classifier/tests/mochitest/ping.sjs
deleted file mode 100644
index 37a78956e..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/ping.sjs
+++ /dev/null
@@ -1,16 +0,0 @@
-function handleRequest(request, response)
-{
- var query = {};
- request.queryString.split('&').forEach(function (val) {
- var [name, value] = val.split('=');
- query[name] = unescape(value);
- });
-
- if (request.method == "POST") {
- setState(query["id"], "ping");
- } else {
- var value = getState(query["id"]);
- response.setHeader("Content-Type", "text/plain", false);
- response.write(value);
- }
-}
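-
-// Usage sketch (mirrors test_classify_ping.html): an <a ping> POST to
-// "ping.sjs?id=1111" stores the string "ping" under that id, and a later GET
-// to "ping.sjs?id=1111" returns "ping", so the test can tell whether the ping
-// actually reached the server.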
diff --git a/toolkit/components/url-classifier/tests/mochitest/raptor.jpg b/toolkit/components/url-classifier/tests/mochitest/raptor.jpg
deleted file mode 100644
index 243ba9e2d..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/raptor.jpg
+++ /dev/null
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/mochitest/seek.webm b/toolkit/components/url-classifier/tests/mochitest/seek.webm
deleted file mode 100644
index 72b029723..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/seek.webm
+++ /dev/null
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html b/toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html
deleted file mode 100644
index ba9c86f95..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html
+++ /dev/null
@@ -1,56 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Test the URI Classifier</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-Components.utils.import("resource://testing-common/UrlClassifierTestUtils.jsm");
-
-// Add https://allowlisted.example.com to the permissions manager
-SpecialPowers.addPermission("trackingprotection",
- Ci.nsIPermissionManager.ALLOW_ACTION,
- { url: "https://allowlisted.example.com" });
-
-function clearPermissions() {
- SpecialPowers.removePermission("trackingprotection",
- { url: "https://allowlisted.example.com" });
- ok(!SpecialPowers.testPermission("trackingprotection",
- Ci.nsIPermissionManager.ALLOW_ACTION,
- { url: "https://allowlisted.example.com" }));
-}
-
-SpecialPowers.pushPrefEnv(
- {"set" : [["urlclassifier.trackingTable", "test-track-simple"],
- ["privacy.trackingprotection.enabled", true],
- ["channelclassifier.allowlist_example", true]]},
- test);
-
-function test() {
- SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
- UrlClassifierTestUtils.addTestTrackers().then(() => {
- document.getElementById("testFrame").src = "allowlistAnnotatedFrame.html";
- });
-}
-
-// Expected finish() call is in "allowlistedAnnotatedFrame.html".
-SimpleTest.waitForExplicitFinish();
-
-</script>
-
-</pre>
-<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html b/toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html
deleted file mode 100644
index 1c149406a..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html
+++ /dev/null
@@ -1,305 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Bug 1272239 - Test gethash.</title>
- <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
- <script type="text/javascript" src="classifierHelper.js"></script>
- <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-const MALWARE_LIST = "test-malware-simple";
-const MALWARE_HOST1 = "malware.example.com/";
-const MALWARE_HOST2 = "test1.example.com/";
-
-const UNWANTED_LIST = "test-unwanted-simple";
-const UNWANTED_HOST1 = "unwanted.example.com/";
-const UNWANTED_HOST2 = "test2.example.com/";
-
-
-const UNUSED_MALWARE_HOST = "unused.malware.com/";
-const UNUSED_UNWANTED_HOST = "unused.unwanted.com/";
-
-const GETHASH_URL =
- "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/gethash.sjs";
-
-var gPreGethashCounter = 0;
-var gCurGethashCounter = 0;
-
-var expectLoad = false;
-
-function loadTestFrame() {
- return new Promise(function(resolve, reject) {
- var iframe = document.createElement("iframe");
- iframe.setAttribute("src", "gethashFrame.html");
- document.body.appendChild(iframe);
-
- iframe.onload = function() {
- document.body.removeChild(iframe);
- resolve();
- };
- }).then(getGethashCounter);
-}
-
-function getGethashCounter() {
- return new Promise(function(resolve, reject) {
- var xhr = new XMLHttpRequest;
- xhr.open("PUT", GETHASH_URL + "?gethashcount");
- xhr.setRequestHeader("Content-Type", "text/plain");
- xhr.onreadystatechange = function() {
- if (this.readyState == this.DONE) {
- gPreGethashCounter = gCurGethashCounter;
- gCurGethashCounter = parseInt(xhr.response);
- resolve();
- }
- };
- xhr.send();
- });
-}
-
-// calculate the fullhash and send it to gethash server
-function addCompletionToServer(list, url) {
- return new Promise(function(resolve, reject) {
- var listParam = "list=" + list;
- var fullhashParam = "fullhash=" + hash(url);
-
- var xhr = new XMLHttpRequest;
- xhr.open("PUT", GETHASH_URL + "?" + listParam + "&" + fullhashParam, true);
- xhr.setRequestHeader("Content-Type", "text/plain");
- xhr.onreadystatechange = function() {
- if (this.readyState == this.DONE) {
- resolve();
- }
- };
- xhr.send();
- });
-}
-
-function hash(str) {
- function bytesFromString(str) {
- var converter =
- SpecialPowers.Cc["@mozilla.org/intl/scriptableunicodeconverter"]
- .createInstance(SpecialPowers.Ci.nsIScriptableUnicodeConverter);
- converter.charset = "UTF-8";
- return converter.convertToByteArray(str);
- }
-
- var hasher = SpecialPowers.Cc["@mozilla.org/security/hash;1"]
- .createInstance(SpecialPowers.Ci.nsICryptoHash);
-
- var data = bytesFromString(str);
- hasher.init(hasher.SHA256);
- hasher.update(data, data.length);
-
- return hasher.finish(true);
-}
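-
-// Example (assumed input): hash("malware.example.com/") returns the
-// base64-encoded SHA-256 digest of that string, which addCompletionToServer
-// then passes to gethash.sjs as the "fullhash=" parameter.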
-
-// The setup function allows the classifier to send gethash requests for the test
-// databases; it also calculates the fullhashes for the urls and stores them in gethash.sjs.
-function setup() {
- classifierHelper.allowCompletion([MALWARE_LIST, UNWANTED_LIST], GETHASH_URL);
-
- return Promise.all([
- addCompletionToServer(MALWARE_LIST, MALWARE_HOST1),
- addCompletionToServer(MALWARE_LIST, MALWARE_HOST2),
- addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST1),
- addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST2),
- ]);
-}
-
-// The reset function in the helper tries to simulate the behavior of restarting Firefox.
-function reset() {
- return classifierHelper.resetDB()
- .catch(err => {
- ok(false, "Couldn't update classifier. Error code: " + errorCode);
- // Abort test.
- SimpleTest.finish();
- });
-}
-
-function updateUnusedUrl() {
- var testData = [
- { url: UNUSED_MALWARE_HOST, db: MALWARE_LIST },
- { url: UNUSED_UNWANTED_HOST, db: UNWANTED_LIST }
- ];
-
- return classifierHelper.addUrlToDB(testData)
- .catch(err => {
- ok(false, "Couldn't update classifier. Error code: " + err);
- // Abort test.
- SimpleTest.finish();
- });
-}
-
-function addPrefixToDB() {
- return update(true);
-}
-
-function addCompletionToDB() {
- return update(false);
-}
-
-function update(prefix = false) {
- var length = prefix ? 4 : 32;
- var testData = [
- { url: MALWARE_HOST1, db: MALWARE_LIST, len: length },
- { url: MALWARE_HOST2, db: MALWARE_LIST, len: length },
- { url: UNWANTED_HOST1, db: UNWANTED_LIST, len: length },
- { url: UNWANTED_HOST2, db: UNWANTED_LIST, len: length }
- ];
-
- return classifierHelper.addUrlToDB(testData)
- .catch(err => {
- ok(false, "Couldn't update classifier. Error code: " + errorCode);
- // Abort test.
- SimpleTest.finish();
- });
-}
-
-// This testcase is to make sure gethash works:
-// 1. Add prefixes to DB.
-// 2. Load a test frame containing the malware & unwanted urls; those urls should be blocked.
-// 3. The second step should also trigger a gethash request since the completions are not in
-//    either the cache or the DB.
-// 4. Load the test frame again; since the completions are now stored in the cache, no gethash
-//    request should be triggered.
-function testGethash() {
- return Promise.resolve()
- .then(addPrefixToDB)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
- .then(reset);
-}
-
-// This testcase is to make sure an update request will clear completion cache:
-// 1. Add prefixes to DB.
-// 2. Load the test frame; this should trigger a gethash request.
-// 3. Trigger an update; the completion cache should now be cleared.
-// 4. Load the test frame again; since the cache is cleared, a gethash request should be triggered.
-function testUpdateClearCache() {
- return Promise.resolve()
- .then(addPrefixToDB)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
- .then(updateUnusedUrl)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
- .then(reset);
-}
-
-// This testcase is to make sure completions delivered via update work:
-// 1. Add completions to DB.
-// 2. Load the test frame; since the completions are stored in the DB, no gethash
-//    request should be triggered.
-function testUpdate() {
- return Promise.resolve()
- .then(addCompletionToDB)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
- .then(reset);
-}
-
-// This testcase is to make sure an update request will not clear completions in DB:
-// 1. Add completions to DB.
-// 2. Load the test frame to make sure the completions are stored in the database;
-//    in this case, gethash should not be triggered.
-// 3. Trigger an update; the cache is cleared, but the completions in the DB should remain.
-// 4. Load the test frame again; since the completions are in the DB, no gethash request should be triggered.
-function testUpdateNotClearCompletions() {
- return Promise.resolve()
- .then(addCompletionToDB)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
- .then(updateUnusedUrl)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
- .then(reset);
-}
-
-// This testcase is to make sure completions stored in the DB are properly loaded after restarting.
-// 1. Add completions to DB.
-// 2. Simulate firefox restart by calling reloadDatabase.
-// 3. Load test frame, since completions should be loaded from DB, no gethash request should
-// be triggered.
-function testUpdateCompletionsAfterReload() {
- return Promise.resolve()
- .then(addCompletionToDB)
- .then(classifierHelper.reloadDatabase)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
- .then(reset);
-}
-
-// This testcase is to make sure the cache is cleared after restarting:
-// 1. Add prefixes to DB.
-// 2. Load test frame, this should trigger a gethash request and completions will be stored in
-// cache.
-// 3. Load test frame again, no gethash should be triggered because of cache.
-// 4. Simulate firefox restart by calling reloadDatabase.
-// 5. Load test frame again, since cache is cleared, gethash request should be triggered.
-function testGethashCompletionsAfterReload() {
- return Promise.resolve()
- .then(addPrefixToDB)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered."); })
- .then(classifierHelper.reloadDatabase)
- .then(loadTestFrame)
- .then(() => {
- ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered."); })
- .then(reset);
-}
-
-function runTest() {
- Promise.resolve()
- .then(classifierHelper.waitForInit)
- .then(setup)
- .then(testGethash)
- .then(testUpdateClearCache)
- .then(testUpdate)
- .then(testUpdateNotClearCompletions)
- .then(testUpdateCompletionsAfterReload)
- .then(testGethashCompletionsAfterReload)
- .then(function() {
- SimpleTest.finish();
- }).catch(function(e) {
- ok(false, "Some test failed with error " + e);
- SimpleTest.finish();
- });
-}
-
-SimpleTest.waitForExplicitFinish();
-
-// 'network.predictor.enabled' is disabled because if another testcase loads
-// resources such as evil.js or evil.css, they may be served directly from the
-// cache and bypass the classifier check.
-SpecialPowers.pushPrefEnv({"set": [
- ["browser.safebrowsing.malware.enabled", true],
- ["network.predictor.enabled", false],
- ["urlclassifier.gethash.timeout_ms", 30000],
-]}, runTest);
-
-</script>
-</pre>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html b/toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html
deleted file mode 100644
index 5814fff00..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html
+++ /dev/null
@@ -1,50 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Test the URI Classifier</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-Components.utils.import("resource://testing-common/UrlClassifierTestUtils.jsm");
-
-function cleanup() {
- SpecialPowers.clearUserPref("privacy.trackingprotection.enabled");
- SpecialPowers.clearUserPref("channelclassifier.allowlist_example");
-}
-
-SpecialPowers.pushPrefEnv(
- {"set" : [["urlclassifier.trackingTable", "test-track-simple"]]},
- test);
-
-function test() {
- UrlClassifierTestUtils.addTestTrackers().then(() => {
- SpecialPowers.setBoolPref("privacy.trackingprotection.enabled", true);
- // Make sure chrome:// URIs are processed. This does not white-list
- // any URIs unless 'https://allowlisted.example.com' is added in the
- // permission manager (see test_allowlisted_annotations.html)
- SpecialPowers.setBoolPref("channelclassifier.allowlist_example", true);
- document.getElementById("testFrame").src = "classifiedAnnotatedFrame.html";
- });
-}
-
-// Expected finish() call is in "classifiedAnnotatedFrame.html".
-SimpleTest.waitForExplicitFinish();
-
-</script>
-
-</pre>
-<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier.html
deleted file mode 100644
index 9533db426..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_classifier.html
+++ /dev/null
@@ -1,65 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Test the URI Classifier</title>
- <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
- <script type="text/javascript" src="classifierHelper.js"></script>
- <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
-</head>
-
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var firstLoad = true;
-
-// Add some URLs to the malware database.
-var testData = [
- { url: "malware.example.com/",
- db: "test-malware-simple"
- },
- { url: "unwanted.example.com/",
- db: "test-unwanted-simple"
- }
-];
-
-function loadTestFrame() {
- document.getElementById("testFrame").src = "classifierFrame.html";
-}
-
-// Expected finish() call is in "classifierFrame.html".
-SimpleTest.waitForExplicitFinish();
-
-function updateSuccess() {
- SpecialPowers.pushPrefEnv(
- {"set" : [["browser.safebrowsing.malware.enabled", true]]},
- loadTestFrame);
-}
-
-function updateError(errorCode) {
- ok(false, "Couldn't update classifier. Error code: " + errorCode);
- // Abort test.
- SimpleTest.finish();
-}
-
-SpecialPowers.pushPrefEnv(
- {"set" : [["urlclassifier.malwareTable", "test-malware-simple,test-unwanted-simple"],
- ["urlclassifier.phishTable", "test-phish-simple"]]},
- function() {
- classifierHelper.waitForInit()
- .then(() => classifierHelper.addUrlToDB(testData))
- .then(updateSuccess)
- .catch(err => {
- updateError(err);
- });
- });
-
-</script>
-
-</pre>
-<iframe id="testFrame" onload=""></iframe>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html
deleted file mode 100644
index 7423d3e8e..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html
+++ /dev/null
@@ -1,149 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Bug 1281083 - Changing the urlclassifier.*Table prefs doesn't take effect before the next browser restart.</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <script type="text/javascript" src="classifierHelper.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-const testTable = "moz-track-digest256";
-const UPDATE_URL = "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/update.sjs";
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-var prefService = Cc["@mozilla.org/preferences-service;1"]
- .getService(Ci.nsIPrefService);
-
-var timer = Cc["@mozilla.org/timer;1"]
- .createInstance(Ci.nsITimer);
-
-// If the default preference contains the table we want to test,
-// we should change the test table to a different one.
-var trackingTables = SpecialPowers.getCharPref("urlclassifier.trackingTable").split(",");
-ok(!trackingTables.includes(testTable), "test table should not be in the preference");
-
-var listmanager = Cc["@mozilla.org/url-classifier/listmanager;1"].
- getService(Ci.nsIUrlListManager);
-
-is(listmanager.getGethashUrl(testTable), "",
- "gethash url for test table should be empty before setting to preference");
-
-function loadTestFrame() {
-  // The gethash url of the test table "moz-track-digest256" should be updated
-  // after setting the preference.
- var url = listmanager.getGethashUrl(testTable);
- var expected = SpecialPowers.getCharPref("browser.safebrowsing.provider.mozilla.gethashURL");
-
- is(url, expected, testTable + " matches its gethash url");
-
- // Trigger update
- listmanager.disableUpdate(testTable);
- listmanager.enableUpdate(testTable);
- listmanager.maybeToggleUpdateChecking();
-
- // We wait until "nextupdattime" was set as a signal that update is complete.
- waitForUpdateSuccess(function() {
- document.getElementById("testFrame").src = "bug_1281083.html";
- });
-}
-
-function waitForUpdateSuccess(callback) {
- let nextupdatetime =
- SpecialPowers.getCharPref("browser.safebrowsing.provider.mozilla.nextupdatetime");
-
- if (nextupdatetime !== "1") {
- callback();
- return;
- }
-
- timer.initWithCallback(function() {
- waitForUpdateSuccess(callback);
- }, 10, Components.interfaces.nsITimer.TYPE_ONE_SHOT);
-}
-
-function addCompletionToServer(list, url) {
- return new Promise(function(resolve, reject) {
- var listParam = "list=" + list;
- var fullhashParam = "fullhash=" + hash(url);
-
- var xhr = new XMLHttpRequest;
- xhr.open("PUT", UPDATE_URL + "?" +
- listParam + "&" +
- fullhashParam, true);
- xhr.setRequestHeader("Content-Type", "text/plain");
- xhr.onreadystatechange = function() {
- if (this.readyState == this.DONE) {
- resolve();
- }
- };
- xhr.send();
- });
-}
-
-function hash(str) {
- function bytesFromString(str) {
- var converter =
- SpecialPowers.Cc["@mozilla.org/intl/scriptableunicodeconverter"]
- .createInstance(SpecialPowers.Ci.nsIScriptableUnicodeConverter);
- converter.charset = "UTF-8";
- return converter.convertToByteArray(str);
- }
-
- var hasher = SpecialPowers.Cc["@mozilla.org/security/hash;1"]
- .createInstance(SpecialPowers.Ci.nsICryptoHash);
-
- var data = bytesFromString(str);
- hasher.init(hasher.SHA256);
- hasher.update(data, data.length);
-
- return hasher.finish(true);
-}
-
-function runTest() {
- /**
-   * In this test we modify only the urlclassifier.*Table preference to see if
-   * the url specified in the table is blocked after an update.
- */
- var pushPrefPromise = SpecialPowers.pushPrefEnv(
- {"set" : [["urlclassifier.trackingTable", testTable]]});
-
-  // To make sure the url is not blocked by an already-blocked url,
-  // here we use non-tracking.example.com as the tracked url.
-  // Since this table is only used by this bug, it won't affect other testcases.
- var addCompletePromise =
- addCompletionToServer(testTable, "bug1281083.example.com/");
-
- Promise.all([pushPrefPromise, addCompletePromise])
- .then(() => {
- loadTestFrame();
- });
-}
-
-// Set nextupdatetime to 1 to trigger an update
-SpecialPowers.pushPrefEnv(
- {"set" : [["privacy.trackingprotection.enabled", true],
- ["channelclassifier.allowlist_example", true],
- ["browser.safebrowsing.provider.mozilla.nextupdatetime", "1"],
- ["browser.safebrowsing.provider.mozilla.lists", testTable],
- ["browser.safebrowsing.provider.mozilla.updateURL", UPDATE_URL]]},
- runTest);
-
-// Expected finish() call is in "bug_1281083.html".
-SimpleTest.waitForExplicitFinish();
-
-</script>
-</pre>
-<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
-</body>
-</html>
-
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html
deleted file mode 100644
index 1f54d45b0..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html
+++ /dev/null
@@ -1,76 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Test the URI Classifier</title>
- <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
- <script type="text/javascript" src="classifierHelper.js"></script>
- <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
-</head>
-
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-// Add some URLs to the malware database.
-var testData = [
- { url: "example.com/tests/toolkit/components/url-classifier/tests/mochitest/evilWorker.js",
- db: "test-malware-simple"
- },
- { url: "example.com/tests/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js",
- db: "test-unwanted-simple"
- }
-];
-
-function loadTestFrame() {
- document.getElementById("testFrame").src =
- "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/workerFrame.html";
-}
-
-function onmessage(event)
-{
- var pieces = event.data.split(':');
- if (pieces[0] == "finish") {
- SimpleTest.finish();
- return;
- }
-
- is(pieces[0], "success", pieces[1]);
-}
-
-function updateSuccess() {
- SpecialPowers.pushPrefEnv(
- {"set" : [["browser.safebrowsing.malware.enabled", true]]},
- loadTestFrame);
-}
-
-function updateError(errorCode) {
- ok(false, "Couldn't update classifier. Error code: " + errorCode);
- // Abort test.
- SimpleTest.finish();
-};
-
-SpecialPowers.pushPrefEnv(
- {"set" : [["urlclassifier.malwareTable", "test-malware-simple,test-unwanted-simple"],
- ["urlclassifier.phishTable", "test-phish-simple"]]},
- function() {
- classifierHelper.waitForInit()
- .then(() => classifierHelper.addUrlToDB(testData))
- .then(updateSuccess)
- .catch(err => {
- updateError(err);
- });
- });
-
-window.addEventListener("message", onmessage, false);
-
-SimpleTest.waitForExplicitFinish();
-
-</script>
-
-</pre>
-<iframe id="testFrame" onload=""></iframe>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html b/toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html
deleted file mode 100644
index 96fa2891a..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html
+++ /dev/null
@@ -1,121 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Bug 1233914 - ping doesn't honor the TP list here.</title>
- <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-  SimpleTest.requestFlakyTimeout("Delay to make sure the ping is made prior to the XHR");
-
- const timeout = 200;
- const host_nottrack = "http://not-tracking.example.com/";
- const host_track = "http://trackertest.org/";
- const path_ping = "tests/toolkit/components/url-classifier/tests/mochitest/ping.sjs";
- const TP_ENABLE_PREF = "privacy.trackingprotection.enabled";
-
- function testPingNonBlacklist() {
- SpecialPowers.setBoolPref(TP_ENABLE_PREF, true);
-
- var msg = "ping should reach page not in blacklist";
- var expectPing = true;
- var id = "1111";
- ping(id, host_nottrack);
-
- return new Promise(function(resolve, reject) {
- setTimeout(function() {
- isPinged(id, expectPing, msg, resolve);
- }, timeout);
- });
- }
-
- function testPingBlacklistSafebrowsingOff() {
- SpecialPowers.setBoolPref(TP_ENABLE_PREF, false);
-
- var msg = "ping should reach page in blacklist when tracking protection is off";
- var expectPing = true;
- var id = "2222";
- ping(id, host_track);
-
- return new Promise(function(resolve, reject) {
- setTimeout(function() {
- isPinged(id, expectPing, msg, resolve);
- }, timeout);
- });
- }
-
- function testPingBlacklistSafebrowsingOn() {
- SpecialPowers.setBoolPref(TP_ENABLE_PREF, true);
-
- var msg = "ping should not reach page in blacklist when tracking protection is on";
- var expectPing = false;
- var id = "3333";
- ping(id, host_track);
-
- return new Promise(function(resolve, reject) {
- setTimeout(function() {
- isPinged(id, expectPing, msg, resolve);
- }, timeout);
- });
- }
-
- function ping(id, host) {
- var elm = document.createElement("a");
- elm.setAttribute('ping', host + path_ping + "?id=" + id);
- elm.setAttribute('href', "#");
- document.body.appendChild(elm);
-
- // Trigger ping.
- elm.click();
-
- document.body.removeChild(elm);
- }
-
- function isPinged(id, expected, msg, callback) {
- var url = "http://mochi.test:8888/" + path_ping;
- var xhr = new XMLHttpRequest();
- xhr.open('GET', url + "?id=" + id);
- xhr.onload = function() {
- var isPinged = xhr.response === "ping";
-      is(isPinged, expected, msg);
-
- callback();
- };
- xhr.send();
- }
-
- function cleanup() {
- SpecialPowers.clearUserPref(TP_ENABLE_PREF);
- }
-
- function runTest() {
- Promise.resolve()
- .then(testPingNonBlacklist)
- .then(testPingBlacklistSafebrowsingOff)
- .then(testPingBlacklistSafebrowsingOn)
- .then(function() {
- SimpleTest.finish();
- }).catch(function(e) {
- ok(false, "Some test failed with error " + e);
- SimpleTest.finish();
- });
- }
-
- SimpleTest.waitForExplicitFinish();
- SimpleTest.registerCleanupFunction(cleanup);
- SpecialPowers.pushPrefEnv({"set": [
- ["browser.send_pings", true],
- ["urlclassifier.trackingTable", "test-track-simple"],
- ]}, runTest);
-
-</script>
-</pre>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classify_track.html b/toolkit/components/url-classifier/tests/mochitest/test_classify_track.html
deleted file mode 100644
index a868d7960..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_classify_track.html
+++ /dev/null
@@ -1,162 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Bug 1262406 - Track element doesn't use the URL classifier.</title>
- <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
- <script type="text/javascript" src="classifierHelper.js"></script>
- <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
- const PREF = "browser.safebrowsing.malware.enabled";
- const track_path = "tests/toolkit/components/url-classifier/tests/mochitest/basic.vtt";
- const malware_url = "http://malware.example.com/" + track_path;
- const validtrack_url = "http://mochi.test:8888/" + track_path;
-
- var video = document.createElement("video");
- video.src = "seek.webm";
- video.crossOrigin = "anonymous";
-
- document.body.appendChild(video);
-
- function testValidTrack() {
- SpecialPowers.setBoolPref(PREF, true);
-
- return new Promise(function(resolve, reject) {
- var track = document.createElement("track");
- track.src = validtrack_url;
- video.appendChild(track);
-
- function onload() {
- ok(true, "Track should be loaded when url is not in blacklist");
- finish();
- }
-
- function onerror() {
- ok(false, "Error while loading track");
- finish();
- }
-
- function finish() {
- track.removeEventListener("load", onload);
- track.removeEventListener("error", onerror)
- resolve();
- }
-
- track.addEventListener("load", onload);
- track.addEventListener("error", onerror)
- });
- }
-
- function testBlacklistTrackSafebrowsingOff() {
- SpecialPowers.setBoolPref(PREF, false);
-
- return new Promise(function(resolve, reject) {
- var track = document.createElement("track");
- track.src = malware_url;
- video.appendChild(track);
-
- function onload() {
- ok(true, "Track should be loaded when url is in blacklist and safebrowsing is off");
- finish();
- }
-
- function onerror() {
- ok(false, "Error while loading track");
- finish();
- }
-
- function finish() {
- track.removeEventListener("load", onload);
- track.removeEventListener("error", onerror)
- resolve();
- }
-
- track.addEventListener("load", onload);
- track.addEventListener("error", onerror)
- });
- }
-
- function testBlacklistTrackSafebrowsingOn() {
- SpecialPowers.setBoolPref(PREF, true);
-
- return new Promise(function(resolve, reject) {
- var track = document.createElement("track");
-
-      // Add a query string parameter here so the url classifier does not skip
-      // classification because the resource is already in the cache.
- track.src = malware_url + "?testsbon";
- video.appendChild(track);
-
- function onload() {
- ok(false, "Unexpected result while loading track in blacklist");
- finish();
- }
-
- function onerror() {
- ok(true, "Track should not be loaded when url is in blacklist and safebrowsing is on");
- finish();
- }
-
- function finish() {
- track.removeEventListener("load", onload);
- track.removeEventListener("error", onerror)
- resolve();
- }
-
- track.addEventListener("load", onload);
- track.addEventListener("error", onerror)
- });
- }
-
- function cleanup() {
- SpecialPowers.clearUserPref(PREF);
- }
-
- function setup() {
- var testData = [
- { url: "malware.example.com/",
- db: "test-malware-simple"
- }
- ];
-
- return classifierHelper.addUrlToDB(testData)
- .catch(function(err) {
- ok(false, "Couldn't update classifier. Error code: " + err);
- // Abort test.
- SimpleTest.finish();
- });
- }
-
- function runTest() {
- Promise.resolve()
- .then(classifierHelper.waitForInit)
- .then(setup)
- .then(testValidTrack)
- .then(testBlacklistTrackSafebrowsingOff)
- .then(testBlacklistTrackSafebrowsingOn)
- .then(function() {
- SimpleTest.finish();
- }).catch(function(e) {
- ok(false, "Some test failed with error " + e);
- SimpleTest.finish();
- });
- }
-
- SimpleTest.waitForExplicitFinish();
- SimpleTest.registerCleanupFunction(cleanup);
- SpecialPowers.pushPrefEnv({"set": [
- ["media.webvtt.regions.enabled", true],
- ["urlclassifier.malwareTable", "test-malware-simple"],
- ]}, runTest);
-
-</script>
-</pre>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_donottrack.html b/toolkit/components/url-classifier/tests/mochitest/test_donottrack.html
deleted file mode 100644
index 56003e7eb..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_donottrack.html
+++ /dev/null
@@ -1,150 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Bug 1258033 - Fix the DNT loophole for tracking protection</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-var mainWindow = window.QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIWebNavigation)
- .QueryInterface(Ci.nsIDocShellTreeItem)
- .rootTreeItem
- .QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIDOMWindow);
-
-const tests = [
- // DNT turned on and TP turned off, DNT signal sent in both private browsing
- // and normal mode.
- {
- setting: {dntPref:true, tpPref:false, tppbPref:false, pbMode:true},
- expected: {dnt: "1"},
- },
- {
- setting: {dntPref:true, tpPref:false, tppbPref:false, pbMode:false},
- expected: {dnt: "1"}
- },
- // DNT turned off and TP turned on globally, DNT signal sent in both private
- // browsing and normal mode.
- {
- setting: {dntPref:false, tpPref:true, tppbPref:false, pbMode:true},
- expected: {dnt: "1"}
- },
- {
- setting: {dntPref:false, tpPref:true, tppbPref:false, pbMode:false},
- expected: {dnt: "1"}
- },
- // DNT turned off and TP in Private Browsing only, DNT signal sent in private
- // browsing mode only.
- {
- setting: {dntPref:false, tpPref:false, tppbPref:true, pbMode:true},
- expected: {dnt: "1"}
- },
- {
- setting: {dntPref:false, tpPref:false, tppbPref:true, pbMode:false},
- expected: {dnt: "unspecified"}
- },
- // DNT turned off and TP turned off, DNT signal is never sent.
- {
- setting: {dntPref:false, tpPref:false, tppbPref:false, pbMode:true},
- expected: {dnt: "unspecified"}
- },
- {
- setting: {dntPref:false, tpPref:false, tppbPref:false, pbMode:false},
- expected: {dnt: "unspecified"}
- },
-]
-
-const DNT_PREF = 'privacy.donottrackheader.enabled';
-const TP_PREF = 'privacy.trackingprotection.enabled';
-const TP_PB_PREF = 'privacy.trackingprotection.pbmode.enabled';
-
-const contentPage =
- "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/dnt.html";
-
-Components.utils.import("resource://gre/modules/Services.jsm");
-
-function whenDelayedStartupFinished(aWindow, aCallback) {
- Services.obs.addObserver(function observer(aSubject, aTopic) {
- if (aWindow == aSubject) {
- Services.obs.removeObserver(observer, aTopic);
- setTimeout(aCallback, 0);
- }
- }, "browser-delayed-startup-finished", false);
-}
-
-function executeTest(test) {
- SpecialPowers.pushPrefEnv({"set" : [
- [DNT_PREF, test.setting.dntPref],
- [TP_PREF, test.setting.tpPref],
- [TP_PB_PREF, test.setting.tppbPref]
- ]});
-
- var win = mainWindow.OpenBrowserWindow({private: test.setting.pbMode});
-
- return new Promise(function(resolve, reject) {
- win.addEventListener("load", function onLoad() {
- win.removeEventListener("load", onLoad, false);
- whenDelayedStartupFinished(win, function() {
- win.addEventListener("DOMContentLoaded", function onInnerLoad() {
- if (win.content.location.href != contentPage) {
- win.gBrowser.loadURI(contentPage);
- return;
- }
-
- win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
-
- win.content.addEventListener('message', function (event) {
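-            // dnt.html reports back via postMessage with "key=value" strings:
-            // "navigator.doNotTrack=<value>", "DNT=<header value>", and finally
-            // "finish" once it is done.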
- let [key, value] = event.data.split("=");
- if (key == "finish") {
- win.close();
- resolve();
- } else if (key == "navigator.doNotTrack") {
- is(value, test.expected.dnt, "navigator.doNotTrack should be " + test.expected.dnt);
- } else if (key == "DNT") {
- let msg = test.expected.dnt == "1" ? "" : "not ";
- is(value, test.expected.dnt, "DNT header should " + msg + "be sent");
- } else {
- ok(false, "unexpected message");
- }
- });
- }, true);
- SimpleTest.executeSoon(function() { win.gBrowser.loadURI(contentPage); });
- });
- }, true);
- });
-}
-
-let loop = function loop(index) {
- if (index >= tests.length) {
- SimpleTest.finish();
- return;
- }
-
- let test = tests[index];
- let next = function next() {
- loop(index + 1);
- };
- let result = executeTest(test);
- result.then(next, next);
-};
-
-SimpleTest.waitForExplicitFinish();
-loop(0);
-
-</script>
-
-</pre>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_gethash.html b/toolkit/components/url-classifier/tests/mochitest/test_gethash.html
deleted file mode 100644
index af995e2a5..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_gethash.html
+++ /dev/null
@@ -1,157 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Bug 1272239 - Test gethash.</title>
- <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
- <script type="text/javascript" src="classifierHelper.js"></script>
- <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-<iframe id="testFrame1" onload=""></iframe>
-<iframe id="testFrame2" onload=""></iframe>
-
-<script class="testbody" type="text/javascript">
-
-const MALWARE_LIST = "test-malware-simple";
-const MALWARE_HOST = "malware.example.com/";
-
-const UNWANTED_LIST = "test-unwanted-simple";
-const UNWANTED_HOST = "unwanted.example.com/";
-
-const GETHASH_URL = "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/gethash.sjs";
-const NOTEXIST_URL = "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/nonexistserver.sjs";
-
-var shouldLoad = false;
-
-// In this testcase we store prefixes in the local database and send the fullhash
-// to the gethash server. When the test page is accessed, gecko should send a
-// gethash request to the server and receive the completion response.
-function loadTestFrame(id) {
- return new Promise(function(resolve, reject) {
-
- var iframe = document.getElementById(id);
- iframe.setAttribute("src", "gethashFrame.html");
-
- iframe.onload = function() {
- resolve();
- };
- });
-}
-
-// Add 4-byte prefixes to the local database, so that accessing the url
-// triggers a gethash request.
-function addPrefixToDB(list, url) {
- var testData = [{ db: list, url: url, len: 4 }];
-
- return classifierHelper.addUrlToDB(testData)
- .catch(function(err) {
- ok(false, "Couldn't update classifier. Error code: " + err);
- // Abort test.
- SimpleTest.finish();
- });
-}
-
-// calculate the fullhash and send it to gethash server
-function addCompletionToServer(list, url) {
- return new Promise(function(resolve, reject) {
- var listParam = "list=" + list;
- var fullhashParam = "fullhash=" + hash(url);
-
- var xhr = new XMLHttpRequest;
- xhr.open("PUT", GETHASH_URL + "?" +
- listParam + "&" +
- fullhashParam, true);
- xhr.setRequestHeader("Content-Type", "text/plain");
- xhr.onreadystatechange = function() {
- if (this.readyState == this.DONE) {
- resolve();
- }
- };
- xhr.send();
- });
-}
-
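-// Returns the base64-encoded SHA-256 digest of |str|; addCompletionToServer
-// uploads this digest as the full hash for the corresponding prefix.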
-function hash(str) {
- function bytesFromString(str) {
- var converter =
- SpecialPowers.Cc["@mozilla.org/intl/scriptableunicodeconverter"]
- .createInstance(SpecialPowers.Ci.nsIScriptableUnicodeConverter);
- converter.charset = "UTF-8";
- return converter.convertToByteArray(str);
- }
-
- var hasher = SpecialPowers.Cc["@mozilla.org/security/hash;1"]
- .createInstance(SpecialPowers.Ci.nsICryptoHash);
-
- var data = bytesFromString(str);
- hasher.init(hasher.SHA256);
- hasher.update(data, data.length);
-
- return hasher.finish(true);
-}
-
-function setup404() {
- shouldLoad = true;
-
- classifierHelper.allowCompletion([MALWARE_LIST, UNWANTED_LIST], NOTEXIST_URL);
-
- return Promise.all([
- addPrefixToDB(MALWARE_LIST, MALWARE_HOST),
- addPrefixToDB(UNWANTED_LIST, UNWANTED_HOST)
- ]);
-}
-
-function setup() {
- classifierHelper.allowCompletion([MALWARE_LIST, UNWANTED_LIST], GETHASH_URL);
-
- return Promise.all([
- addPrefixToDB(MALWARE_LIST, MALWARE_HOST),
- addPrefixToDB(UNWANTED_LIST, UNWANTED_HOST),
- addCompletionToServer(MALWARE_LIST, MALWARE_HOST),
- addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST),
- ]);
-}
-
-// Manually reset the DB to make sure the next test won't be affected by the cache.
-function reset() {
-  return classifierHelper.resetDB();
-}
-
-function runTest() {
- Promise.resolve()
-    // These test resources get blocked when gethash returns successfully.
- .then(classifierHelper.waitForInit)
- .then(setup)
- .then(() => loadTestFrame("testFrame1"))
- .then(reset)
-    // These test resources are not blocked when gethash returns an error.
- .then(setup404)
- .then(() => loadTestFrame("testFrame2"))
- .then(function() {
- SimpleTest.finish();
- }).catch(function(e) {
- ok(false, "Some test failed with error " + e);
- SimpleTest.finish();
- });
-}
-
-SimpleTest.waitForExplicitFinish();
-
-// 'network.predictor.enabled' is disabled because if another testcase loads
-// resources such as evil.js or evil.css, they might later be served from the
-// cache directly and bypass the classifier check.
-SpecialPowers.pushPrefEnv({"set": [
- ["browser.safebrowsing.malware.enabled", true],
- ["network.predictor.enabled", false],
- ["urlclassifier.gethash.timeout_ms", 30000],
-]}, runTest);
-
-</script>
-</pre>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_lookup_system_principal.html b/toolkit/components/url-classifier/tests/mochitest/test_lookup_system_principal.html
deleted file mode 100644
index fa61e6a00..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_lookup_system_principal.html
+++ /dev/null
@@ -1,29 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
- <title>Test that lookup() on a system principal doesn't crash</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script type="text/javascript">
-
-var Cc = Components.classes;
-var Ci = Components.interfaces;
-
-var dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
- .getService(Ci.nsIUrlClassifierDBService);
-
-dbService.lookup(document.nodePrincipal, "", function(arg) {});
-
-ok(true, "lookup() didn't crash");
-
-</script>
-</pre>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html b/toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html
deleted file mode 100644
index 02ef57b46..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html
+++ /dev/null
@@ -1,154 +0,0 @@
-<!DOCTYPE HTML>
-<!-- Any copyright is dedicated to the Public Domain.
- http://creativecommons.org/publicdomain/zero/1.0/ -->
-<html>
-<head>
- <title>Test Tracking Protection in Private Browsing mode</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-var mainWindow = window.QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIWebNavigation)
- .QueryInterface(Ci.nsIDocShellTreeItem)
- .rootTreeItem
- .QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIDOMWindow);
-var contentPage = "http://www.itisatrap.org/tests/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html";
-
-Components.utils.import("resource://gre/modules/Services.jsm");
-Components.utils.import("resource://testing-common/UrlClassifierTestUtils.jsm");
-
-function whenDelayedStartupFinished(aWindow, aCallback) {
- Services.obs.addObserver(function observer(aSubject, aTopic) {
- if (aWindow == aSubject) {
- Services.obs.removeObserver(observer, aTopic);
- setTimeout(aCallback, 0);
- }
- }, "browser-delayed-startup-finished", false);
-}
-
-function testOnWindow(aPrivate, aCallback) {
- var win = mainWindow.OpenBrowserWindow({private: aPrivate});
- win.addEventListener("load", function onLoad() {
- win.removeEventListener("load", onLoad, false);
- whenDelayedStartupFinished(win, function() {
- win.addEventListener("DOMContentLoaded", function onInnerLoad() {
- if (win.content.location.href != contentPage) {
- win.gBrowser.loadURI(contentPage);
- return;
- }
- win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
-
- win.content.addEventListener('load', function innerLoad2() {
- win.content.removeEventListener('load', innerLoad2, false);
- SimpleTest.executeSoon(function() { aCallback(win); });
- }, false, true);
- }, true);
- SimpleTest.executeSoon(function() { win.gBrowser.loadURI(contentPage); });
- });
- }, true);
-}
-
-var badids = [
- "badscript",
- "badimage",
- "badcss"
-];
-
-function checkLoads(aWindow, aBlocked) {
- var win = aWindow.content;
- is(win.document.getElementById("badscript").dataset.touched, aBlocked ? "no" : "yes", "Should not load tracking javascript");
- is(win.document.getElementById("badimage").dataset.touched, aBlocked ? "no" : "yes", "Should not load tracking images");
- is(win.document.getElementById("goodscript").dataset.touched, "yes", "Should load whitelisted tracking javascript");
-
- var elt = win.document.getElementById("styleCheck");
- var style = win.document.defaultView.getComputedStyle(elt, "");
- isnot(style.visibility, aBlocked ? "hidden" : "", "Should not load tracking css");
-
- is(win.document.blockedTrackingNodeCount, aBlocked ? badids.length : 0, "Should identify all tracking elements");
-
- var blockedTrackingNodes = win.document.blockedTrackingNodes;
-
-  // Make sure that every node in blockedTrackingNodes exists in the tree
-  // (that may not always be the case in general, but we do not expect any nodes
-  // to disappear from the tree here).
- var allNodeMatch = true;
- for (var i = 0; i < blockedTrackingNodes.length; i++) {
- var nodeMatch = false;
- for (var j = 0; j < badids.length && !nodeMatch; j++) {
- nodeMatch = nodeMatch ||
- (blockedTrackingNodes[i] == win.document.getElementById(badids[j]));
- }
-
- allNodeMatch = allNodeMatch && nodeMatch;
- }
- is(allNodeMatch, true, "All annotated nodes are expected in the tree");
-
- // Make sure that every node with a badid (see badids) is found in the
- // blockedTrackingNodes. This tells us if we are neglecting to annotate
- // some nodes
- allNodeMatch = true;
- for (var j = 0; j < badids.length; j++) {
- var nodeMatch = false;
- for (var i = 0; i < blockedTrackingNodes.length && !nodeMatch; i++) {
- nodeMatch = nodeMatch ||
- (blockedTrackingNodes[i] == win.document.getElementById(badids[j]));
- }
-
- allNodeMatch = allNodeMatch && nodeMatch;
- }
- is(allNodeMatch, aBlocked, "All tracking nodes are expected to be annotated as such");
-}
-
-SpecialPowers.pushPrefEnv(
- {"set" : [["urlclassifier.trackingTable", "test-track-simple"],
- ["privacy.trackingprotection.enabled", false],
- ["privacy.trackingprotection.pbmode.enabled", true],
- ["channelclassifier.allowlist_example", true]]},
- test);
-
-function test() {
- SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
- UrlClassifierTestUtils.addTestTrackers().then(() => {
- // Normal mode, with the pref (trackers should be loaded)
- testOnWindow(false, function(aWindow) {
- checkLoads(aWindow, false);
- aWindow.close();
-
- // Private Browsing, with the pref (trackers should be blocked)
- testOnWindow(true, function(aWindow) {
- checkLoads(aWindow, true);
- aWindow.close();
-
- // Private Browsing, without the pref (trackers should be loaded)
- SpecialPowers.setBoolPref("privacy.trackingprotection.pbmode.enabled", false);
- testOnWindow(true, function(aWindow) {
- checkLoads(aWindow, false);
- aWindow.close();
- SimpleTest.finish();
- });
- });
- });
- });
-}
-
-SimpleTest.waitForExplicitFinish();
-
-</script>
-
-</pre>
-<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html b/toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html
deleted file mode 100644
index 8066c2a37..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html
+++ /dev/null
@@ -1,87 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
-  <title>Bug 1272239 - Only tables with a provider can register a gethash url in the listmanager.</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-// List all the tables
-const prefs = [
- "urlclassifier.phishTable",
- "urlclassifier.malwareTable",
- "urlclassifier.downloadBlockTable",
- "urlclassifier.downloadAllowTable",
- "urlclassifier.trackingTable",
- "urlclassifier.trackingWhitelistTable",
- "urlclassifier.blockedTable"
-];
-
-var prefService = Cc["@mozilla.org/preferences-service;1"]
- .getService(Ci.nsIPrefService);
-
-// Get providers
-var providers = {};
-
-var branch = prefService.getBranch("browser.safebrowsing.provider.");
-var children = branch.getChildList("", {});
-
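-// Child pref names look like "<provider>.lists" or "<provider>.gethashURL",
-// so the first dot-separated component is the provider name.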
-for (var child of children) {
- var prefComponents = child.split(".");
- var providerName = prefComponents[0];
- providers[providerName] = {};
-}
-
-// Get lists from |browser.safebrowsing.provider.PROVIDER_NAME.lists| preference.
-var listsWithProvider = [];
-var listsToProvider = [];
-for (var provider in providers) {
- var pref = "browser.safebrowsing.provider." + provider + ".lists";
- var list = SpecialPowers.getCharPref(pref).split(",");
-
- listsToProvider = listsToProvider.concat(list.map( () => { return provider; }));
- listsWithProvider = listsWithProvider.concat(list);
-}
-
-// Get all the lists
-var lists = [];
-for (var pref of prefs) {
- lists = lists.concat(SpecialPowers.getCharPref(pref).split(","));
-}
-
-var listmanager = Cc["@mozilla.org/url-classifier/listmanager;1"].
- getService(Ci.nsIUrlListManager);
-
-for (var list of lists) {
- if (!list)
- continue;
-
-  // A list that has a provider should have the correct gethash url.
-  // A list without a provider (for example, test-malware-simple) should not
-  // have a gethash url.
- var url = listmanager.getGethashUrl(list);
- var index = listsWithProvider.indexOf(list);
- if (index >= 0) {
- var provider = listsToProvider[index];
- var pref = "browser.safebrowsing.provider." + provider + ".gethashURL";
- is(url, SpecialPowers.getCharPref(pref), list + " matches its gethash url");
- } else {
- is(url, "", list + " should not have a gethash url");
- }
-}
-
-</script>
-</pre>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html
deleted file mode 100644
index 7611dd245..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html
+++ /dev/null
@@ -1,107 +0,0 @@
-<!DOCTYPE HTML>
-<!-- Any copyright is dedicated to the Public Domain.
- http://creativecommons.org/publicdomain/zero/1.0/ -->
-<html>
-<head>
- <title>Test Tracking Protection with and without Safe Browsing (Bug #1157081)</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-var mainWindow = window.QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIWebNavigation)
- .QueryInterface(Ci.nsIDocShellTreeItem)
- .rootTreeItem
- .QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIDOMWindow);
-var contentPage = "chrome://mochitests/content/chrome/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html"
-
-Components.utils.import("resource://gre/modules/Services.jsm");
-Components.utils.import("resource://testing-common/UrlClassifierTestUtils.jsm");
-
-function whenDelayedStartupFinished(aWindow, aCallback) {
- Services.obs.addObserver(function observer(aSubject, aTopic) {
- if (aWindow == aSubject) {
- Services.obs.removeObserver(observer, aTopic);
- setTimeout(aCallback, 0);
- }
- }, "browser-delayed-startup-finished", false);
-}
-
-function testOnWindow(aCallback) {
- var win = mainWindow.OpenBrowserWindow();
- win.addEventListener("load", function onLoad() {
- win.removeEventListener("load", onLoad, false);
- whenDelayedStartupFinished(win, function() {
- win.addEventListener("DOMContentLoaded", function onInnerLoad() {
- if (win.content.location.href != contentPage) {
- win.gBrowser.loadURI(contentPage);
- return;
- }
- win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
-
- win.content.addEventListener('load', function innerLoad2() {
- win.content.removeEventListener('load', innerLoad2, false);
- SimpleTest.executeSoon(function() { aCallback(win); });
- }, false, true);
- }, true);
- SimpleTest.executeSoon(function() { win.gBrowser.loadURI(contentPage); });
- });
- }, true);
-}
-
-var badids = [
- "badscript"
-];
-
-function checkLoads(aWindow, aBlocked) {
- var win = aWindow.content;
- is(win.document.getElementById("badscript").dataset.touched, aBlocked ? "no" : "yes", "Should not load tracking javascript");
-}
-
-SpecialPowers.pushPrefEnv(
- {"set" : [["urlclassifier.trackingTable", "test-track-simple"],
- ["privacy.trackingprotection.enabled", true],
- ["browser.safebrowsing.malware.enabled", false],
- ["browser.safebrowsing.phishing.enabled", false],
- ["channelclassifier.allowlist_example", true]]},
- test);
-
-function test() {
- SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
- UrlClassifierTestUtils.addTestTrackers().then(() => {
- // Safe Browsing turned OFF, tracking protection should work nevertheless
- testOnWindow(function(aWindow) {
- checkLoads(aWindow, true);
- aWindow.close();
-
- // Safe Browsing turned ON, tracking protection should still work
- SpecialPowers.setBoolPref("browser.safebrowsing.phishing.enabled", true);
- testOnWindow(function(aWindow) {
- checkLoads(aWindow, true);
- aWindow.close();
- SimpleTest.finish();
- });
- });
- });
-}
-
-SimpleTest.waitForExplicitFinish();
-
-</script>
-
-</pre>
-<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html
deleted file mode 100644
index 29de0dfed..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html
+++ /dev/null
@@ -1,153 +0,0 @@
-<!DOCTYPE HTML>
-<!-- Any copyright is dedicated to the Public Domain.
- http://creativecommons.org/publicdomain/zero/1.0/ -->
-<html>
-<head>
-  <title>Test the Tracking Protection whitelist</title>
- <script type="text/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
-</head>
-
-<body>
-<p id="display"></p>
-<div id="content" style="display: none">
-</div>
-<pre id="test">
-
-<script class="testbody" type="text/javascript">
-
-var Cc = SpecialPowers.Cc;
-var Ci = SpecialPowers.Ci;
-
-var mainWindow = window.QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIWebNavigation)
- .QueryInterface(Ci.nsIDocShellTreeItem)
- .rootTreeItem
- .QueryInterface(Ci.nsIInterfaceRequestor)
- .getInterface(Ci.nsIDOMWindow);
-var contentPage1 = "http://www.itisatrap.org/tests/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html";
-var contentPage2 = "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html";
-
-Components.utils.import("resource://gre/modules/Services.jsm");
-Components.utils.import("resource://testing-common/UrlClassifierTestUtils.jsm");
-
-function whenDelayedStartupFinished(aWindow, aCallback) {
- Services.obs.addObserver(function observer(aSubject, aTopic) {
- if (aWindow == aSubject) {
- Services.obs.removeObserver(observer, aTopic);
- setTimeout(aCallback, 0);
- }
- }, "browser-delayed-startup-finished", false);
-}
-
-function testOnWindow(contentPage, aCallback) {
- var win = mainWindow.OpenBrowserWindow();
- win.addEventListener("load", function onLoad() {
- win.removeEventListener("load", onLoad, false);
- whenDelayedStartupFinished(win, function() {
- win.addEventListener("DOMContentLoaded", function onInnerLoad() {
- if (win.content.location.href != contentPage) {
- win.gBrowser.loadURI(contentPage);
- return;
- }
- win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
-
- win.content.addEventListener('load', function innerLoad2() {
- win.content.removeEventListener('load', innerLoad2, false);
- SimpleTest.executeSoon(function() { aCallback(win); });
- }, false, true);
- }, true);
- SimpleTest.executeSoon(function() { win.gBrowser.loadURI(contentPage); });
- });
- }, true);
-}
-
-var alwaysbadids = [
- "badscript",
-];
-
-function checkLoads(aWindow, aWhitelisted) {
- var win = aWindow.content;
- is(win.document.getElementById("badscript").dataset.touched, "no", "Should not load tracking javascript");
- is(win.document.getElementById("goodscript").dataset.touched, aWhitelisted ? "yes" : "no", "Should load whitelisted tracking javascript");
-
- var badids = alwaysbadids.slice();
- if (!aWhitelisted) {
- badids.push("goodscript");
- }
- is(win.document.blockedTrackingNodeCount, badids.length, "Should identify all tracking elements");
-
- var blockedTrackingNodes = win.document.blockedTrackingNodes;
-
-  // Make sure that every node in blockedTrackingNodes exists in the tree
-  // (that may not always be the case in general, but we do not expect any nodes
-  // to disappear from the tree here).
- var allNodeMatch = true;
- for (var i = 0; i < blockedTrackingNodes.length; i++) {
- var nodeMatch = false;
- for (var j = 0; j < badids.length && !nodeMatch; j++) {
- nodeMatch = nodeMatch ||
- (blockedTrackingNodes[i] == win.document.getElementById(badids[j]));
- }
-
- allNodeMatch = allNodeMatch && nodeMatch;
- }
- is(allNodeMatch, true, "All annotated nodes are expected in the tree");
-
- // Make sure that every node with a badid (see badids) is found in the
- // blockedTrackingNodes. This tells us if we are neglecting to annotate
- // some nodes
- allNodeMatch = true;
- for (var j = 0; j < badids.length; j++) {
- var nodeMatch = false;
- for (var i = 0; i < blockedTrackingNodes.length && !nodeMatch; i++) {
- nodeMatch = nodeMatch ||
- (blockedTrackingNodes[i] == win.document.getElementById(badids[j]));
- }
-
- allNodeMatch = allNodeMatch && nodeMatch;
- }
- is(allNodeMatch, true, "All tracking nodes are expected to be annotated as such");
-}
-
-SpecialPowers.pushPrefEnv(
- {"set" : [["privacy.trackingprotection.enabled", true],
- ["channelclassifier.allowlist_example", true]]},
- test);
-
-function test() {
- SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
- UrlClassifierTestUtils.addTestTrackers().then(() => {
- // Load the test from a URL on the whitelist
- testOnWindow(contentPage1, function(aWindow) {
- checkLoads(aWindow, true);
- aWindow.close();
-
- // Load the test from a URL that's NOT on the whitelist
- testOnWindow(contentPage2, function(aWindow) {
- checkLoads(aWindow, false);
- aWindow.close();
-
- // Load the test from a URL on the whitelist but without the whitelist
- SpecialPowers.pushPrefEnv({"set" : [["urlclassifier.trackingWhitelistTable", ""]]},
- function() {
- testOnWindow(contentPage1, function(aWindow) {
- checkLoads(aWindow, false);
- aWindow.close();
- SimpleTest.finish();
- });
- });
-
- });
- });
- });
-}
-
-SimpleTest.waitForExplicitFinish();
-
-</script>
-
-</pre>
-<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/track.html b/toolkit/components/url-classifier/tests/mochitest/track.html
deleted file mode 100644
index 8785e7c5b..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/track.html
+++ /dev/null
@@ -1,7 +0,0 @@
-<html>
- <head>
- </head>
- <body>
- <h1>Tracking Works!</h1>
- </body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js b/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js
deleted file mode 100644
index ac34977d7..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js
+++ /dev/null
@@ -1,3 +0,0 @@
-onmessage = function() {
- postMessage("loaded bad file");
-}
diff --git a/toolkit/components/url-classifier/tests/mochitest/update.sjs b/toolkit/components/url-classifier/tests/mochitest/update.sjs
deleted file mode 100644
index 53efaafdf..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/update.sjs
+++ /dev/null
@@ -1,114 +0,0 @@
-const CC = Components.Constructor;
-const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
- "nsIBinaryInputStream",
- "setInputStream");
-
-function handleRequest(request, response)
-{
- var query = {};
- request.queryString.split('&').forEach(function (val) {
- var idx = val.indexOf('=');
- query[val.slice(0, idx)] = unescape(val.slice(idx + 1));
- });
-
-  // Store the fullhash on the server side.
- if ("list" in query && "fullhash" in query) {
-    // On the server side we store:
-    // 1. all the full hashes for a given list
-    // 2. all the lists we have right now
-    // Entries are separated by '\n'.
- let list = query["list"];
- let hashes = getState(list);
-
- let hash = base64ToString(query["fullhash"]);
- hashes += hash + "\n";
- setState(list, hashes);
-
- let lists = getState("lists");
- if (lists.indexOf(list) == -1) {
- lists += list + "\n";
- setState("lists", lists);
- }
-
- return;
- }
-
- var body = new BinaryInputStream(request.bodyInputStream);
- var avail;
- var bytes = [];
-
- while ((avail = body.available()) > 0) {
- Array.prototype.push.apply(bytes, body.readByteArray(avail));
- }
-
- var responseBody = parseV2Request(bytes);
-
- response.setHeader("Content-Type", "text/plain", false);
- response.write(responseBody);
-}
-
-function parseV2Request(bytes) {
- var table = String.fromCharCode.apply(this, bytes).slice(0,-2);
-
- var ret = "";
- getState("lists").split("\n").forEach(function(list) {
- if (list == table) {
- var completions = getState(list).split("\n");
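-      // Build a V2 update response: "n:" is the next-update interval, "i:" names
-      // the table, and "a:<chunkNum>:<hashSize>:<dataLength>" introduces the
-      // concatenated 32-byte full hashes. The "- 1" accounts for the trailing
-      // empty entry produced by split("\n").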
- ret += "n:1000\n"
- ret += "i:" + list + "\n";
- ret += "a:1:32:" + 32*(completions.length - 1) + "\n";
-
- for (var completion of completions) {
- ret += completion;
- }
- }
- });
-
- return ret;
-}
-
-/* Convert Base64 data to a string */
-const toBinaryTable = [
- -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
- -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
- -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,62, -1,-1,-1,63,
- 52,53,54,55, 56,57,58,59, 60,61,-1,-1, -1, 0,-1,-1,
- -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14,
- 15,16,17,18, 19,20,21,22, 23,24,25,-1, -1,-1,-1,-1,
- -1,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40,
- 41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1
-];
-const base64Pad = '=';
-
-function base64ToString(data) {
- var result = '';
- var leftbits = 0; // number of bits decoded, but yet to be appended
- var leftdata = 0; // bits decoded, but yet to be appended
-
- // Convert one by one.
- for (var i = 0; i < data.length; i++) {
- var c = toBinaryTable[data.charCodeAt(i) & 0x7f];
- var padding = (data[i] == base64Pad);
- // Skip illegal characters and whitespace
- if (c == -1) continue;
-
- // Collect data into leftdata, update bitcount
- leftdata = (leftdata << 6) | c;
- leftbits += 6;
-
- // If we have 8 or more bits, append 8 bits to the result
- if (leftbits >= 8) {
- leftbits -= 8;
- // Append if not padding.
- if (!padding)
- result += String.fromCharCode((leftdata >> leftbits) & 0xff);
- leftdata &= (1 << leftbits) - 1;
- }
- }
-
- // If there are any bits left, the base64 string was corrupted
- if (leftbits)
- throw Components.Exception('Corrupted base64 string');
-
- return result;
-}
diff --git a/toolkit/components/url-classifier/tests/mochitest/vp9.webm b/toolkit/components/url-classifier/tests/mochitest/vp9.webm
deleted file mode 100644
index 221877e30..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/vp9.webm
+++ /dev/null
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html b/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html
deleted file mode 100644
index 620416fc7..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html
+++ /dev/null
@@ -1,15 +0,0 @@
-<!DOCTYPE HTML>
-<!-- Any copyright is dedicated to the Public Domain.
- http://creativecommons.org/publicdomain/zero/1.0/ -->
-<html>
-<head>
-<title></title>
-</head>
-<body>
-
-<script id="badscript" data-touched="not sure" src="http://trackertest.org/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
-
-<script id="goodscript" data-touched="not sure" src="http://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/good.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
-
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/workerFrame.html b/toolkit/components/url-classifier/tests/mochitest/workerFrame.html
deleted file mode 100644
index 69e8dd007..000000000
--- a/toolkit/components/url-classifier/tests/mochitest/workerFrame.html
+++ /dev/null
@@ -1,65 +0,0 @@
-<html>
-<head>
-<title></title>
-
-<script type="text/javascript">
-
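-// The page runs the workers in a chain (see body onload): evilWorker.js and
-// unwantedWorker.js should both be blocked, then cleanWorker.js should load
-// and its importScripts("evilWorker.js") call should be blocked.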
-function startCleanWorker() {
- var worker = new Worker("cleanWorker.js");
-
- worker.onmessage = function(event) {
- if (event.data == "success") {
- window.parent.postMessage("success:blocked importScripts('evilWorker.js')", "*");
- } else {
- window.parent.postMessage("failure:failed to block importScripts('evilWorker.js')", "*");
- }
- window.parent.postMessage("finish", "*");
- };
-
- worker.onerror = function(event) {
-    window.parent.postMessage("failure:failed to load cleanWorker.js", "*");
- window.parent.postMessage("finish", "*");
- };
-
- worker.postMessage("");
-}
-
-function startEvilWorker() {
- var worker = new Worker("evilWorker.js");
-
- worker.onmessage = function(event) {
- window.parent.postMessage("failure:failed to block evilWorker.js", "*");
- startUnwantedWorker();
- };
-
- worker.onerror = function(event) {
- window.parent.postMessage("success:blocked evilWorker.js", "*");
- startUnwantedWorker();
- };
-
- worker.postMessage("");
-}
-
-function startUnwantedWorker() {
- var worker = new Worker("unwantedWorker.js");
-
- worker.onmessage = function(event) {
- window.parent.postMessage("failure:failed to block unwantedWorker.js", "*");
- startCleanWorker();
- };
-
- worker.onerror = function(event) {
- window.parent.postMessage("success:blocked unwantedWorker.js", "*");
- startCleanWorker();
- };
-
- worker.postMessage("");
-}
-
-</script>
-
-</head>
-
-<body onload="startEvilWorker()">
-</body>
-</html>
diff --git a/toolkit/components/url-classifier/tests/moz.build b/toolkit/components/url-classifier/tests/moz.build
deleted file mode 100644
index 599727ab9..000000000
--- a/toolkit/components/url-classifier/tests/moz.build
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-MOCHITEST_MANIFESTS += ['mochitest/mochitest.ini']
-MOCHITEST_CHROME_MANIFESTS += ['mochitest/chrome.ini']
-XPCSHELL_TESTS_MANIFESTS += ['unit/xpcshell.ini']
-
-JAR_MANIFESTS += ['jar.mn']
-
-TESTING_JS_MODULES += [
- 'UrlClassifierTestUtils.jsm',
-]
-
-if CONFIG['ENABLE_TESTS']:
- DIRS += ['gtest']
diff --git a/toolkit/components/url-classifier/tests/unit/.eslintrc.js b/toolkit/components/url-classifier/tests/unit/.eslintrc.js
deleted file mode 100644
index d35787cd2..000000000
--- a/toolkit/components/url-classifier/tests/unit/.eslintrc.js
+++ /dev/null
@@ -1,7 +0,0 @@
-"use strict";
-
-module.exports = {
- "extends": [
- "../../../../../testing/xpcshell/xpcshell.eslintrc.js"
- ]
-};
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest1.chunk b/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
deleted file mode 100644
index 3850373c1..000000000
--- a/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
+++ /dev/null
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest2.chunk b/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
deleted file mode 100644
index 738c96f6b..000000000
--- a/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
+++ /dev/null
@@ -1,2 +0,0 @@
-a:5:32:32
-“Ê_Há^˜aÍ7ÂÙ]´=#ÌnmåÃøún‹æo—ÌQ‰ \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js b/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
deleted file mode 100644
index 21849ced7..000000000
--- a/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
+++ /dev/null
@@ -1,429 +0,0 @@
-//* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- *
-function dumpn(s) {
- dump(s + "\n");
-}
-
-const NS_APP_USER_PROFILE_50_DIR = "ProfD";
-const NS_APP_USER_PROFILE_LOCAL_50_DIR = "ProfLD";
-
-var Cc = Components.classes;
-var Ci = Components.interfaces;
-var Cu = Components.utils;
-var Cr = Components.results;
-
-Cu.import("resource://testing-common/httpd.js");
-
-do_get_profile();
-
-var dirSvc = Cc["@mozilla.org/file/directory_service;1"].getService(Ci.nsIProperties);
-
-var iosvc = Cc["@mozilla.org/network/io-service;1"].getService(Ci.nsIIOService);
-
-var secMan = Cc["@mozilla.org/scriptsecuritymanager;1"]
- .getService(Ci.nsIScriptSecurityManager);
-
-// Disable hashcompleter noise for tests
-var prefBranch = Cc["@mozilla.org/preferences-service;1"].
- getService(Ci.nsIPrefBranch);
-prefBranch.setIntPref("urlclassifier.gethashnoise", 0);
-
-// Enable malware/phishing checking for tests
-prefBranch.setBoolPref("browser.safebrowsing.malware.enabled", true);
-prefBranch.setBoolPref("browser.safebrowsing.blockedURIs.enabled", true);
-prefBranch.setBoolPref("browser.safebrowsing.phishing.enabled", true);
-
-// Enable all completions for tests
-prefBranch.setCharPref("urlclassifier.disallow_completions", "");
-
-// Hash completion timeout
-prefBranch.setIntPref("urlclassifier.gethash.timeout_ms", 5000);
-
-function delFile(name) {
- try {
- // Delete a previously created sqlite file
- var file = dirSvc.get('ProfLD', Ci.nsIFile);
- file.append(name);
- if (file.exists())
- file.remove(false);
- } catch(e) {
- }
-}
-
-function cleanUp() {
- delFile("urlclassifier3.sqlite");
- delFile("safebrowsing/classifier.hashkey");
- delFile("safebrowsing/test-phish-simple.sbstore");
- delFile("safebrowsing/test-malware-simple.sbstore");
- delFile("safebrowsing/test-unwanted-simple.sbstore");
- delFile("safebrowsing/test-block-simple.sbstore");
- delFile("safebrowsing/test-track-simple.sbstore");
- delFile("safebrowsing/test-trackwhite-simple.sbstore");
- delFile("safebrowsing/test-phish-simple.pset");
- delFile("safebrowsing/test-malware-simple.pset");
- delFile("safebrowsing/test-unwanted-simple.pset");
- delFile("safebrowsing/test-block-simple.pset");
- delFile("safebrowsing/test-track-simple.pset");
- delFile("safebrowsing/test-trackwhite-simple.pset");
- delFile("safebrowsing/moz-phish-simple.sbstore");
- delFile("safebrowsing/moz-phish-simple.pset");
- delFile("testLarge.pset");
- delFile("testNoDelta.pset");
-}
-
-// Update uses allTables by default
-var allTables = "test-phish-simple,test-malware-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple";
-var mozTables = "moz-phish-simple";
-
-var dbservice = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(Ci.nsIUrlClassifierDBService);
-var streamUpdater = Cc["@mozilla.org/url-classifier/streamupdater;1"]
- .getService(Ci.nsIUrlClassifierStreamUpdater);
-
-
-/*
- * Builds an update from an object that looks like:
- *{ "test-phish-simple" : [{
- * "chunkType" : "a", // 'a' is assumed if not specified
- * "chunkNum" : 1, // numerically-increasing chunk numbers are assumed
- * // if not specified
- * "urls" : [ "foo.com/a", "foo.com/b", "bar.com/" ]
- * }]}
- */
-
-function buildUpdate(update, hashSize) {
- if (!hashSize) {
- hashSize = 32;
- }
- var updateStr = "n:1000\n";
-
- for (var tableName in update) {
- if (tableName != "")
- updateStr += "i:" + tableName + "\n";
- var chunks = update[tableName];
- for (var j = 0; j < chunks.length; j++) {
- var chunk = chunks[j];
- var chunkType = chunk.chunkType ? chunk.chunkType : 'a';
- var chunkNum = chunk.chunkNum ? chunk.chunkNum : j;
- updateStr += chunkType + ':' + chunkNum + ':' + hashSize;
-
- if (chunk.urls) {
- var chunkData = chunk.urls.join("\n");
- updateStr += ":" + chunkData.length + "\n" + chunkData;
- }
-
- updateStr += "\n";
- }
- }
-
- return updateStr;
-}
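-
-// For example, buildUpdate({ "test-phish-simple": [{ "chunkNum": 1, "urls": ["foo.com/a"] }] })
-// returns the update text:
-//   n:1000
-//   i:test-phish-simple
-//   a:1:32:9
-//   foo.com/a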
-
-function buildPhishingUpdate(chunks, hashSize) {
- return buildUpdate({"test-phish-simple" : chunks}, hashSize);
-}
-
-function buildMalwareUpdate(chunks, hashSize) {
- return buildUpdate({"test-malware-simple" : chunks}, hashSize);
-}
-
-function buildUnwantedUpdate(chunks, hashSize) {
- return buildUpdate({"test-unwanted-simple" : chunks}, hashSize);
-}
-
-function buildBlockedUpdate(chunks, hashSize) {
- return buildUpdate({"test-block-simple" : chunks}, hashSize);
-}
-
-function buildMozPhishingUpdate(chunks, hashSize) {
- return buildUpdate({"moz-phish-simple" : chunks}, hashSize);
-}
-
-function buildBareUpdate(chunks, hashSize) {
- return buildUpdate({"" : chunks}, hashSize);
-}
-
-/**
- * Performs an update of the dbservice manually, bypassing the stream updater
- */
-function doSimpleUpdate(updateText, success, failure) {
- var listener = {
- QueryInterface: function(iid)
- {
- if (iid.equals(Ci.nsISupports) ||
- iid.equals(Ci.nsIUrlClassifierUpdateObserver))
- return this;
- throw Cr.NS_ERROR_NO_INTERFACE;
- },
-
- updateUrlRequested: function(url) { },
- streamFinished: function(status) { },
- updateError: function(errorCode) { failure(errorCode); },
- updateSuccess: function(requestedTimeout) { success(requestedTimeout); }
- };
-
- dbservice.beginUpdate(listener, allTables);
- dbservice.beginStream("", "");
- dbservice.updateStream(updateText);
- dbservice.finishStream();
- dbservice.finishUpdate();
-}
-
-/**
- * Simulates a failed database update.
- */
-function doErrorUpdate(tables, success, failure) {
- var listener = {
- QueryInterface: function(iid)
- {
- if (iid.equals(Ci.nsISupports) ||
- iid.equals(Ci.nsIUrlClassifierUpdateObserver))
- return this;
- throw Cr.NS_ERROR_NO_INTERFACE;
- },
-
- updateUrlRequested: function(url) { },
- streamFinished: function(status) { },
- updateError: function(errorCode) { success(errorCode); },
- updateSuccess: function(requestedTimeout) { failure(requestedTimeout); }
- };
-
- dbservice.beginUpdate(listener, tables, null);
- dbservice.beginStream("", "");
- dbservice.cancelUpdate();
-}
-
-/**
- * Performs an update of the dbservice using the stream updater and a
- * data: uri
- */
-function doStreamUpdate(updateText, success, failure, downloadFailure) {
- var dataUpdate = "data:," + encodeURIComponent(updateText);
-
- if (!downloadFailure) {
- downloadFailure = failure;
- }
-
- streamUpdater.downloadUpdates(allTables, "", true,
- dataUpdate, success, failure, downloadFailure);
-}
-
-var gAssertions = {
-
-tableData : function(expectedTables, cb)
-{
- dbservice.getTables(function(tables) {
- // rebuild the tables in a predictable order.
- var parts = tables.split("\n");
- while (parts[parts.length - 1] == '') {
- parts.pop();
- }
- parts.sort();
- tables = parts.join("\n");
-
- do_check_eq(tables, expectedTables);
- cb();
- });
-},
-
-checkUrls: function(urls, expected, cb, useMoz = false)
-{
- // work with a copy of the list.
- urls = urls.slice(0);
- var doLookup = function() {
- if (urls.length > 0) {
- var tables = useMoz ? mozTables : allTables;
- var fragment = urls.shift();
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + fragment, null, null), {});
- dbservice.lookup(principal, tables,
- function(arg) {
- do_check_eq(expected, arg);
- doLookup();
- }, true);
- } else {
- cb();
- }
- };
- doLookup();
-},
-
-checkTables: function(url, expected, cb)
-{
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + url, null, null), {});
- dbservice.lookup(principal, allTables, function(tables) {
- // Rebuild tables in a predictable order.
- var parts = tables.split(",");
- while (parts[parts.length - 1] == '') {
- parts.pop();
- }
- parts.sort();
- tables = parts.join(",");
- do_check_eq(tables, expected);
- cb();
- }, true);
-},
-
-urlsDontExist: function(urls, cb)
-{
- this.checkUrls(urls, '', cb);
-},
-
-urlsExist: function(urls, cb)
-{
- this.checkUrls(urls, 'test-phish-simple', cb);
-},
-
-malwareUrlsExist: function(urls, cb)
-{
- this.checkUrls(urls, 'test-malware-simple', cb);
-},
-
-unwantedUrlsExist: function(urls, cb)
-{
- this.checkUrls(urls, 'test-unwanted-simple', cb);
-},
-
-blockedUrlsExist: function(urls, cb)
-{
- this.checkUrls(urls, 'test-block-simple', cb);
-},
-
-mozPhishingUrlsExist: function(urls, cb)
-{
- this.checkUrls(urls, 'moz-phish-simple', cb, true);
-},
-
-subsDontExist: function(urls, cb)
-{
- // XXX: there's no interface for checking items in the subs table
- cb();
-},
-
-subsExist: function(urls, cb)
-{
- // XXX: there's no interface for checking items in the subs table
- cb();
-},
-
-urlExistInMultipleTables: function(data, cb)
-{
- this.checkTables(data["url"], data["tables"], cb);
-}
-
-};
-
-/**
- * Check a set of assertions against the gAssertions table.
- */
-function checkAssertions(assertions, doneCallback)
-{
- var checkAssertion = function() {
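-    // Runs one assertion per call: the for-in loop picks the first remaining
-    // entry, deletes it, and hands checkAssertion to the gAssertions handler as
-    // its completion callback before returning; the handler re-enters this
-    // function for the next entry.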
- for (var i in assertions) {
- var data = assertions[i];
- delete assertions[i];
- gAssertions[i](data, checkAssertion);
- return;
- }
-
- doneCallback();
- }
-
- checkAssertion();
-}
-
-function updateError(arg)
-{
- do_throw(arg);
-}
-
-// Runs a set of updates, and then checks a set of assertions.
-function doUpdateTest(updates, assertions, successCallback, errorCallback) {
- var errorUpdate = function() {
- checkAssertions(assertions, errorCallback);
- }
-
- var runUpdate = function() {
- if (updates.length > 0) {
- var update = updates.shift();
- doStreamUpdate(update, runUpdate, errorUpdate, null);
- } else {
- checkAssertions(assertions, successCallback);
- }
- }
-
- runUpdate();
-}
-
-var gTests;
-var gNextTest = 0;
-
-function runNextTest()
-{
- if (gNextTest >= gTests.length) {
- do_test_finished();
- return;
- }
-
- dbservice.resetDatabase();
- dbservice.setHashCompleter('test-phish-simple', null);
-
- let test = gTests[gNextTest++];
- dump("running " + test.name + "\n");
- test();
-}
-
-function runTests(tests)
-{
- gTests = tests;
- runNextTest();
-}
-
-var timerArray = [];
-
-function Timer(delay, cb) {
- this.cb = cb;
- var timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
- timer.initWithCallback(this, delay, timer.TYPE_ONE_SHOT);
- timerArray.push(timer);
-}
-
-Timer.prototype = {
-QueryInterface: function(iid) {
- if (!iid.equals(Ci.nsISupports) && !iid.equals(Ci.nsITimerCallback)) {
- throw Cr.NS_ERROR_NO_INTERFACE;
- }
- return this;
- },
-notify: function(timer) {
- this.cb();
- }
-}
-
-// LFSRgenerator is a 32-bit linear feedback shift register random number
-// generator. It is highly predictable and is not intended to be used for
-// cryptography but rather to allow easier debugging than a test that uses
-// Math.random().
-function LFSRgenerator(seed) {
- // Force |seed| to be a number.
- seed = +seed;
- // LFSR generators do not work with a value of 0.
- if (seed == 0)
- seed = 1;
-
- this._value = seed;
-}
-LFSRgenerator.prototype = {
-  // nextNum returns a random unsigned integer in the range [0, 2^|bits|).
- nextNum: function(bits) {
- if (!bits)
- bits = 32;
-
- let val = this._value;
- // Taps are 32, 22, 2 and 1.
- let bit = ((val >>> 0) ^ (val >>> 10) ^ (val >>> 30) ^ (val >>> 31)) & 1;
- val = (val >>> 1) | (bit << 31);
- this._value = val;
-
- return (val >>> (32 - bits));
- },
-};
-
-cleanUp();
diff --git a/toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js b/toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js
deleted file mode 100644
index 37f39d1a8..000000000
--- a/toolkit/components/url-classifier/tests/unit/tail_urlclassifier.js
+++ /dev/null
@@ -1 +0,0 @@
-cleanUp();
diff --git a/toolkit/components/url-classifier/tests/unit/test_addsub.js b/toolkit/components/url-classifier/tests/unit/test_addsub.js
deleted file mode 100644
index 1ed65c7ba..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_addsub.js
+++ /dev/null
@@ -1,488 +0,0 @@
-
-function doTest(updates, assertions)
-{
- doUpdateTest(updates, assertions, runNextTest, updateError);
-}
-
-// Test an add of two urls to a fresh database
-function testSimpleAdds() {
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : addUrls
- };
-
- doTest([update], assertions);
-}
-
-// Same as testSimpleAdds, but make the same-domain URLs come from different
-// chunks.
-function testMultipleAdds() {
- var add1Urls = [ "foo.com/a", "bar.com/c" ];
- var add2Urls = [ "foo.com/b" ];
-
- var update = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls },
- { "chunkNum" : 2,
- "urls" : add2Urls }]);
- var assertions = {
- "tableData" : "test-phish-simple;a:1-2",
- "urlsExist" : add1Urls.concat(add2Urls)
- };
-
- doTest([update], assertions);
-}
-
-// Test that a sub will remove an existing add
-function testSimpleSub()
-{
- var addUrls = ["foo.com/a", "bar.com/b"];
- var subUrls = ["1:foo.com/a"];
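-  // Sub urls carry the number of the add chunk they apply to, in the form
-  // "<addChunkNum>:<url>".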
-
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
- "urls": addUrls }]);
-
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 50,
- "chunkType" : "s",
- "urls": subUrls }]);
-
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1:s:50",
- "urlsExist" : [ "bar.com/b" ],
- "urlsDontExist": ["foo.com/a" ],
- "subsDontExist" : [ "foo.com/a" ]
- }
-
- doTest([addUpdate, subUpdate], assertions);
-
-}
-
-// Same as testSimpleSub(), but the sub comes in before the add.
-function testSubEmptiesAdd()
-{
- var subUrls = ["1:foo.com/a"];
- var addUrls = ["foo.com/a", "bar.com/b"];
-
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 50,
- "chunkType" : "s",
- "urls": subUrls }]);
-
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls": addUrls }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1:s:50",
- "urlsExist" : [ "bar.com/b" ],
- "urlsDontExist": ["foo.com/a" ],
- "subsDontExist" : [ "foo.com/a" ] // this sub was found, it shouldn't exist anymore
- }
-
- doTest([subUpdate, addUpdate], assertions);
-}
-
-// Very similar to testSubEmptiesAdd, except that the domain entry will
-// still have an item left over that needs to be synced.
-function testSubPartiallyEmptiesAdd()
-{
- var subUrls = ["1:foo.com/a"];
- var addUrls = ["foo.com/a", "foo.com/b", "bar.com/b"];
-
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "s",
- "urls": subUrls }]);
-
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
- "urls": addUrls }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1:s:1",
- "urlsExist" : [ "foo.com/b", "bar.com/b" ],
- "urlsDontExist" : ["foo.com/a" ],
- "subsDontExist" : [ "foo.com/a" ] // this sub was found, it shouldn't exist anymore
- }
-
- doTest([subUpdate, addUpdate], assertions);
-}
-
-// We SHOULD be testing that pending subs are removed using
-// subsDontExist assertions. Since we don't have a good interface for getting
-// at sub entries, we'll verify it by side-effect. Subbing a url once
-// then adding it twice should leave the url intact.
-function testPendingSubRemoved()
-{
- var subUrls = ["1:foo.com/a", "2:foo.com/b"];
- var addUrls = ["foo.com/a", "foo.com/b"];
-
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "s",
- "urls": subUrls }]);
-
- var addUpdate1 = buildPhishingUpdate(
- [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
- "urls": addUrls }]);
-
- var addUpdate2 = buildPhishingUpdate(
- [{ "chunkNum" : 2,
- "urls": addUrls }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1-2:s:1",
- "urlsExist" : [ "foo.com/a", "foo.com/b" ],
- "subsDontExist" : [ "foo.com/a", "foo.com/b" ] // this sub was found, it shouldn't exist anymore
- }
-
- doTest([subUpdate, addUpdate1, addUpdate2], assertions);
-}
-
-// Make sure that a saved sub is removed when the sub chunk is expired.
-function testPendingSubExpire()
-{
- var subUrls = ["1:foo.com/a", "1:foo.com/b"];
- var addUrls = ["foo.com/a", "foo.com/b"];
-
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "s",
- "urls": subUrls }]);
-
- var expireUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "sd" }]);
-
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1, // adds and subtracts don't share a chunk numbering space
- "urls": addUrls }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : [ "foo.com/a", "foo.com/b" ],
- "subsDontExist" : [ "foo.com/a", "foo.com/b" ] // this sub was expired
- }
-
- doTest([subUpdate, expireUpdate, addUpdate], assertions);
-}
-
-// Make sure that the sub url removes from only the chunk that it specifies
-function testDuplicateAdds()
-{
- var urls = ["foo.com/a"];
-
- var addUpdate1 = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls": urls }]);
- var addUpdate2 = buildPhishingUpdate(
- [{ "chunkNum" : 2,
- "urls": urls }]);
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 3,
- "chunkType" : "s",
- "urls": ["2:foo.com/a"]}]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1-2:s:3",
- "urlsExist" : [ "foo.com/a"],
- "subsDontExist" : [ "foo.com/a"]
- }
-
- doTest([addUpdate1, addUpdate2, subUpdate], assertions);
-}
-
-// Tests a sub which matches some existing adds but leaves others.
-function testSubPartiallyMatches()
-{
- var subUrls = ["foo.com/a"];
- var addUrls = ["1:foo.com/a", "2:foo.com/b"];
-
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls" : addUrls }]);
-
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "s",
- "urls" : addUrls }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1:s:1",
- "urlsDontExist" : ["foo.com/a"],
- "subsDontExist" : ["foo.com/a"],
- "subsExist" : ["foo.com/b"]
- };
-
- doTest([addUpdate, subUpdate], assertions);
-}
-
-// XXX: because subsExist isn't actually implemented, this is the same
-// test as above but with a second add chunk that should fail to be added
-// because of a pending sub chunk.
-function testSubPartiallyMatches2()
-{
- var addUrls = ["foo.com/a"];
- var subUrls = ["1:foo.com/a", "2:foo.com/b"];
- var addUrls2 = ["foo.com/b"];
-
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls" : addUrls }]);
-
- var subUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "s",
- "urls" : subUrls }]);
-
- var addUpdate2 = buildPhishingUpdate(
- [{ "chunkNum" : 2,
- "urls" : addUrls2 }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1-2:s:1",
- "urlsDontExist" : ["foo.com/a", "foo.com/b"],
- "subsDontExist" : ["foo.com/a", "foo.com/b"]
- };
-
- doTest([addUpdate, subUpdate, addUpdate2], assertions);
-}
-
-// Verify that two subs for the same domain but from different chunks
-// match (tests that existing sub entries are properly updated)
-function testSubsDifferentChunks() {
- var subUrls1 = [ "3:foo.com/a" ];
- var subUrls2 = [ "3:foo.com/b" ];
-
- var addUrls = [ "foo.com/a", "foo.com/b", "foo.com/c" ];
-
- var subUpdate1 = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "s",
- "urls": subUrls1 }]);
- var subUpdate2 = buildPhishingUpdate(
- [{ "chunkNum" : 2,
- "chunkType" : "s",
- "urls" : subUrls2 }]);
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 3,
- "urls" : addUrls }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:3:s:1-2",
- "urlsExist" : [ "foo.com/c" ],
- "urlsDontExist" : [ "foo.com/a", "foo.com/b" ],
- "subsDontExist" : [ "foo.com/a", "foo.com/b" ]
- };
-
- doTest([subUpdate1, subUpdate2, addUpdate], assertions);
-}
-
-// for bug 534079
-function testSubsDifferentChunksSameHostId() {
- var subUrls1 = [ "1:foo.com/a" ];
- var subUrls2 = [ "1:foo.com/b", "2:foo.com/c" ];
-
- var addUrls = [ "foo.com/a", "foo.com/b" ];
- var addUrls2 = [ "foo.com/c" ];
-
- var subUpdate1 = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "chunkType" : "s",
- "urls": subUrls1 }]);
- var subUpdate2 = buildPhishingUpdate(
- [{ "chunkNum" : 2,
- "chunkType" : "s",
- "urls" : subUrls2 }]);
-
- var addUpdate = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls" : addUrls }]);
- var addUpdate2 = buildPhishingUpdate(
- [{ "chunkNum" : 2,
- "urls" : addUrls2 }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1-2:s:1-2",
- "urlsDontExist" : [ "foo.com/c", "foo.com/b", "foo.com/a", ],
- };
-
- doTest([addUpdate, addUpdate2, subUpdate1, subUpdate2], assertions);
-}
-
-// Test lists of expired chunks
-function testExpireLists() {
- var addUpdate = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : [ "foo.com/a" ]
- },
- { "chunkNum" : 3,
- "urls" : [ "bar.com/a" ]
- },
- { "chunkNum" : 4,
- "urls" : [ "baz.com/a" ]
- },
- { "chunkNum" : 5,
- "urls" : [ "blah.com/a" ]
- },
- ]);
- var subUpdate = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "chunkType" : "s",
- "urls" : [ "50:foo.com/1" ]
- },
- { "chunkNum" : 2,
- "chunkType" : "s",
- "urls" : [ "50:bar.com/1" ]
- },
- { "chunkNum" : 3,
- "chunkType" : "s",
- "urls" : [ "50:baz.com/1" ]
- },
- { "chunkNum" : 5,
- "chunkType" : "s",
- "urls" : [ "50:blah.com/1" ]
- },
- ]);
-
- var expireUpdate = buildPhishingUpdate(
- [ { "chunkType" : "ad:1,3-5" },
- { "chunkType" : "sd:1-3,5" }]);
-
- var assertions = {
- // "tableData" : "test-phish-simple;"
- "tableData": ""
- };
-
- doTest([addUpdate, subUpdate, expireUpdate], assertions);
-}
-
-// Test a duplicate add chunk.
-function testDuplicateAddChunks() {
- var addUrls1 = [ "foo.com/a" ];
- var addUrls2 = [ "bar.com/b" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls1
- },
- { "chunkNum" : 1,
- "urls" : addUrls2
- }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : addUrls1,
- "urlsDontExist" : addUrls2
- };
-
- doTest([update], assertions);
-}
-
-// This test is a bit tricky. We want to test that an add removes all
-// subs with the same add chunk id, even if there is no match. To do
-// that we need to add the same add chunk twice, with an expiration
-// in the middle. This would be easier if subsDontExist actually
-// worked...
-function testExpireWholeSub()
-{
- var subUrls = ["1:foo.com/a"];
-
- var update = buildPhishingUpdate(
- [{ "chunkNum" : 5,
- "chunkType" : "s",
- "urls" : subUrls
- },
- // empty add chunk should still cause foo.com/a to go away.
- { "chunkNum" : 1,
- "urls" : []
- },
- // and now adding chunk 1 again with foo.com/a should succeed,
- // because the sub should have been expired with the empty
- // add chunk.
-
- // we need to expire this chunk to let us add chunk 1 again.
- {
- "chunkType" : "ad:1"
- },
- { "chunkNum" : 1,
- "urls" : [ "foo.com/a" ]
- }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1:s:5",
- "urlsExist" : ["foo.com/a"]
- };
-
- doTest([update], assertions);
-}
-
-
-// This test is roughly the opposite of testExpireWholeSub(). We add
-// the empty add first, and make sure that it prevents a sub for that
-// add from being applied.
-function testPreventWholeSub()
-{
- var subUrls = ["1:foo.com/a"];
-
- var update = buildPhishingUpdate(
- [ // empty add chunk should cause foo.com/a to not be saved
- { "chunkNum" : 1,
- "urls" : []
- },
- { "chunkNum" : 5,
- "chunkType" : "s",
- "urls" : subUrls
- },
-     // and now adding chunk 1 again with foo.com/a should succeed,
-     // because the sub was already applied (and discarded) when it
-     // arrived, since add chunk 1 existed at that point.
-
- // we need to expire this chunk to let us add chunk 1 again.
- {
- "chunkType" : "ad:1"
- },
- { "chunkNum" : 1,
- "urls" : [ "foo.com/a" ]
- }]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1:s:5",
- "urlsExist" : ["foo.com/a"]
- };
-
- doTest([update], assertions);
-}
-
-function run_test()
-{
- runTests([
- testSimpleAdds,
- testMultipleAdds,
- testSimpleSub,
- testSubEmptiesAdd,
- testSubPartiallyEmptiesAdd,
- testPendingSubRemoved,
- testPendingSubExpire,
- testDuplicateAdds,
- testSubPartiallyMatches,
- testSubPartiallyMatches2,
- testSubsDifferentChunks,
- testSubsDifferentChunksSameHostId,
- testExpireLists
- ]);
-}
-
-do_test_pending();
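
The "tableData" strings asserted throughout the removed test above encode per-table chunk state as "<table>;a:<addChunks>:s:<subChunks>". A minimal sketch of a parser for that string follows; the helper name and the assumption that add ranges precede sub ranges are mine, not part of the removed file.

// Illustrative sketch: parse a tableData assertion such as
// "test-phish-simple;a:1-2:s:3" into its table name and chunk ranges.
// Assumes the "a:" ranges, when present, come before the "s:" ranges,
// which is how every assertion above writes them.
function parseTableData(line) {
  var semi = line.indexOf(";");
  var table = line.substring(0, semi);
  var state = line.substring(semi + 1);
  var adds = (/a:([\d,-]+)/.exec(state) || [])[1] || "";
  var subs = (/s:([\d,-]+)/.exec(state) || [])[1] || "";
  return { table: table, addChunks: adds, subChunks: subs };
}

// parseTableData("test-phish-simple;a:1-2:s:3")
//   -> { table: "test-phish-simple", addChunks: "1-2", subChunks: "3" }
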
diff --git a/toolkit/components/url-classifier/tests/unit/test_backoff.js b/toolkit/components/url-classifier/tests/unit/test_backoff.js
deleted file mode 100644
index 365568c47..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_backoff.js
+++ /dev/null
@@ -1,89 +0,0 @@
-// Some unittests (e.g., paste into JS shell)
-var jslib = Cc["@mozilla.org/url-classifier/jslib;1"].
- getService().wrappedJSObject;
-var _Datenow = jslib.Date.now;
-function setNow(time) {
- jslib.Date.now = function() {
- return time;
- }
-}
-
-function run_test() {
- // 3 errors, 1ms retry period, max 3 requests per ten milliseconds,
- // 5ms backoff interval, 19ms max delay
- var rb = new jslib.RequestBackoff(3, 1, 3, 10, 5, 19);
- setNow(1);
- rb.noteServerResponse(200);
- do_check_true(rb.canMakeRequest());
- setNow(2);
- do_check_true(rb.canMakeRequest());
-
- // First error should trigger a 1ms delay
- rb.noteServerResponse(500);
- do_check_false(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 3);
- setNow(3);
- do_check_true(rb.canMakeRequest());
-
- // Second error should also trigger a 1ms delay
- rb.noteServerResponse(500);
- do_check_false(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 4);
- setNow(4);
- do_check_true(rb.canMakeRequest());
-
- // Third error should trigger a 5ms backoff
- rb.noteServerResponse(500);
- do_check_false(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 9);
- setNow(9);
- do_check_true(rb.canMakeRequest());
-
- // Trigger backoff again
- rb.noteServerResponse(503);
- do_check_false(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 19);
- setNow(19);
- do_check_true(rb.canMakeRequest());
-
- // Trigger backoff a third time and hit max timeout
- rb.noteServerResponse(302);
- do_check_false(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 38);
- setNow(38);
- do_check_true(rb.canMakeRequest());
-
- // One more backoff, should still be at the max timeout
- rb.noteServerResponse(400);
- do_check_false(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 57);
- setNow(57);
- do_check_true(rb.canMakeRequest());
-
- // Request goes through
- rb.noteServerResponse(200);
- do_check_true(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 0);
- setNow(58);
- rb.noteServerResponse(500);
-
- // Another error, should trigger a 1ms backoff
- do_check_false(rb.canMakeRequest());
- do_check_eq(rb.nextRequestTime_, 59);
-
- setNow(59);
- do_check_true(rb.canMakeRequest());
-
- setNow(200);
- rb.noteRequest();
- setNow(201);
- rb.noteRequest();
- setNow(202);
- do_check_true(rb.canMakeRequest());
- rb.noteRequest();
- do_check_false(rb.canMakeRequest());
- setNow(211);
- do_check_true(rb.canMakeRequest());
-
- jslib.Date.now = _Datenow;
-}
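
The removed test_backoff.js above steps jslib.RequestBackoff through its error states. The delay schedule those assertions imply can be summarised by the sketch below; this is a reconstruction from the expected times in the test, not the actual RequestBackoff implementation, and the parameter names are mine.

// Approximate delay schedule implied by the assertions above for a backoff
// constructed as RequestBackoff(3, 1, 3, 10, 5, 19):
//  - fewer than 3 consecutive errors: wait the 1ms retry period;
//  - 3 or more errors: start at the 5ms backoff interval and double each
//    time, capped at the 19ms maximum delay.
function expectedDelay(errorCount, retryMs, backoffMs, maxMs) {
  if (errorCount < 3) {
    return retryMs;
  }
  return Math.min(backoffMs * Math.pow(2, errorCount - 3), maxMs);
}

// expectedDelay(2, 1, 5, 19) === 1   // second error: nextRequestTime_ = 4
// expectedDelay(3, 1, 5, 19) === 5   // third error: nextRequestTime_ = 9
// expectedDelay(4, 1, 5, 19) === 10  // fourth error: nextRequestTime_ = 19
// expectedDelay(5, 1, 5, 19) === 19  // capped: nextRequestTime_ = 38, then 57
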
diff --git a/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js b/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
deleted file mode 100644
index 037bc7b88..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
+++ /dev/null
@@ -1,32 +0,0 @@
-Cu.import("resource://gre/modules/SafeBrowsing.jsm");
-Cu.import("resource://gre/modules/Services.jsm");
-Cu.import("resource://testing-common/AppInfo.jsm");
-
-// 'Cc["@mozilla.org/xre/app-info;1"]' for xpcshell has no nsIXULAppInfo,
-// so we have to update it to make nsURLFormatter.js happy.
-// (SafeBrowsing.init() will indirectly use nsURLFormatter.js)
-updateAppInfo();
-
-function run_test() {
- SafeBrowsing.init();
-
- let origList = Services.prefs.getCharPref("browser.safebrowsing.provider.google.lists");
-
- // Remove 'goog-malware-shavar' from the original.
- let trimmedList = origList.replace('goog-malware-shavar,', '');
- Services.prefs.setCharPref("browser.safebrowsing.provider.google.lists", trimmedList);
-
- try {
- // Bug 1274685 - Unowned Safe Browsing tables break list updates
- //
- // If SafeBrowsing.registerTableWithURLs() doesn't check if
- // a provider is found before registering table, an exception
- // will be thrown while accessing a null object.
- //
- SafeBrowsing.registerTables();
- } catch (e) {
- ok(false, 'Exception thrown due to ' + e.toString());
- }
-
- Services.prefs.setCharPref("browser.safebrowsing.provider.google.lists", origList);
-}
diff --git a/toolkit/components/url-classifier/tests/unit/test_dbservice.js b/toolkit/components/url-classifier/tests/unit/test_dbservice.js
deleted file mode 100644
index 4b01e7016..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_dbservice.js
+++ /dev/null
@@ -1,314 +0,0 @@
-var checkUrls = [];
-var checkExpect;
-
-var chunk1Urls = [
- "test.com/aba",
- "test.com/foo/bar",
- "foo.bar.com/a/b/c"
-];
-var chunk1 = chunk1Urls.join("\n");
-
-var chunk2Urls = [
- "blah.com/a",
- "baz.com/",
- "255.255.0.1/",
- "www.foo.com/test2?param=1"
-];
-var chunk2 = chunk2Urls.join("\n");
-
-var chunk3Urls = [
- "test.com/a",
- "foo.bar.com/a",
- "blah.com/a",
- ];
-var chunk3 = chunk3Urls.join("\n");
-
-var chunk3SubUrls = [
- "1:test.com/a",
- "1:foo.bar.com/a",
- "2:blah.com/a" ];
-var chunk3Sub = chunk3SubUrls.join("\n");
-
-var chunk4Urls = [
- "a.com/b",
- "b.com/c",
- ];
-var chunk4 = chunk4Urls.join("\n");
-
-var chunk5Urls = [
- "d.com/e",
- "f.com/g",
- ];
-var chunk5 = chunk5Urls.join("\n");
-
-var chunk6Urls = [
- "h.com/i",
- "j.com/k",
- ];
-var chunk6 = chunk6Urls.join("\n");
-
-var chunk7Urls = [
- "l.com/m",
- "n.com/o",
- ];
-var chunk7 = chunk7Urls.join("\n");
-
-// we are going to add chunks 1, 2, 4, 5, and 6 to phish-simple,
-// chunk 2 to malware-simple, and chunk 3 to unwanted-simple,
-// and chunk 7 to block-simple.
-// Then we'll remove the urls in chunk3 from phish-simple, then
-// expire chunk 1 and chunks 4-6 from phish-simple.
-var phishExpected = {};
-var phishUnexpected = {};
-var malwareExpected = {};
-var unwantedExpected = {};
-var blockedExpected = {};
-for (var i = 0; i < chunk2Urls.length; i++) {
- phishExpected[chunk2Urls[i]] = true;
- malwareExpected[chunk2Urls[i]] = true;
-}
-for (var i = 0; i < chunk3Urls.length; i++) {
- unwantedExpected[chunk3Urls[i]] = true;
- delete phishExpected[chunk3Urls[i]];
- phishUnexpected[chunk3Urls[i]] = true;
-}
-for (var i = 0; i < chunk1Urls.length; i++) {
- // chunk1 urls are expired
- phishUnexpected[chunk1Urls[i]] = true;
-}
-for (var i = 0; i < chunk4Urls.length; i++) {
- // chunk4 urls are expired
- phishUnexpected[chunk4Urls[i]] = true;
-}
-for (var i = 0; i < chunk5Urls.length; i++) {
- // chunk5 urls are expired
- phishUnexpected[chunk5Urls[i]] = true;
-}
-for (var i = 0; i < chunk6Urls.length; i++) {
- // chunk6 urls are expired
- phishUnexpected[chunk6Urls[i]] = true;
-}
-for (var i = 0; i < chunk7Urls.length; i++) {
- blockedExpected[chunk7Urls[i]] = true;
-  // chunk7 urls were only added to block-simple, so they should not be in phish-simple
- phishUnexpected[chunk7Urls[i]] = true;
-}
-
-// Check that the entries hit based on sub-parts
-phishExpected["baz.com/foo/bar"] = true;
-phishExpected["foo.bar.baz.com/foo"] = true;
-phishExpected["bar.baz.com/"] = true;
-
-var numExpecting;
-
-function testFailure(arg) {
- do_throw(arg);
-}
-
-function checkNoHost()
-{
- // Looking up a no-host uri such as a data: uri should throw an exception.
- var exception;
- try {
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("data:text/html,<b>test</b>", null, null), {});
- dbservice.lookup(principal, allTables);
-
- exception = false;
- } catch(e) {
- exception = true;
- }
- do_check_true(exception);
-
- do_test_finished();
-}
-
-function tablesCallbackWithoutSub(tables)
-{
- var parts = tables.split("\n");
- parts.sort();
-
- // there's a leading \n here because splitting left an empty string
- // after the trailing newline, which will sort first
- do_check_eq(parts.join("\n"),
- "\ntest-block-simple;a:1\ntest-malware-simple;a:1\ntest-phish-simple;a:2\ntest-unwanted-simple;a:1");
-
- checkNoHost();
-}
-
-
-function expireSubSuccess(result) {
- dbservice.getTables(tablesCallbackWithoutSub);
-}
-
-function tablesCallbackWithSub(tables)
-{
- var parts = tables.split("\n");
- parts.sort();
-
- // there's a leading \n here because splitting left an empty string
- // after the trailing newline, which will sort first
- do_check_eq(parts.join("\n"),
- "\ntest-block-simple;a:1\ntest-malware-simple;a:1\ntest-phish-simple;a:2:s:3\ntest-unwanted-simple;a:1");
-
- // verify that expiring a sub chunk removes its name from the list
- var data =
- "n:1000\n" +
- "i:test-phish-simple\n" +
- "sd:3\n";
-
- doSimpleUpdate(data, expireSubSuccess, testFailure);
-}
-
-function checkChunksWithSub()
-{
- dbservice.getTables(tablesCallbackWithSub);
-}
-
-function checkDone() {
- if (--numExpecting == 0)
- checkChunksWithSub();
-}
-
-function phishExists(result) {
- dumpn("phishExists: " + result);
- try {
- do_check_true(result.indexOf("test-phish-simple") != -1);
- } finally {
- checkDone();
- }
-}
-
-function phishDoesntExist(result) {
- dumpn("phishDoesntExist: " + result);
- try {
- do_check_true(result.indexOf("test-phish-simple") == -1);
- } finally {
- checkDone();
- }
-}
-
-function malwareExists(result) {
- dumpn("malwareExists: " + result);
-
- try {
- do_check_true(result.indexOf("test-malware-simple") != -1);
- } finally {
- checkDone();
- }
-}
-
-function unwantedExists(result) {
- dumpn("unwantedExists: " + result);
-
- try {
- do_check_true(result.indexOf("test-unwanted-simple") != -1);
- } finally {
- checkDone();
- }
-}
-
-function blockedExists(result) {
- dumpn("blockedExists: " + result);
-
- try {
- do_check_true(result.indexOf("test-block-simple") != -1);
- } finally {
- checkDone();
- }
-}
-
-function checkState()
-{
- numExpecting = 0;
-
-
- for (var key in phishExpected) {
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
- dbservice.lookup(principal, allTables, phishExists, true);
- numExpecting++;
- }
-
- for (var key in phishUnexpected) {
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
- dbservice.lookup(principal, allTables, phishDoesntExist, true);
- numExpecting++;
- }
-
- for (var key in malwareExpected) {
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
- dbservice.lookup(principal, allTables, malwareExists, true);
- numExpecting++;
- }
-
- for (var key in unwantedExpected) {
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
- dbservice.lookup(principal, allTables, unwantedExists, true);
- numExpecting++;
- }
-
- for (var key in blockedExpected) {
- var principal = secMan.createCodebasePrincipal(iosvc.newURI("http://" + key, null, null), {});
- dbservice.lookup(principal, allTables, blockedExists, true);
- numExpecting++;
- }
-}
-
-function testSubSuccess(result)
-{
- do_check_eq(result, "1000");
- checkState();
-}
-
-function do_subs() {
- var data =
- "n:1000\n" +
- "i:test-phish-simple\n" +
- "s:3:32:" + chunk3Sub.length + "\n" +
- chunk3Sub + "\n" +
- "ad:1\n" +
- "ad:4-6\n";
-
- doSimpleUpdate(data, testSubSuccess, testFailure);
-}
-
-function testAddSuccess(arg) {
- do_check_eq(arg, "1000");
-
- do_subs();
-}
-
-function do_adds() {
- // This test relies on the fact that only -regexp tables are ungzipped,
- // and only -hash tables are assumed to be pre-md5'd. So we use
- // a 'simple' table type to get simple hostname-per-line semantics.
-
- var data =
- "n:1000\n" +
- "i:test-phish-simple\n" +
- "a:1:32:" + chunk1.length + "\n" +
- chunk1 + "\n" +
- "a:2:32:" + chunk2.length + "\n" +
- chunk2 + "\n" +
- "a:4:32:" + chunk4.length + "\n" +
- chunk4 + "\n" +
- "a:5:32:" + chunk5.length + "\n" +
- chunk5 + "\n" +
- "a:6:32:" + chunk6.length + "\n" +
- chunk6 + "\n" +
- "i:test-malware-simple\n" +
- "a:1:32:" + chunk2.length + "\n" +
- chunk2 + "\n" +
- "i:test-unwanted-simple\n" +
- "a:1:32:" + chunk3.length + "\n" +
- chunk3 + "\n" +
- "i:test-block-simple\n" +
- "a:1:32:" + chunk7.length + "\n" +
- chunk7 + "\n";
-
- doSimpleUpdate(data, testAddSuccess, testFailure);
-}
-
-function run_test() {
- do_adds();
- do_test_pending();
-}
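
do_adds() and do_subs() above assemble the raw Safe Browsing v2 update body by hand. The helper below restates the chunk-line layout they use; it is illustrative only and not part of the removed test.

// Builds one add ("a") or sub ("s") chunk line in the same
// "<type>:<chunkNum>:<hashSize>:<byteLength>\n<data>\n" layout that
// do_adds() and do_subs() concatenate by hand above.
function chunkLine(type, chunkNum, urls, hashSize) {
  var data = urls.join("\n");
  return type + ":" + chunkNum + ":" + (hashSize || 32) + ":" +
         data.length + "\n" + data + "\n";
}

// "n:1000\ni:test-phish-simple\n" + chunkLine("a", 1, chunk1Urls)
// reproduces the first part of the update body built in do_adds().
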
diff --git a/toolkit/components/url-classifier/tests/unit/test_digest256.js b/toolkit/components/url-classifier/tests/unit/test_digest256.js
deleted file mode 100644
index 6ae652915..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_digest256.js
+++ /dev/null
@@ -1,147 +0,0 @@
-Cu.import("resource://gre/modules/XPCOMUtils.jsm");
-
-XPCOMUtils.defineLazyModuleGetter(this, "NetUtil",
- "resource://gre/modules/NetUtil.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "Promise",
- "resource://gre/modules/Promise.jsm");
-// Global test server for serving safebrowsing updates.
-var gHttpServ = null;
-// Global nsIUrlClassifierDBService
-var gDbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
- .getService(Ci.nsIUrlClassifierDBService);
-// Security manager for creating nsIPrincipals from URIs
-var gSecMan = Cc["@mozilla.org/scriptsecuritymanager;1"]
- .getService(Ci.nsIScriptSecurityManager);
-
-// A map of tables to arrays of update redirect urls.
-var gTables = {};
-
-// Construct an update from a file.
-function readFileToString(aFilename) {
- let f = do_get_file(aFilename);
- let stream = Cc["@mozilla.org/network/file-input-stream;1"]
- .createInstance(Ci.nsIFileInputStream);
- stream.init(f, -1, 0, 0);
- let buf = NetUtil.readInputStreamToString(stream, stream.available());
- return buf;
-}
-
-// Registers a table for which to serve update chunks. Returns a promise that
-// resolves when that chunk has been downloaded.
-function registerTableUpdate(aTable, aFilename) {
- let deferred = Promise.defer();
- // If we haven't been given an update for this table yet, add it to the map
- if (!(aTable in gTables)) {
- gTables[aTable] = [];
- }
-
- // The number of chunks associated with this table.
- let numChunks = gTables[aTable].length + 1;
- let redirectPath = "/" + aTable + "-" + numChunks;
- let redirectUrl = "localhost:4444" + redirectPath;
-
- // Store redirect url for that table so we can return it later when we
- // process an update request.
- gTables[aTable].push(redirectUrl);
-
- gHttpServ.registerPathHandler(redirectPath, function(request, response) {
- do_print("Mock safebrowsing server handling request for " + redirectPath);
- let contents = readFileToString(aFilename);
- response.setHeader("Content-Type",
- "application/vnd.google.safebrowsing-update", false);
- response.setStatusLine(request.httpVersion, 200, "OK");
- response.bodyOutputStream.write(contents, contents.length);
- deferred.resolve(contents);
- });
- return deferred.promise;
-}
-
-// Construct a response with redirect urls.
-function processUpdateRequest() {
- let response = "n:1000\n";
- for (let table in gTables) {
- response += "i:" + table + "\n";
- for (let i = 0; i < gTables[table].length; ++i) {
- response += "u:" + gTables[table][i] + "\n";
- }
- }
- do_print("Returning update response: " + response);
- return response;
-}
-
-// Set up our test server to handle update requests.
-function run_test() {
- gHttpServ = new HttpServer();
- gHttpServ.registerDirectory("/", do_get_cwd());
-
- gHttpServ.registerPathHandler("/downloads", function(request, response) {
- let buf = NetUtil.readInputStreamToString(request.bodyInputStream,
- request.bodyInputStream.available());
- let blob = processUpdateRequest();
- response.setHeader("Content-Type",
- "application/vnd.google.safebrowsing-update", false);
- response.setStatusLine(request.httpVersion, 200, "OK");
- response.bodyOutputStream.write(blob, blob.length);
- });
-
- gHttpServ.start(4444);
- run_next_test();
-}
-
-function createURI(s) {
- let service = Cc["@mozilla.org/network/io-service;1"]
- .getService(Ci.nsIIOService);
- return service.newURI(s, null, null);
-}
-
-// Just throw if we ever get an update or download error.
-function handleError(aEvent) {
- do_throw("We didn't download or update correctly: " + aEvent);
-}
-
-add_test(function test_update() {
- let streamUpdater = Cc["@mozilla.org/url-classifier/streamupdater;1"]
- .getService(Ci.nsIUrlClassifierStreamUpdater);
-
- // Load up some update chunks for the safebrowsing server to serve.
- registerTableUpdate("goog-downloadwhite-digest256", "data/digest1.chunk");
- registerTableUpdate("goog-downloadwhite-digest256", "data/digest2.chunk");
-
- // Download some updates, and don't continue until the downloads are done.
- function updateSuccess(aEvent) {
- // Timeout of n:1000 is constructed in processUpdateRequest above and
- // passed back in the callback in nsIUrlClassifierStreamUpdater on success.
- do_check_eq("1000", aEvent);
- do_print("All data processed");
- run_next_test();
- }
- streamUpdater.downloadUpdates(
- "goog-downloadwhite-digest256",
- "goog-downloadwhite-digest256;\n",
- true,
- "http://localhost:4444/downloads",
- updateSuccess, handleError, handleError);
-});
-
-add_test(function test_url_not_whitelisted() {
- let uri = createURI("http://example.com");
- let principal = gSecMan.createCodebasePrincipal(uri, {});
- gDbService.lookup(principal, "goog-downloadwhite-digest256",
- function handleEvent(aEvent) {
- // This URI is not on any lists.
- do_check_eq("", aEvent);
- run_next_test();
- });
-});
-
-add_test(function test_url_whitelisted() {
- // Hash of "whitelisted.com/" (canonicalized URL) is:
- // 93CA5F48E15E9861CD37C2D95DB43D23CC6E6DE5C3F8FA6E8BE66F97CC518907
- let uri = createURI("http://whitelisted.com");
- let principal = gSecMan.createCodebasePrincipal(uri, {});
- gDbService.lookup(principal, "goog-downloadwhite-digest256",
- function handleEvent(aEvent) {
- do_check_eq("goog-downloadwhite-digest256", aEvent);
- run_next_test();
- });
-});
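
The comment in test_url_whitelisted() above quotes the SHA-256 digest of the canonicalized URL "whitelisted.com/". For reference, a digest like that can be recomputed with the same nsICryptoHash pattern used by DummyCompleter.getHash() in the removed test_partial.js further down in this commit; the hex-encoding helper itself is illustrative and not part of the original file.

// Illustrative: SHA-256 of a canonicalized URL, hex-encoded with the same
// scriptableunicodeconverter + nsICryptoHash calls the removed tests use.
function sha256Hex(text) {
  var converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
                    .createInstance(Ci.nsIScriptableUnicodeConverter);
  converter.charset = "UTF-8";
  var data = converter.convertToByteArray(text);
  var ch = Cc["@mozilla.org/security/hash;1"].createInstance(Ci.nsICryptoHash);
  ch.init(ch.SHA256);
  ch.update(data, data.length);
  var raw = ch.finish(false); // raw bytes as a binary string
  var hex = "";
  for (var i = 0; i < raw.length; i++) {
    hex += ("0" + raw.charCodeAt(i).toString(16)).slice(-2);
  }
  return hex.toUpperCase();
}

// sha256Hex("whitelisted.com/") should reproduce the digest quoted in the
// comment above (93CA5F48E15E9861...).
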
diff --git a/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js b/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
deleted file mode 100644
index 40fafd923..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
+++ /dev/null
@@ -1,403 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- http://creativecommons.org/publicdomain/zero/1.0/ */
-
-// This test ensures that the nsIUrlClassifierHashCompleter works as expected
-// and simulates an HTTP server to provide completions.
-//
-// In order to test completions, each group of completions sent as one request
-// to the HTTP server is called a completion set. There is currently no
-// support for multiple requests being sent to the server at once in this test.
-// This test makes a request for each element of |completionSets|, waits for
-// a response, and then moves to the next element.
-// Each element of |completionSets| is an array of completions, and each
-// completion is an object with the properties:
-// hash: complete hash for the completion. Automatically right-padded
-// to be COMPLETE_LENGTH.
-// expectCompletion: boolean indicating whether the server should respond
-// with a full hash.
-// forceServerError: boolean indicating whether the server should respond
-// with a 503.
-// table: name of the table that the hash corresponds to. Only needs to be set
-// if a completion is expected.
-// chunkId: positive integer corresponding to the chunk that the hash belongs
-// to. Only needs to be set if a completion is expected.
-// multipleCompletions: boolean indicating whether the server should respond
-// with more than one full hash. If this is set to true
-// then |expectCompletion| must also be set to true and
-// |hash| must have the same prefix as all |completions|.
-// completions: an array of completions (objects with a hash, table and
-// chunkId property as described above). This property is only
-// used when |multipleCompletions| is set to true.
-
-// Basic prefixes with 2/3 completions.
-var basicCompletionSet = [
- {
- hash: "abcdefgh",
- expectCompletion: true,
- table: "test",
- chunkId: 1234,
- },
- {
- hash: "1234",
- expectCompletion: false,
- },
- {
- hash: "\u0000\u0000\u000012312",
- expectCompletion: true,
- table: "test",
- chunkId: 1234,
- }
-];
-
-// 3 prefixes with 0 completions to test HashCompleter handling a 204 status.
-var falseCompletionSet = [
- {
- hash: "1234",
- expectCompletion: false,
- },
- {
- hash: "",
- expectCompletion: false,
- },
- {
- hash: "abc",
- expectCompletion: false,
- }
-];
-
-// The current implementation (as of Mar 2011) sometimes sends duplicate
-// entries to HashCompleter and even expects responses for duplicated entries.
-var dupedCompletionSet = [
- {
- hash: "1234",
- expectCompletion: true,
- table: "test",
- chunkId: 1,
- },
- {
- hash: "5678",
- expectCompletion: false,
- table: "test2",
- chunkId: 2,
- },
- {
- hash: "1234",
- expectCompletion: true,
- table: "test",
- chunkId: 1,
- },
- {
- hash: "5678",
- expectCompletion: false,
- table: "test2",
- chunkId: 2
- }
-];
-
-// It is possible for a hash completion request to return with multiple
-// completions; the HashCompleter should return all of them.
-var multipleResponsesCompletionSet = [
- {
- hash: "1234",
- expectCompletion: true,
- multipleCompletions: true,
- completions: [
- {
- hash: "123456",
- table: "test1",
- chunkId: 3,
- },
- {
- hash: "123478",
- table: "test2",
- chunkId: 4,
- }
- ],
- }
-];
-
-function buildCompletionRequest(aCompletionSet) {
- let prefixes = [];
- let prefixSet = new Set();
- aCompletionSet.forEach(s => {
- let prefix = s.hash.substring(0, 4);
- if (prefixSet.has(prefix)) {
- return;
- }
- prefixSet.add(prefix);
- prefixes.push(prefix);
- });
- return 4 + ":" + (4 * prefixes.length) + "\n" + prefixes.join("");
-}
-
-function parseCompletionRequest(aRequest) {
- // Format: [partial_length]:[num_of_prefix * partial_length]\n[prefixes_data]
-
- let tokens = /(\d):(\d+)/.exec(aRequest);
- if (tokens.length < 3) {
- dump("Request format error.");
- return null;
- }
-
- let partialLength = parseInt(tokens[1]);
- let payloadLength = parseInt(tokens[2]);
-
- let payloadStart = tokens[1].length + // partial length
- 1 + // ':'
- tokens[2].length + // payload length
- 1; // '\n'
-
- let prefixSet = [];
- for (let i = payloadStart; i < aRequest.length; i += partialLength) {
- let prefix = aRequest.substr(i, partialLength);
- if (prefix.length !== partialLength) {
- dump("Header info not correct: " + aRequest.substr(0, payloadStart));
- return null;
- }
- prefixSet.push(prefix);
- }
- prefixSet.sort();
-
- return prefixSet;
-}
-
-// Compare the requests in string format.
-function compareCompletionRequest(aRequest1, aRequest2) {
- let prefixSet1 = parseCompletionRequest(aRequest1);
- let prefixSet2 = parseCompletionRequest(aRequest2);
-
- return equal(JSON.stringify(prefixSet1), JSON.stringify(prefixSet2));
-}
-
-// The fifth completion set is added at runtime by getRandomCompletionSet.
-// Each completion in the set only has one response and its purpose is to
-// provide an easy way to test the HashCompleter handling an arbitrarily large
-// completion set (determined by SIZE_OF_RANDOM_SET).
-const SIZE_OF_RANDOM_SET = 16;
-function getRandomCompletionSet(forceServerError) {
- let completionSet = [];
- let hashPrefixes = [];
-
- let seed = Math.floor(Math.random() * Math.pow(2, 32));
- dump("Using seed of " + seed + " for random completion set.\n");
- let rand = new LFSRgenerator(seed);
-
- for (let i = 0; i < SIZE_OF_RANDOM_SET; i++) {
- let completion = { expectCompletion: false, forceServerError: false, _finished: false };
-
- // Generate a random 256 bit hash. First we get a random number and then
- // convert it to a string.
- let hash;
- let prefix;
- do {
- hash = "";
- let length = 1 + rand.nextNum(5);
- for (let i = 0; i < length; i++)
- hash += String.fromCharCode(rand.nextNum(8));
- prefix = hash.substring(0,4);
- } while (hashPrefixes.indexOf(prefix) != -1);
-
- hashPrefixes.push(prefix);
- completion.hash = hash;
-
- if (!forceServerError) {
- completion.expectCompletion = rand.nextNum(1) == 1;
- } else {
- completion.forceServerError = true;
- }
- if (completion.expectCompletion) {
- // Generate a random alpha-numeric string of length at most 6 for the
- // table name.
- completion.table = (rand.nextNum(31)).toString(36);
-
- completion.chunkId = rand.nextNum(16);
- }
- completionSet.push(completion);
- }
-
- return completionSet;
-}
-
-var completionSets = [basicCompletionSet, falseCompletionSet,
- dupedCompletionSet, multipleResponsesCompletionSet];
-var currentCompletionSet = -1;
-var finishedCompletions = 0;
-
-const SERVER_PORT = 8080;
-const SERVER_PATH = "/hash-completer";
-var server;
-
-// Completion hashes are automatically right-padded with null chars to have a
-// length of COMPLETE_LENGTH.
-// Taken from nsUrlClassifierDBService.h
-const COMPLETE_LENGTH = 32;
-
-var completer = Cc["@mozilla.org/url-classifier/hashcompleter;1"].
- getService(Ci.nsIUrlClassifierHashCompleter);
-
-var gethashUrl;
-
-// Expected highest completion set for which the server sends a response.
-var expectedMaxServerCompletionSet = 0;
-var maxServerCompletionSet = 0;
-
-function run_test() {
-  // Generate a random completion set that returns successful responses.
- completionSets.push(getRandomCompletionSet(false));
- // We backoff after receiving an error, so requests shouldn't reach the
- // server after that.
- expectedMaxServerCompletionSet = completionSets.length;
- // Generate some completion sets that return 503s.
- for (let j = 0; j < 10; ++j) {
- completionSets.push(getRandomCompletionSet(true));
- }
-
- // Fix up the completions before running the test.
- for (let completionSet of completionSets) {
- for (let completion of completionSet) {
- // Pad the right of each |hash| so that the length is COMPLETE_LENGTH.
- if (completion.multipleCompletions) {
- for (let responseCompletion of completion.completions) {
- let numChars = COMPLETE_LENGTH - responseCompletion.hash.length;
- responseCompletion.hash += (new Array(numChars + 1)).join("\u0000");
- }
- }
- else {
- let numChars = COMPLETE_LENGTH - completion.hash.length;
- completion.hash += (new Array(numChars + 1)).join("\u0000");
- }
- }
- }
- do_test_pending();
-
- server = new HttpServer();
- server.registerPathHandler(SERVER_PATH, hashCompleterServer);
-
- server.start(-1);
- const SERVER_PORT = server.identity.primaryPort;
-
- gethashUrl = "http://localhost:" + SERVER_PORT + SERVER_PATH;
-
- runNextCompletion();
-}
-
-function runNextCompletion() {
- // The server relies on currentCompletionSet to send the correct response, so
- // don't increment it until we start the new set of callbacks.
- currentCompletionSet++;
- if (currentCompletionSet >= completionSets.length) {
- finish();
- return;
- }
-
- dump("Now on completion set index " + currentCompletionSet + ", length " +
- completionSets[currentCompletionSet].length + "\n");
- // Number of finished completions for this set.
- finishedCompletions = 0;
- for (let completion of completionSets[currentCompletionSet]) {
- completer.complete(completion.hash.substring(0,4), gethashUrl,
- (new callback(completion)));
- }
-}
-
-function hashCompleterServer(aRequest, aResponse) {
- let stream = aRequest.bodyInputStream;
- let wrapperStream = Cc["@mozilla.org/binaryinputstream;1"].
- createInstance(Ci.nsIBinaryInputStream);
- wrapperStream.setInputStream(stream);
-
- let len = stream.available();
- let data = wrapperStream.readBytes(len);
-
- // Check if we got the expected completion request.
- let expectedRequest = buildCompletionRequest(completionSets[currentCompletionSet]);
- compareCompletionRequest(data, expectedRequest);
-
- // To avoid a response with duplicate hash completions, we keep track of all
-  // hashes completed so far.
- let completedHashes = [];
- let responseText = "";
-
- function responseForCompletion(x) {
- return x.table + ":" + x.chunkId + ":" + x.hash.length + "\n" + x.hash;
- }
-  // As per the spec, a server should respond with a 204 if there are no
- // full-length hashes that match the prefixes.
- let httpStatus = 204;
- for (let completion of completionSets[currentCompletionSet]) {
- if (completion.expectCompletion &&
- (completedHashes.indexOf(completion.hash) == -1)) {
- completedHashes.push(completion.hash);
-
- if (completion.multipleCompletions)
- responseText += completion.completions.map(responseForCompletion).join("");
- else
- responseText += responseForCompletion(completion);
- }
- if (completion.forceServerError) {
- httpStatus = 503;
- }
- }
-
- dump("Server sending response for " + currentCompletionSet + "\n");
- maxServerCompletionSet = currentCompletionSet;
- if (responseText && httpStatus != 503) {
- aResponse.write(responseText);
- } else {
- aResponse.setStatusLine(null, httpStatus, null);
- }
-}
-
-
-function callback(completion) {
- this._completion = completion;
-}
-
-callback.prototype = {
- completion: function completion(hash, table, chunkId, trusted) {
- do_check_true(this._completion.expectCompletion);
- if (this._completion.multipleCompletions) {
- for (let completion of this._completion.completions) {
- if (completion.hash == hash) {
- do_check_eq(JSON.stringify(hash), JSON.stringify(completion.hash));
- do_check_eq(table, completion.table);
- do_check_eq(chunkId, completion.chunkId);
-
- completion._completed = true;
-
- if (this._completion.completions.every(x => x._completed))
- this._completed = true;
-
- break;
- }
- }
- }
- else {
- // Hashes are not actually strings and can contain arbitrary data.
- do_check_eq(JSON.stringify(hash), JSON.stringify(this._completion.hash));
- do_check_eq(table, this._completion.table);
- do_check_eq(chunkId, this._completion.chunkId);
-
- this._completed = true;
- }
- },
-
- completionFinished: function completionFinished(status) {
- finishedCompletions++;
- do_check_eq(!!this._completion.expectCompletion, !!this._completed);
- this._completion._finished = true;
-
- // currentCompletionSet can mutate before all of the callbacks are complete.
- if (currentCompletionSet < completionSets.length &&
- finishedCompletions == completionSets[currentCompletionSet].length) {
- runNextCompletion();
- }
- },
-};
-
-function finish() {
- do_check_eq(expectedMaxServerCompletionSet, maxServerCompletionSet);
- server.stop(function() {
- do_test_finished();
- });
-}
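
responseForCompletion() above writes each full-hash result as "<table>:<chunkId>:<length>\n" followed by exactly <length> bytes of hash data, concatenated for every match. The parser below is a sketch for that mock format only; it is not part of the removed test and makes no claim about the real gethash protocol parser.

// Illustrative parser for the gethash response body produced by
// responseForCompletion() above: repeated "<table>:<chunkId>:<len>\n"
// headers, each immediately followed by <len> bytes of hash data.
function parseMockGethashResponse(body) {
  var results = [];
  var pos = 0;
  while (pos < body.length) {
    var newline = body.indexOf("\n", pos);
    var header = body.substring(pos, newline).split(":");
    var len = parseInt(header[2], 10);
    results.push({
      table: header[0],
      chunkId: parseInt(header[1], 10),
      hash: body.substr(newline + 1, len),
    });
    pos = newline + 1 + len;
  }
  return results;
}
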
diff --git a/toolkit/components/url-classifier/tests/unit/test_listmanager.js b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
deleted file mode 100644
index ba11d930e..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_listmanager.js
+++ /dev/null
@@ -1,376 +0,0 @@
-Cu.import("resource://gre/modules/XPCOMUtils.jsm");
-Cu.import("resource://gre/modules/Services.jsm");
-
-XPCOMUtils.defineLazyModuleGetter(this, "NetUtil",
- "resource://gre/modules/NetUtil.jsm");
-
-// These tables share the same updateURL.
-const TEST_TABLE_DATA_LIST = [
- // 0:
- {
- tableName: "test-listmanager0-digest256",
- providerName: "google",
- updateUrl: "http://localhost:4444/safebrowsing/update",
- gethashUrl: "http://localhost:4444/safebrowsing/gethash0",
- },
-
- // 1:
- {
- tableName: "test-listmanager1-digest256",
- providerName: "google",
- updateUrl: "http://localhost:4444/safebrowsing/update",
- gethashUrl: "http://localhost:4444/safebrowsing/gethash1",
- },
-
- // 2.
- {
- tableName: "test-listmanager2-digest256",
- providerName: "google",
- updateUrl: "http://localhost:4444/safebrowsing/update",
- gethashUrl: "http://localhost:4444/safebrowsing/gethash2",
- }
-];
-
-// These tables have a different update URL (for v4).
-const TEST_TABLE_DATA_V4 = {
- tableName: "test-phish-proto",
- providerName: "google4",
- updateUrl: "http://localhost:5555/safebrowsing/update?",
- gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
-};
-const TEST_TABLE_DATA_V4_DISABLED = {
- tableName: "test-unwanted-proto",
- providerName: "google4",
- updateUrl: "http://localhost:5555/safebrowsing/update?",
- gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
-};
-
-const PREF_NEXTUPDATETIME = "browser.safebrowsing.provider.google.nextupdatetime";
-const PREF_NEXTUPDATETIME_V4 = "browser.safebrowsing.provider.google4.nextupdatetime";
-
-let gListManager = Cc["@mozilla.org/url-classifier/listmanager;1"]
- .getService(Ci.nsIUrlListManager);
-
-let gUrlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
- .getService(Ci.nsIUrlClassifierUtils);
-
-// Global test server for serving safebrowsing updates.
-let gHttpServ = null;
-let gUpdateResponse = "";
-let gExpectedUpdateRequest = "";
-let gExpectedQueryV4 = "";
-
-// Handles request for TEST_TABLE_DATA_V4.
-let gHttpServV4 = null;
-
-// These two variables are used to synchronize the last two racing updates
-// (in terms of "update URL") in test_update_all_tables().
-let gUpdatedCntForTableData = 0; // For TEST_TABLE_DATA_LIST.
-let gIsV4Updated = false; // For TEST_TABLE_DATA_V4.
-
-const NEW_CLIENT_STATE = 'sta\0te';
-const CHECKSUM = '\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78';
-
-prefBranch.setBoolPref("browser.safebrowsing.debug", true);
-
-// The "\xFF\xFF" is to generate a base64 string with "/".
-prefBranch.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");
-
-// Register tables.
-TEST_TABLE_DATA_LIST.forEach(function(t) {
- gListManager.registerTable(t.tableName,
- t.providerName,
- t.updateUrl,
- t.gethashUrl);
-});
-
-gListManager.registerTable(TEST_TABLE_DATA_V4.tableName,
- TEST_TABLE_DATA_V4.providerName,
- TEST_TABLE_DATA_V4.updateUrl,
- TEST_TABLE_DATA_V4.gethashUrl);
-
-// To test Bug 1302044.
-gListManager.registerTable(TEST_TABLE_DATA_V4_DISABLED.tableName,
- TEST_TABLE_DATA_V4_DISABLED.providerName,
- TEST_TABLE_DATA_V4_DISABLED.updateUrl,
- TEST_TABLE_DATA_V4_DISABLED.gethashUrl);
-
-const SERVER_INVOLVED_TEST_CASE_LIST = [
- // - Do table0 update.
- // - Server would respond "a:5:32:32\n[DATA]".
- function test_update_table0() {
- disableAllUpdates();
-
- gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
- gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";\n";
-
- gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
- gUpdateResponse += readFileToString("data/digest2.chunk");
-
- forceTableUpdate();
- },
-
- // - Do table0 update again. Since chunk 5 was added to table0 in the last
- // update, the expected request contains "a:5".
-  //   - Server would respond "s:2-12\n[DATA]".
- function test_update_table0_with_existing_chunks() {
- disableAllUpdates();
-
- gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
- gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";a:5\n";
-
- gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
- gUpdateResponse += readFileToString("data/digest1.chunk");
-
- forceTableUpdate();
- },
-
- // - Do all-table update.
-  //   - Server would respond with no chunk control.
- //
- // Note that this test MUST be the last one in the array since we rely on
-  // the number of server-involved test cases to synchronize the last two
-  // racing updates for different URLs.
- function test_update_all_tables() {
- disableAllUpdates();
-
- // Enable all tables including TEST_TABLE_DATA_V4!
- TEST_TABLE_DATA_LIST.forEach(function(t) {
- gListManager.enableUpdate(t.tableName);
- });
-
- // We register two v4 tables but only enable one of them
- // to verify that the disabled tables are not updated.
- // See Bug 1302044.
- gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
- gListManager.disableUpdate(TEST_TABLE_DATA_V4_DISABLED.tableName);
-
- // Expected results for v2.
- gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";a:5:s:2-12\n" +
- TEST_TABLE_DATA_LIST[1].tableName + ";\n" +
- TEST_TABLE_DATA_LIST[2].tableName + ";\n";
- gUpdateResponse = "n:1000\n";
-
-    // We test the request against the query string since the v4 request
-    // is appended to the query string. The request is generated by the
-    // protobuf API (binary) and then encoded to base64.
- let requestV4 = gUrlUtils.makeUpdateRequestV4([TEST_TABLE_DATA_V4.tableName],
- [""],
- 1);
- gExpectedQueryV4 = "&$req=" + requestV4;
-
- forceTableUpdate();
- },
-
-];
-
-SERVER_INVOLVED_TEST_CASE_LIST.forEach(t => add_test(t));
-
-add_test(function test_partialUpdateV4() {
- disableAllUpdates();
-
- gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
-
-  // Since the new client state was returned by the server and saved in
- // test_update_all_tables, this update request should send
- // a partial update to the server.
- let requestV4 = gUrlUtils.makeUpdateRequestV4([TEST_TABLE_DATA_V4.tableName],
- [btoa(NEW_CLIENT_STATE)],
- 1);
- gExpectedQueryV4 = "&$req=" + requestV4;
-
- forceTableUpdate();
-});
-
-// Tests nsIUrlListManager.getGethashUrl.
-add_test(function test_getGethashUrl() {
- TEST_TABLE_DATA_LIST.forEach(function (t) {
- equal(gListManager.getGethashUrl(t.tableName), t.gethashUrl);
- });
- equal(gListManager.getGethashUrl(TEST_TABLE_DATA_V4.tableName),
- TEST_TABLE_DATA_V4.gethashUrl);
- run_next_test();
-});
-
-function run_test() {
- // Setup primary testing server.
- gHttpServ = new HttpServer();
- gHttpServ.registerDirectory("/", do_get_cwd());
-
- gHttpServ.registerPathHandler("/safebrowsing/update", function(request, response) {
- let body = NetUtil.readInputStreamToString(request.bodyInputStream,
- request.bodyInputStream.available());
-
- // Verify if the request is as expected.
- equal(body, gExpectedUpdateRequest);
-
-    // Respond with the update, which is controlled by the test case.
- response.setHeader("Content-Type",
- "application/vnd.google.safebrowsing-update", false);
- response.setStatusLine(request.httpVersion, 200, "OK");
- response.bodyOutputStream.write(gUpdateResponse, gUpdateResponse.length);
-
- gUpdatedCntForTableData++;
-
- if (gUpdatedCntForTableData !== SERVER_INVOLVED_TEST_CASE_LIST.length) {
-      // This is not the last test case, so run the next one once this
-      // update succeeds.
- waitForUpdateSuccess(run_next_test);
- return;
- }
-
- if (gIsV4Updated) {
- run_next_test(); // All tests are done. Just finish.
- return;
- }
-
- do_print("Waiting for TEST_TABLE_DATA_V4 to be tested ...");
- });
-
- gHttpServ.start(4444);
-
- // Setup v4 testing server for the different update URL.
- gHttpServV4 = new HttpServer();
- gHttpServV4.registerDirectory("/", do_get_cwd());
-
- gHttpServV4.registerPathHandler("/safebrowsing/update", function(request, response) {
- // V4 update request body should be empty.
- equal(request.bodyInputStream.available(), 0);
-
-    // Not in the spec; found in Chromium source code...
- equal(request.getHeader("X-HTTP-Method-Override"), "POST");
-
- // V4 update request uses GET.
- equal(request.method, "GET");
-
-    // V4 appends the base64-encoded request to the query string.
- equal(request.queryString, gExpectedQueryV4);
- equal(request.queryString.indexOf('+'), -1);
- equal(request.queryString.indexOf('/'), -1);
-
-    // Respond with V2-compatible content for now. In the future we can
-    // send a meaningful response to test Bug 1284178 and see if the
-    // update is successfully stored to the database.
- response.setHeader("Content-Type",
- "application/vnd.google.safebrowsing-update", false);
- response.setStatusLine(request.httpVersion, 200, "OK");
-
-    // The protobuf binary representation of the response:
- //
- // [
- // {
- // 'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
- // 'response_type': 2, // FULL_UPDATE
- // 'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
- // 'checksum': { "sha256": CHECKSUM }, // CHECKSUM
- // 'additions': { 'compression_type': RAW,
- // 'prefix_size': 4,
- // 'raw_hashes': "00000001000000020000000300000004"}
- // }
- // ]
- //
- let content = "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";
-
- response.bodyOutputStream.write(content, content.length);
-
- if (gIsV4Updated) {
-      // This is the case where test_partialUpdateV4 is running.
-      // We are supposed to have verified that the update request contains
- // the state we set in the previous request.
- run_next_test();
- return;
- }
-
- waitUntilMetaDataSaved(NEW_CLIENT_STATE, CHECKSUM, () => {
- gIsV4Updated = true;
-
- if (gUpdatedCntForTableData === SERVER_INVOLVED_TEST_CASE_LIST.length) {
- // All tests are done!
- run_next_test();
- return;
- }
-
-      do_print("Wait for all server-involved tests to be done ...");
- });
-
- });
-
- gHttpServV4.start(5555);
-
- run_next_test();
-}
-
-// A trick to force updating tables. Before calling this, we have to call
-// disableAllUpdates() to clean up the updateCheckers in the listmanager.
-function forceTableUpdate() {
- prefBranch.setCharPref(PREF_NEXTUPDATETIME, "1");
- prefBranch.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
- gListManager.maybeToggleUpdateChecking();
-}
-
-function disableAllUpdates() {
- TEST_TABLE_DATA_LIST.forEach(t => gListManager.disableUpdate(t.tableName));
- gListManager.disableUpdate(TEST_TABLE_DATA_V4.tableName);
-}
-
-// Since there's no public interface on the listmanager to tell whether an
-// update succeeded, we can only rely on the refresh of "nextupdatetime".
-function waitForUpdateSuccess(callback) {
- let nextupdatetime = parseInt(prefBranch.getCharPref(PREF_NEXTUPDATETIME));
- do_print("nextupdatetime: " + nextupdatetime);
- if (nextupdatetime !== 1) {
- callback();
- return;
- }
- do_timeout(1000, waitForUpdateSuccess.bind(null, callback));
-}
-
-// Construct an update from a file.
-function readFileToString(aFilename) {
- let f = do_get_file(aFilename);
- let stream = Cc["@mozilla.org/network/file-input-stream;1"]
- .createInstance(Ci.nsIFileInputStream);
- stream.init(f, -1, 0, 0);
- let buf = NetUtil.readInputStreamToString(stream, stream.available());
- return buf;
-}
-
-function waitUntilMetaDataSaved(expectedState, expectedChecksum, callback) {
- let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
- .getService(Ci.nsIUrlClassifierDBService);
-
- dbService.getTables(metaData => {
- do_print("metadata: " + metaData);
- let didCallback = false;
- metaData.split("\n").some(line => {
- // Parse [tableName];[stateBase64]
- let p = line.indexOf(";");
- if (-1 === p) {
- return false; // continue.
- }
- let tableName = line.substring(0, p);
- let metadata = line.substring(p + 1).split(":");
- let stateBase64 = metadata[0];
- let checksumBase64 = metadata[1];
-
- if (tableName !== 'test-phish-proto') {
- return false; // continue.
- }
-
- if (stateBase64 === btoa(expectedState) &&
- checksumBase64 === btoa(expectedChecksum)) {
- do_print('State has been saved to disk!');
- callback();
- didCallback = true;
- }
-
-      return true; // stop iterating whether or not the state matched.
- });
-
- if (!didCallback) {
- do_timeout(1000, waitUntilMetaDataSaved.bind(null, expectedState,
- expectedChecksum,
- callback));
- }
- });
-}
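
The gExpectedUpdateRequest strings above follow the v2 update request line format "<table>;[a:<addChunks>][:s:<subChunks>]\n". The builder below restates that shape; the helper name is mine and it is not taken from the list manager code.

// Illustrative builder for the v2 update request lines asserted above, e.g.
//   updateRequestLine("test-listmanager0-digest256", "5", "2-12")
//     -> "test-listmanager0-digest256;a:5:s:2-12\n"
//   updateRequestLine("test-listmanager1-digest256")
//     -> "test-listmanager1-digest256;\n"
function updateRequestLine(table, addChunks, subChunks) {
  var state = "";
  if (addChunks) {
    state += "a:" + addChunks;
  }
  if (subChunks) {
    state += (state ? ":" : "") + "s:" + subChunks;
  }
  return table + ";" + state + "\n";
}
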
diff --git a/toolkit/components/url-classifier/tests/unit/test_partial.js b/toolkit/components/url-classifier/tests/unit/test_partial.js
deleted file mode 100644
index 83243fb4e..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_partial.js
+++ /dev/null
@@ -1,825 +0,0 @@
-
-/**
- * DummyCompleter() lets tests easily specify the results of a partial
- * hash completion request.
- */
-function DummyCompleter() {
- this.fragments = {};
- this.queries = [];
- this.tableName = "test-phish-simple";
-}
-
-DummyCompleter.prototype =
-{
-QueryInterface: function(iid)
-{
- if (!iid.equals(Ci.nsISupports) &&
- !iid.equals(Ci.nsIUrlClassifierHashCompleter)) {
- throw Cr.NS_ERROR_NO_INTERFACE;
- }
- return this;
-},
-
-complete: function(partialHash, gethashUrl, cb)
-{
- this.queries.push(partialHash);
- var fragments = this.fragments;
- var self = this;
- var doCallback = function() {
- if (self.alwaysFail) {
- cb.completionFinished(1);
- return;
- }
- var results;
- if (fragments[partialHash]) {
- for (var i = 0; i < fragments[partialHash].length; i++) {
- var chunkId = fragments[partialHash][i][0];
- var hash = fragments[partialHash][i][1];
- cb.completion(hash, self.tableName, chunkId);
- }
- }
- cb.completionFinished(0);
- }
- var timer = new Timer(0, doCallback);
-},
-
-getHash: function(fragment)
-{
- var converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"].
- createInstance(Ci.nsIScriptableUnicodeConverter);
- converter.charset = "UTF-8";
- var data = converter.convertToByteArray(fragment);
- var ch = Cc["@mozilla.org/security/hash;1"].createInstance(Ci.nsICryptoHash);
- ch.init(ch.SHA256);
- ch.update(data, data.length);
- var hash = ch.finish(false);
- return hash.slice(0, 32);
-},
-
-addFragment: function(chunkId, fragment)
-{
- this.addHash(chunkId, this.getHash(fragment));
-},
-
-// This method allows the caller to generate complete hashes that share the
-// 4-byte prefix of a real fragment but differ in the rest of the hash.
-addConflict: function(chunkId, fragment)
-{
- var realHash = this.getHash(fragment);
- var invalidHash = this.getHash("blah blah blah blah blah");
- this.addHash(chunkId, realHash.slice(0, 4) + invalidHash.slice(4, 32));
-},
-
-addHash: function(chunkId, hash)
-{
- var partial = hash.slice(0, 4);
- if (this.fragments[partial]) {
- this.fragments[partial].push([chunkId, hash]);
- } else {
- this.fragments[partial] = [[chunkId, hash]];
- }
-},
-
-compareQueries: function(fragments)
-{
- var expectedQueries = [];
- for (var i = 0; i < fragments.length; i++) {
- expectedQueries.push(this.getHash(fragments[i]).slice(0, 4));
- }
- do_check_eq(this.queries.length, expectedQueries.length);
- expectedQueries.sort();
- this.queries.sort();
- for (var i = 0; i < this.queries.length; i++) {
- do_check_eq(this.queries[i], expectedQueries[i]);
- }
-}
-};
-
-function setupCompleter(table, hits, conflicts)
-{
- var completer = new DummyCompleter();
- completer.tableName = table;
- for (var i = 0; i < hits.length; i++) {
- var chunkId = hits[i][0];
- var fragments = hits[i][1];
- for (var j = 0; j < fragments.length; j++) {
- completer.addFragment(chunkId, fragments[j]);
- }
- }
- for (var i = 0; i < conflicts.length; i++) {
- var chunkId = conflicts[i][0];
- var fragments = conflicts[i][1];
- for (var j = 0; j < fragments.length; j++) {
- completer.addConflict(chunkId, fragments[j]);
- }
- }
-
- dbservice.setHashCompleter(table, completer);
-
- return completer;
-}
-
-function installCompleter(table, fragments, conflictFragments)
-{
- return setupCompleter(table, fragments, conflictFragments);
-}
-
-function installFailingCompleter(table) {
- var completer = setupCompleter(table, [], []);
- completer.alwaysFail = true;
- return completer;
-}
-
-// Helper assertion for checking dummy completer queries
-gAssertions.completerQueried = function(data, cb)
-{
- var completer = data[0];
- completer.compareQueries(data[1]);
- cb();
-}
-
-function doTest(updates, assertions)
-{
- doUpdateTest(updates, assertions, runNextTest, updateError);
-}
-
-// Test adding a few partial urls to a fresh database
-function testPartialAdds() {
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : addUrls,
- "completerQueried" : [completer, addUrls]
- };
-
-
- doTest([update], assertions);
-}
-
-function testPartialAddsWithConflicts() {
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
- // Each result will have both a real match and a conflict
- var completer = installCompleter('test-phish-simple',
- [[1, addUrls]],
- [[1, addUrls]]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : addUrls,
- "completerQueried" : [completer, addUrls]
- };
-
- doTest([update], assertions);
-}
-
-// Test that the fragmenting code does not cause duplicated completions
-function testFragments() {
- var addUrls = [ "foo.com/a/b/c", "foo.net/", "foo.com/c/" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : addUrls,
- "completerQueried" : [completer, addUrls]
- };
-
-
- doTest([update], assertions);
-}
-
-// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
-// section 6.2 example 1
-function testSpecFragments() {
- var probeUrls = [ "a.b.c/1/2.html?param=1" ];
-
- var addUrls = [ "a.b.c/1/2.html",
- "a.b.c/",
- "a.b.c/1/",
- "b.c/1/2.html?param=1",
- "b.c/1/2.html",
- "b.c/",
- "b.c/1/",
- "a.b.c/1/2.html?param=1" ];
-
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : probeUrls,
- "completerQueried" : [completer, addUrls]
- };
-
- doTest([update], assertions);
-
-}
-
-// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
-// section 6.2 example 2
-function testMoreSpecFragments() {
- var probeUrls = [ "a.b.c.d.e.f.g/1.html" ];
-
- var addUrls = [ "a.b.c.d.e.f.g/1.html",
- "a.b.c.d.e.f.g/",
- "c.d.e.f.g/1.html",
- "c.d.e.f.g/",
- "d.e.f.g/1.html",
- "d.e.f.g/",
- "e.f.g/1.html",
- "e.f.g/",
- "f.g/1.html",
- "f.g/" ];
-
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : probeUrls,
- "completerQueried" : [completer, addUrls]
- };
-
- doTest([update], assertions);
-
-}
-
-function testFalsePositives() {
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
- // Each result will have no matching complete hashes and a non-matching
- // conflict
- var completer = installCompleter('test-phish-simple', [], [[1, addUrls]]);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsDontExist" : addUrls,
- "completerQueried" : [completer, addUrls]
- };
-
- doTest([update], assertions);
-}
-
-function testEmptyCompleter() {
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
- // Completer will never return full hashes
- var completer = installCompleter('test-phish-simple', [], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsDontExist" : addUrls,
- "completerQueried" : [completer, addUrls]
- };
-
- doTest([update], assertions);
-}
-
-function testCompleterFailure() {
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
- // Completer will never return full hashes
- var completer = installFailingCompleter('test-phish-simple');
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsDontExist" : addUrls,
- "completerQueried" : [completer, addUrls]
- };
-
- doTest([update], assertions);
-}
-
-function testMixedSizesSameDomain() {
- var add1Urls = [ "foo.com/a" ];
- var add2Urls = [ "foo.com/b" ];
-
- var update1 = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : add1Urls }],
- 4);
- var update2 = buildPhishingUpdate(
- [
- { "chunkNum" : 2,
- "urls" : add2Urls }],
- 32);
-
- // We should only need to complete the partial hashes
- var completer = installCompleter('test-phish-simple', [[1, add1Urls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1-2",
- // both urls should match...
- "urlsExist" : add1Urls.concat(add2Urls),
- // ... but the completer should only be queried for the partial entry
- "completerQueried" : [completer, add1Urls]
- };
-
- doTest([update1, update2], assertions);
-}
-
-function testMixedSizesDifferentDomains() {
- var add1Urls = [ "foo.com/a" ];
- var add2Urls = [ "bar.com/b" ];
-
- var update1 = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : add1Urls }],
- 4);
- var update2 = buildPhishingUpdate(
- [
- { "chunkNum" : 2,
- "urls" : add2Urls }],
- 32);
-
- // We should only need to complete the partial hashes
- var completer = installCompleter('test-phish-simple', [[1, add1Urls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1-2",
- // both urls should match...
- "urlsExist" : add1Urls.concat(add2Urls),
- // ... but the completer should only be queried for the partial entry
- "completerQueried" : [completer, add1Urls]
- };
-
- doTest([update1, update2], assertions);
-}
-
-function testInvalidHashSize()
-{
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 12); // only 4 and 32 are legal hash sizes
-
- var addUrls2 = [ "zaz.com/a", "xyz.com/b" ];
- var update2 = buildPhishingUpdate(
- [
- { "chunkNum" : 2,
- "urls" : addUrls2
- }],
- 4);
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:2",
- "urlsDontExist" : addUrls
- };
-
-  // The update is expected to fail because of the invalid hash size.
- doUpdateTest([update2, update], assertions, updateError, runNextTest);
-}
-
-function testWrongTable()
-{
- var addUrls = [ "foo.com/a" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
- var completer = installCompleter('test-malware-simple', // wrong table
- [[1, addUrls]], []);
-
- // The above installCompleter installs the completer for test-malware-simple,
- // we want it to be used for test-phish-simple too.
- dbservice.setHashCompleter("test-phish-simple", completer);
-
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- // The urls were added as phishing urls, but the completer is claiming
- // that they are malware urls, and we trust the completer in this case.
- // The result will be discarded, so we can only check for non-existence.
- "urlsDontExist" : addUrls,
- // Make sure the completer was actually queried.
- "completerQueried" : [completer, addUrls]
- };
-
- doUpdateTest([update], assertions,
- function() {
- // Give the dbservice a chance to (not) cache the result.
- var timer = new Timer(3000, function() {
- // The miss earlier will have caused a miss to be cached.
- // Resetting the completer does not count as an update,
- // so we will not be probed again.
-        var newCompleter = installCompleter('test-malware-simple', [[1, addUrls]], []);
-        dbservice.setHashCompleter("test-phish-simple", newCompleter);
-
- var assertions = {
- "urlsDontExist" : addUrls
- };
- checkAssertions(assertions, runNextTest);
- });
- }, updateError);
-}
-
-function setupCachedResults(addUrls, part2)
-{
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- // Request the add url. This should cause the completion to be cached.
- "urlsExist" : addUrls,
- // Make sure the completer was actually queried.
- "completerQueried" : [completer, addUrls]
- };
-
- doUpdateTest([update], assertions,
- function() {
- // Give the dbservice a chance to cache the result.
- var timer = new Timer(3000, part2);
- }, updateError);
-}
-
-function testCachedResults()
-{
- setupCachedResults(["foo.com/a"], function(add) {
- // This is called after setupCachedResults(). Verify that
- // checking the url again does not cause a completer request.
-
- // install a new completer, this one should never be queried.
- var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
-
- var assertions = {
- "urlsExist" : ["foo.com/a"],
- "completerQueried" : [newCompleter, []]
- };
- checkAssertions(assertions, runNextTest);
- });
-}
-
-function testCachedResultsWithSub() {
- setupCachedResults(["foo.com/a"], function() {
- // install a new completer, this one should never be queried.
- var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
-
- var removeUpdate = buildPhishingUpdate(
- [ { "chunkNum" : 2,
- "chunkType" : "s",
- "urls": ["1:foo.com/a"] }],
- 4);
-
- var assertions = {
- "urlsDontExist" : ["foo.com/a"],
- "completerQueried" : [newCompleter, []]
-    };
-
- doTest([removeUpdate], assertions);
- });
-}
-
-function testCachedResultsWithExpire() {
- setupCachedResults(["foo.com/a"], function() {
- // install a new completer, this one should never be queried.
- var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
-
- var expireUpdate =
- "n:1000\n" +
- "i:test-phish-simple\n" +
- "ad:1\n";
-
- var assertions = {
- "urlsDontExist" : ["foo.com/a"],
- "completerQueried" : [newCompleter, []]
-    };
- doTest([expireUpdate], assertions);
- });
-}
-
-function testCachedResultsUpdate()
-{
- var existUrls = ["foo.com/a"];
- setupCachedResults(existUrls, function() {
- // This is called after setupCachedResults(). Verify that
- // checking the url again does not cause a completer request.
-
- // install a new completer, this one should never be queried.
- var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
-
- var assertions = {
- "urlsExist" : existUrls,
- "completerQueried" : [newCompleter, []]
- };
-
- var addUrls = ["foobar.org/a"];
-
- var update2 = buildPhishingUpdate(
- [
- { "chunkNum" : 2,
- "urls" : addUrls
- }],
- 4);
-
- checkAssertions(assertions, function () {
- // Apply the update. The cached completes should be gone.
- doStreamUpdate(update2, function() {
- // Now the completer gets queried again.
- var newCompleter2 = installCompleter('test-phish-simple', [[1, existUrls]], []);
- var assertions2 = {
- "tableData" : "test-phish-simple;a:1-2",
- "urlsExist" : existUrls,
- "completerQueried" : [newCompleter2, existUrls]
- };
- checkAssertions(assertions2, runNextTest);
- }, updateError);
- });
- });
-}
-
-function testCachedResultsFailure()
-{
- var existUrls = ["foo.com/a"];
- setupCachedResults(existUrls, function() {
- // This is called after setupCachedResults(). Verify that
- // checking the url again does not cause a completer request.
-
- // install a new completer, this one should never be queried.
- var newCompleter = installCompleter('test-phish-simple', [[1, []]], []);
-
- var assertions = {
- "urlsExist" : existUrls,
- "completerQueried" : [newCompleter, []]
- };
-
- var addUrls = ["foobar.org/a"];
-
- var update2 = buildPhishingUpdate(
- [
- { "chunkNum" : 2,
- "urls" : addUrls
- }],
- 4);
-
- checkAssertions(assertions, function() {
- // Apply the update. The cached completes should be gone.
- doErrorUpdate("test-phish-simple,test-malware-simple", function() {
- // Now the completer gets queried again.
- var newCompleter2 = installCompleter('test-phish-simple', [[1, existUrls]], []);
- var assertions2 = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : existUrls,
- "completerQueried" : [newCompleter2, existUrls]
- };
- checkAssertions(assertions2, runNextTest);
- }, updateError);
- });
- });
-}
-
-function testErrorList()
-{
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 4);
-  // The update failure will kill the completes, so the above
-  // must be a prefix to get any hit at all past the update failure.
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : addUrls,
- // These are complete urls, and will only be completed if the
- // list is stale.
- "completerQueried" : [completer, addUrls]
- };
-
- // Apply the update.
- doStreamUpdate(update, function() {
- // Now the test-phish-simple and test-malware-simple tables are marked
- // as fresh. Fake an update failure to mark them stale.
- doErrorUpdate("test-phish-simple,test-malware-simple", function() {
- // Now the lists should be marked stale. Check assertions.
- checkAssertions(assertions, runNextTest);
- }, updateError);
- }, updateError);
-}
-
-
-function testStaleList()
-{
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 32);
-
- var completer = installCompleter('test-phish-simple', [[1, addUrls]], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : addUrls,
- // These are complete urls, and will only be completed if the
- // list is stale.
- "completerQueried" : [completer, addUrls]
- };
-
- // Consider a match stale after one second.
- prefBranch.setIntPref("urlclassifier.max-complete-age", 1);
-
- // Apply the update.
- doStreamUpdate(update, function() {
- // Now the test-phish-simple and test-malware-simple tables are marked
- // as fresh. Wait three seconds to make sure the list is marked stale.
- new Timer(3000, function() {
- // Now the lists should be marked stale. Check assertions.
- checkAssertions(assertions, function() {
- prefBranch.setIntPref("urlclassifier.max-complete-age", 2700);
- runNextTest();
- });
- }, updateError);
- }, updateError);
-}
-
-// Same as testStaleList, but verifies that an empty response still
-// unconfirms the entry.
-function testStaleListEmpty()
-{
- var addUrls = [ "foo.com/a", "foo.com/b", "bar.com/c" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls
- }],
- 32);
-
- var completer = installCompleter('test-phish-simple', [], []);
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- // None of these should match, because they won't be completed
- "urlsDontExist" : addUrls,
- // These are complete urls, and will only be completed if the
- // list is stale.
- "completerQueried" : [completer, addUrls]
- };
-
- // Consider a match stale after one second.
- prefBranch.setIntPref("urlclassifier.max-complete-age", 1);
-
- // Apply the update.
- doStreamUpdate(update, function() {
- // Now the test-phish-simple and test-malware-simple tables are marked
- // as fresh. Wait three seconds to make sure the list is marked stale.
- new Timer(3000, function() {
- // Now the lists should be marked stale. Check assertions.
- checkAssertions(assertions, function() {
- prefBranch.setIntPref("urlclassifier.max-complete-age", 2700);
- runNextTest();
- });
- }, updateError);
- }, updateError);
-}
-
-
-// Verify that different lists (test-phish-simple,
-// test-malware-simple) maintain their freshness separately.
-function testErrorListIndependent()
-{
- var phishUrls = [ "phish.com/a" ];
- var malwareUrls = [ "attack.com/a" ];
- var update = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : phishUrls
- }],
- 4);
- // These have to persist past the update failure, so they must be prefixes,
- // not completes.
-
- update += buildMalwareUpdate(
- [
- { "chunkNum" : 2,
- "urls" : malwareUrls
- }],
- 32);
-
- var completer = installCompleter('test-phish-simple', [[1, phishUrls]], []);
-
- var assertions = {
- "tableData" : "test-malware-simple;a:2\ntest-phish-simple;a:1",
- "urlsExist" : phishUrls,
- "malwareUrlsExist" : malwareUrls,
-    // Only the phishing urls should be completed, because only the phishing
-    // urls will be stale.
- "completerQueried" : [completer, phishUrls]
- };
-
- // Apply the update.
- doStreamUpdate(update, function() {
- // Now the test-phish-simple and test-malware-simple tables are
- // marked as fresh. Fake an update failure to mark *just*
- // phishing data as stale.
- doErrorUpdate("test-phish-simple", function() {
- // Now the lists should be marked stale. Check assertions.
- checkAssertions(assertions, runNextTest);
- }, updateError);
- }, updateError);
-}
-
-function run_test()
-{
- runTests([
- testPartialAdds,
- testPartialAddsWithConflicts,
- testFragments,
- testSpecFragments,
- testMoreSpecFragments,
- testFalsePositives,
- testEmptyCompleter,
- testCompleterFailure,
- testMixedSizesSameDomain,
- testMixedSizesDifferentDomains,
- testInvalidHashSize,
- testWrongTable,
- testCachedResults,
- testCachedResultsWithSub,
- testCachedResultsWithExpire,
- testCachedResultsUpdate,
- testCachedResultsFailure,
- testStaleList,
- testStaleListEmpty,
- testErrorList,
- testErrorListIndependent
- ]);
-}
-
-do_test_pending();
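
The spec-fragment tests above (testSpecFragments, testMoreSpecFragments) encode the lookup expansion from Protocolv2Spec section 6.2: a probe URL is checked as the exact host plus up to four parent domains, each combined with the exact path with and without query, the root path, and up to three leading path prefixes. A minimal standalone sketch of that expansion, in plain JavaScript rather than the classifier's actual lookup code, reproduces the addUrls lists used by both tests; the helper name is illustrative only.

    // Expand a scheme-less URL (as the tests write them) into the set of
    // host/path fragments that are looked up against the add chunks.
    function lookupFragments(url) {
      var [hostPart, ...pathParts] = url.split("/");
      var pathAndQuery = "/" + pathParts.join("/");
      var [path, query] = pathAndQuery.split("?");

      // Exact host, plus up to four parent domains (last five components max).
      var hosts = [hostPart];
      var comps = hostPart.split(".");
      var start = Math.max(comps.length - 5, 1);
      for (var i = start; i < comps.length - 1; i++) {
        hosts.push(comps.slice(i).join("."));
      }

      // Exact path with query, exact path, "/", and up to three more prefixes.
      var paths = [];
      if (query !== undefined) {
        paths.push(pathAndQuery);
      }
      paths.push(path);
      var segs = path.split("/").filter(s => s.length > 0);
      var prefix = "/";
      paths.push(prefix);
      for (var j = 0; j < Math.min(segs.length - 1, 3); j++) {
        prefix += segs[j] + "/";
        paths.push(prefix);
      }

      var fragments = [];
      for (var h of hosts) {
        for (var p of paths) {
          fragments.push(h + p);
        }
      }
      return Array.from(new Set(fragments));
    }

    // lookupFragments("a.b.c/1/2.html?param=1") yields the eight addUrls from
    // testSpecFragments; lookupFragments("a.b.c.d.e.f.g/1.html") yields the
    // ten addUrls from testMoreSpecFragments.
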
diff --git a/toolkit/components/url-classifier/tests/unit/test_pref.js b/toolkit/components/url-classifier/tests/unit/test_pref.js
deleted file mode 100644
index 68030a246..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_pref.js
+++ /dev/null
@@ -1,14 +0,0 @@
-function run_test() {
- let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
- .getService(Ci.nsIUrlClassifierUtils);
-
- // The google protocol version should be "2.2" until we enable SB v4
- // by default.
- equal(urlUtils.getProtocolVersion("google"), "2.2");
-
- // Mozilla protocol version will stick to "2.2".
- equal(urlUtils.getProtocolVersion("mozilla"), "2.2");
-
- // Unknown provider version will be "2.2".
- equal(urlUtils.getProtocolVersion("unknown-provider"), "2.2");
-}
\ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/test_prefixset.js b/toolkit/components/url-classifier/tests/unit/test_prefixset.js
deleted file mode 100644
index f2ecc9c2b..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_prefixset.js
+++ /dev/null
@@ -1,232 +0,0 @@
-// newPset: returns an empty nsIUrlClassifierPrefixSet.
-function newPset() {
- let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
- .createInstance(Ci.nsIUrlClassifierPrefixSet);
- pset.init("all");
- return pset;
-}
-
-// arrContains: returns true if |arr| contains the element |target|. Uses binary
-// search and requires |arr| to be sorted.
-function arrContains(arr, target) {
- let start = 0;
- let end = arr.length - 1;
- let i = 0;
-
- while (end > start) {
- i = start + (end - start >> 1);
- let value = arr[i];
-
- if (value < target)
- start = i+1;
- else if (value > target)
- end = i-1;
- else
- break;
- }
- if (start == end)
- i = start;
-
- return (!(i < 0 || i >= arr.length) && arr[i] == target);
-}
-
-// checkContents: Check whether the PrefixSet pset contains
-// the prefixes in the passed array.
-function checkContents(pset, prefixes) {
- var outcount = {}, outset = {};
- outset = pset.getPrefixes(outcount);
- let inset = prefixes;
- do_check_eq(inset.length, outset.length);
- inset.sort((x,y) => x - y);
- for (let i = 0; i < inset.length; i++) {
- do_check_eq(inset[i], outset[i]);
- }
-}
-
-function wrappedProbe(pset, prefix) {
- return pset.contains(prefix);
-};
-
-// doRandomLookups: we use this to test for false membership with random input
-// over the range of prefixes (unsigned 32-bit integers).
-// pset: a nsIUrlClassifierPrefixSet to test.
-// prefixes: an array of prefixes supposed to make up the prefix set.
-// N: number of random lookups to make.
-function doRandomLookups(pset, prefixes, N) {
- for (let i = 0; i < N; i++) {
- let randInt = prefixes[0];
- while (arrContains(prefixes, randInt))
- randInt = Math.floor(Math.random() * Math.pow(2, 32));
-
- do_check_false(wrappedProbe(pset, randInt));
- }
-}
-
-// doExpectedLookups: we use this to test expected membership.
-// pset: a nsIUrlClassifierPrefixSet to test.
-// prefixes: an array of prefixes that should all be members of the set.
-// N: number of times to repeat the lookups.
-function doExpectedLookups(pset, prefixes, N) {
- for (let i = 0; i < N; i++) {
- prefixes.forEach(function (x) {
- dump("Checking " + x + "\n");
- do_check_true(wrappedProbe(pset, x));
- });
- }
-}
-
-// testBasicPset: A very basic test of the prefix set to make sure that it
-// exists and to give a basic example of its use.
-function testBasicPset() {
- let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
- .createInstance(Ci.nsIUrlClassifierPrefixSet);
- let prefixes = [2,50,100,2000,78000,1593203];
- pset.setPrefixes(prefixes, prefixes.length);
-
- do_check_true(wrappedProbe(pset, 100));
- do_check_false(wrappedProbe(pset, 100000));
- do_check_true(wrappedProbe(pset, 1593203));
- do_check_false(wrappedProbe(pset, 999));
- do_check_false(wrappedProbe(pset, 0));
-
-
- checkContents(pset, prefixes);
-}
-
-function testDuplicates() {
- let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
- .createInstance(Ci.nsIUrlClassifierPrefixSet);
- let prefixes = [1,1,2,2,2,3,3,3,3,3,3,5,6,6,7,7,9,9,9];
- pset.setPrefixes(prefixes, prefixes.length);
-
- do_check_true(wrappedProbe(pset, 1));
- do_check_true(wrappedProbe(pset, 2));
- do_check_true(wrappedProbe(pset, 5));
- do_check_true(wrappedProbe(pset, 9));
- do_check_false(wrappedProbe(pset, 4));
- do_check_false(wrappedProbe(pset, 8));
-
-
- checkContents(pset, prefixes);
-}
-
-function testSimplePset() {
- let pset = newPset();
- let prefixes = [1,2,100,400,123456789];
- pset.setPrefixes(prefixes, prefixes.length);
-
- doRandomLookups(pset, prefixes, 100);
- doExpectedLookups(pset, prefixes, 1);
-
-
- checkContents(pset, prefixes);
-}
-
-function testReSetPrefixes() {
- let pset = newPset();
- let prefixes = [1, 5, 100, 1000, 150000];
- pset.setPrefixes(prefixes, prefixes.length);
-
- doExpectedLookups(pset, prefixes, 1);
-
- let secondPrefixes = [12, 50, 300, 2000, 5000, 200000];
- pset.setPrefixes(secondPrefixes, secondPrefixes.length);
-
- doExpectedLookups(pset, secondPrefixes, 1);
- for (let i = 0; i < prefixes.length; i++) {
- do_check_false(wrappedProbe(pset, prefixes[i]));
- }
-
-
- checkContents(pset, secondPrefixes);
-}
-
-function testLoadSaveLargeSet() {
- let N = 1000;
- let arr = [];
-
- for (let i = 0; i < N; i++) {
- let randInt = Math.floor(Math.random() * Math.pow(2, 32));
- arr.push(randInt);
- }
-
- arr.sort((x,y) => x - y);
-
- let pset = newPset();
- pset.setPrefixes(arr, arr.length);
-
- doExpectedLookups(pset, arr, 1);
- doRandomLookups(pset, arr, 1000);
-
- checkContents(pset, arr);
-
- // Now try to save, restore, and redo the lookups
- var file = dirSvc.get('ProfLD', Ci.nsIFile);
- file.append("testLarge.pset");
-
- pset.storeToFile(file);
-
- let psetLoaded = newPset();
- psetLoaded.loadFromFile(file);
-
- doExpectedLookups(psetLoaded, arr, 1);
- doRandomLookups(psetLoaded, arr, 1000);
-
- checkContents(psetLoaded, arr);
-}
-
-function testTinySet() {
- let pset = Cc["@mozilla.org/url-classifier/prefixset;1"]
- .createInstance(Ci.nsIUrlClassifierPrefixSet);
- let prefixes = [1];
- pset.setPrefixes(prefixes, prefixes.length);
-
- do_check_true(wrappedProbe(pset, 1));
- do_check_false(wrappedProbe(pset, 100000));
- checkContents(pset, prefixes);
-
- prefixes = [];
- pset.setPrefixes(prefixes, prefixes.length);
- do_check_false(wrappedProbe(pset, 1));
- checkContents(pset, prefixes);
-}
-
-function testLoadSaveNoDelta() {
- let N = 100;
- let arr = [];
-
- for (let i = 0; i < N; i++) {
- // construct a tree without deltas by making the distance
- // between entries larger than 16 bits
- arr.push(((1 << 16) + 1) * i);
- }
-
- let pset = newPset();
- pset.setPrefixes(arr, arr.length);
-
- doExpectedLookups(pset, arr, 1);
-
- var file = dirSvc.get('ProfLD', Ci.nsIFile);
- file.append("testNoDelta.pset");
-
- pset.storeToFile(file);
- pset.loadFromFile(file);
-
- doExpectedLookups(pset, arr, 1);
-}
-
-var tests = [testBasicPset,
- testSimplePset,
- testReSetPrefixes,
- testLoadSaveLargeSet,
- testDuplicates,
- testTinySet,
- testLoadSaveNoDelta];
-
-function run_test() {
- // None of the tests use |executeSoon| or any sort of callbacks, so we can
- // just run them in succession.
- for (let i = 0; i < tests.length; i++) {
- dump("Running " + tests[i].name + "\n");
- tests[i]();
- }
-}
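
testLoadSaveNoDelta above spaces its prefixes ((1 << 16) + 1) apart precisely so that no gap between neighbours fits in 16 bits. Assuming the prefix set stores a sorted run of 32-bit prefixes as occasional full 32-bit anchor values plus 16-bit deltas from the previous prefix (the real structure lives in the C++ prefix-set implementation; the names below are purely illustrative), the encoding step looks roughly like this:

    // Illustrative delta encoding of a sorted list of 32-bit prefixes:
    // start a new full 32-bit anchor whenever the gap to the previous
    // prefix does not fit in 16 bits, otherwise store a 16-bit delta.
    function encodePrefixes(sortedPrefixes) {
      var indices = [];  // full 32-bit anchor entries
      var deltas = [];   // 16-bit deltas, grouped per anchor
      var prev = null;
      for (var p of sortedPrefixes) {
        if (prev === null || p - prev >= (1 << 16)) {
          indices.push(p);
          deltas.push([]);
        } else {
          deltas[deltas.length - 1].push(p - prev);
        }
        prev = p;
      }
      return { indices, deltas };
    }

    // With entries spaced ((1 << 16) + 1) apart, as in testLoadSaveNoDelta,
    // every prefix becomes its own anchor and no deltas are stored.
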
diff --git a/toolkit/components/url-classifier/tests/unit/test_provider_url.js b/toolkit/components/url-classifier/tests/unit/test_provider_url.js
deleted file mode 100644
index 9a946dc3f..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_provider_url.js
+++ /dev/null
@@ -1,34 +0,0 @@
-Cu.import("resource://testing-common/AppInfo.jsm", this);
-Cu.import("resource://gre/modules/Services.jsm");
-
-function updateVersion(version) {
- updateAppInfo({ version });
-}
-
-add_test(function test_provider_url() {
- let urls = [
- "browser.safebrowsing.provider.google.updateURL",
- "browser.safebrowsing.provider.google.gethashURL",
- "browser.safebrowsing.provider.mozilla.updateURL",
- "browser.safebrowsing.provider.mozilla.gethashURL"
- ];
-
- let versions = [
- "49.0",
- "49.0.1",
- "49.0a1",
- "49.0b1",
- "49.0esr",
- "49.0.1esr"
- ];
-
- for (let version of versions) {
- for (let url of urls) {
- updateVersion(version);
- let value = Services.urlFormatter.formatURLPref(url);
- Assert.notEqual(value.indexOf("&appver=49.0&"), -1);
- }
- }
-
- run_next_test();
-});
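
test_provider_url.js above expects every version string it tries, from "49.0.1" to "49.0a1" and "49.0.1esr", to surface in the provider URLs as appver=49.0, i.e. only the MAJOR.MINOR part of the application version is reported. A hypothetical reduction that matches those expectations (a stand-in, not the actual URL formatter code):

    // Reduce a full version string such as "49.0.1esr" or "49.0a1" to the
    // MAJOR.MINOR form used in the appver query parameter.
    function toAppVersion(version) {
      var m = /^(\d+\.\d+)/.exec(version);
      return m ? m[1] : version;
    }

    // ["49.0", "49.0.1", "49.0a1", "49.0b1", "49.0esr", "49.0.1esr"]
    //   .map(toAppVersion) returns "49.0" for every entry.
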
diff --git a/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
deleted file mode 100644
index 45309ba54..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
+++ /dev/null
@@ -1,23 +0,0 @@
-function run_test() {
- let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
- .getService(Ci.nsIUrlClassifierUtils);
-
- // No list at all.
- let requestNoList = urlUtils.makeUpdateRequestV4([], [], 0);
-
- // Only one valid list name.
- let requestOneValid =
- urlUtils.makeUpdateRequestV4(["goog-phish-proto"], ["AAAAAA"], 1);
-
- // Only one invalid list name.
- let requestOneInvalid =
- urlUtils.makeUpdateRequestV4(["bad-list-name"], ["AAAAAA"], 1);
-
- // One valid and one invalid list name.
- let requestOneInvalidOneValid =
- urlUtils.makeUpdateRequestV4(["goog-phish-proto", "bad-list-name"],
- ["AAAAAA", "AAAAAA"], 2);
-
- equal(requestNoList, requestOneInvalid);
- equal(requestOneValid, requestOneInvalidOneValid);
-}
\ No newline at end of file
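
The protobuf test above shows that makeUpdateRequestV4 ignores list names it does not recognize: a request for only "bad-list-name" serializes identically to the empty request, and appending it next to "goog-phish-proto" changes nothing. A sketch of that filtering step, under the assumption that requests are built only from a table of known lists (the set and helper below are illustrative, not the nsIUrlClassifierUtils implementation):

    // Hypothetical filter applied before serializing a V4 update request:
    // list names with no known threat type are dropped.
    var KNOWN_V4_LISTS = new Set([
      "goog-phish-proto",
      "googpub-phish-proto",
      "goog-malware-proto",
      "goog-unwanted-proto",
    ]);

    function filterRequestLists(listNames, states) {
      var kept = [];
      listNames.forEach((name, i) => {
        if (KNOWN_V4_LISTS.has(name)) {
          kept.push({ name, state: states[i] });
        }
      });
      return kept;
    }

    // filterRequestLists(["goog-phish-proto", "bad-list-name"],
    //                    ["AAAAAA", "AAAAAA"])
    // keeps only the goog-phish-proto entry, so the serialized request
    // matches the one built from ["goog-phish-proto"] alone.
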
diff --git a/toolkit/components/url-classifier/tests/unit/test_streamupdater.js b/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
deleted file mode 100644
index e5abc4e91..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
+++ /dev/null
@@ -1,288 +0,0 @@
-function doTest(updates, assertions, expectError)
-{
- if (expectError) {
- doUpdateTest(updates, assertions, updateError, runNextTest);
- } else {
- doUpdateTest(updates, assertions, runNextTest, updateError);
- }
-}
-
-// Never use the same URLs for multiple tests, because we aren't guaranteed
-// to reset the database between tests.
-function testFillDb() {
- var add1Urls = [ "zaz.com/a", "yxz.com/c" ];
-
- var update = "n:1000\n";
- update += "i:test-phish-simple\n";
-
- var update1 = buildBareUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls }]);
- update += "u:data:," + encodeURIComponent(update1) + "\n";
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1",
- "urlsExist" : add1Urls
- };
-
- doTest([update], assertions, false);
-}
-
-function testSimpleForward() {
- var add1Urls = [ "foo-simple.com/a", "bar-simple.com/c" ];
- var add2Urls = [ "foo-simple.com/b" ];
- var add3Urls = [ "bar-simple.com/d" ];
-
- var update = "n:1000\n";
- update += "i:test-phish-simple\n";
-
- var update1 = buildBareUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls }]);
- update += "u:data:," + encodeURIComponent(update1) + "\n";
-
- var update2 = buildBareUpdate(
- [{ "chunkNum" : 2,
- "urls" : add2Urls }]);
- update += "u:data:," + encodeURIComponent(update2) + "\n";
-
- var update3 = buildBareUpdate(
- [{ "chunkNum" : 3,
- "urls" : add3Urls }]);
- update += "u:data:," + encodeURIComponent(update3) + "\n";
-
- var assertions = {
- "tableData" : "test-phish-simple;a:1-3",
- "urlsExist" : add1Urls.concat(add2Urls).concat(add3Urls)
- };
-
- doTest([update], assertions, false);
-}
-
-// Make sure that a nested forward (a forward within a forward) causes
-// the update to fail.
-function testNestedForward() {
- var add1Urls = [ "foo-nested.com/a", "bar-nested.com/c" ];
- var add2Urls = [ "foo-nested.com/b" ];
-
- var update = "n:1000\n";
- update += "i:test-phish-simple\n";
-
- var update1 = buildBareUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls }]);
- update += "u:data:," + encodeURIComponent(update1) + "\n";
-
- var update2 = buildBareUpdate(
- [{ "chunkNum" : 2 }]);
- var update3 = buildBareUpdate(
- [{ "chunkNum" : 3,
- "urls" : add1Urls }]);
-
- update2 += "u:data:," + encodeURIComponent(update3) + "\n";
-
- update += "u:data:," + encodeURIComponent(update2) + "\n";
-
- var assertions = {
- "tableData" : "",
- "urlsDontExist" : add1Urls.concat(add2Urls)
- };
-
- doTest([update], assertions, true);
-}
-
-// An invalid URL forward causes the update to fail.
-function testInvalidUrlForward() {
- var add1Urls = [ "foo-invalid.com/a", "bar-invalid.com/c" ];
-
- var update = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls }]);
- update += "u:asdf://blah/blah\n"; // invalid URL scheme
-
- // add1Urls is present, but that is an artifact of the way we do the test.
- var assertions = {
- "tableData" : "",
- "urlsExist" : add1Urls
- };
-
- doTest([update], assertions, true);
-}
-
-// A failed network request causes the update to fail.
-function testErrorUrlForward() {
- var add1Urls = [ "foo-forward.com/a", "bar-forward.com/c" ];
-
- var update = buildPhishingUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls }]);
- update += "u:http://test.invalid/asdf/asdf\n"; // invalid URL scheme
-
- // add1Urls is present, but that is an artifact of the way we do the test.
- var assertions = {
- "tableData" : "",
- "urlsExist" : add1Urls
- };
-
- doTest([update], assertions, true);
-}
-
-function testMultipleTables() {
- var add1Urls = [ "foo-multiple.com/a", "bar-multiple.com/c" ];
- var add2Urls = [ "foo-multiple.com/b" ];
- var add3Urls = [ "bar-multiple.com/d" ];
- var add4Urls = [ "bar-multiple.com/e" ];
- var add6Urls = [ "bar-multiple.com/g" ];
-
- var update = "n:1000\n";
- update += "i:test-phish-simple\n";
-
- var update1 = buildBareUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls }]);
- update += "u:data:," + encodeURIComponent(update1) + "\n";
-
- var update2 = buildBareUpdate(
- [{ "chunkNum" : 2,
- "urls" : add2Urls }]);
- update += "u:data:," + encodeURIComponent(update2) + "\n";
-
- update += "i:test-malware-simple\n";
-
- var update3 = buildBareUpdate(
- [{ "chunkNum" : 3,
- "urls" : add3Urls }]);
- update += "u:data:," + encodeURIComponent(update3) + "\n";
-
- update += "i:test-unwanted-simple\n";
- var update4 = buildBareUpdate(
- [{ "chunkNum" : 4,
- "urls" : add4Urls }]);
- update += "u:data:," + encodeURIComponent(update4) + "\n";
-
- update += "i:test-block-simple\n";
- var update6 = buildBareUpdate(
- [{ "chunkNum" : 6,
- "urls" : add6Urls }]);
- update += "u:data:," + encodeURIComponent(update6) + "\n";
-
- var assertions = {
- "tableData" : "test-block-simple;a:6\ntest-malware-simple;a:3\ntest-phish-simple;a:1-2\ntest-unwanted-simple;a:4",
- "urlsExist" : add1Urls.concat(add2Urls),
- "malwareUrlsExist" : add3Urls,
- "unwantedUrlsExist" : add4Urls,
- "blockedUrlsExist" : add6Urls
- };
-
- doTest([update], assertions, false);
-}
-
-function testUrlInMultipleTables() {
- var add1Urls = [ "foo-forward.com/a" ];
-
- var update = "n:1000\n";
- update += "i:test-phish-simple\n";
-
- var update1 = buildBareUpdate(
- [{ "chunkNum" : 1,
- "urls" : add1Urls }]);
- update += "u:data:," + encodeURIComponent(update1) + "\n";
-
- update += "i:test-malware-simple\n";
- var update2 = buildBareUpdate(
- [{ "chunkNum" : 2,
- "urls" : add1Urls }]);
- update += "u:data:," + encodeURIComponent(update2) + "\n";
-
- update += "i:test-unwanted-simple\n";
- var update3 = buildBareUpdate(
- [{ "chunkNum" : 3,
- "urls" : add1Urls }]);
- update += "u:data:," + encodeURIComponent(update3) + "\n";
-
- var assertions = {
- "tableData" : "test-malware-simple;a:2\ntest-phish-simple;a:1\ntest-unwanted-simple;a:3",
- "urlExistInMultipleTables" : { url: add1Urls,
- tables: "test-malware-simple,test-phish-simple,test-unwanted-simple" }
- };
-
- doTest([update], assertions, false);
-}
-
-function Observer(callback) {
- this.observe = callback;
-}
-
-Observer.prototype =
-{
-  QueryInterface: function(iid)
-  {
-    if (!iid.equals(Ci.nsISupports) &&
-        !iid.equals(Ci.nsIObserver)) {
-      throw Cr.NS_ERROR_NO_INTERFACE;
-    }
-    return this;
-  }
-};
-
-// Tests a database reset request.
-function testReset() {
-  // The moz-phish-simple table is populated in a separate update request from
-  // the others, so it should not be reset when we run the updates later in
-  // this function.
- var mozAddUrls = [ "moz-reset.com/a" ];
- var mozUpdate = buildMozPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : mozAddUrls
- }]);
-
- var dataUpdate = "data:," + encodeURIComponent(mozUpdate);
-
- streamUpdater.downloadUpdates(mozTables, "", true,
- dataUpdate, () => {}, updateError, updateError);
-
- var addUrls1 = [ "foo-reset.com/a", "foo-reset.com/b" ];
- var update1 = buildPhishingUpdate(
- [
- { "chunkNum" : 1,
- "urls" : addUrls1
- }]);
-
- var update2 = "n:1000\nr:pleasereset\n";
-
- var addUrls3 = [ "bar-reset.com/a", "bar-reset.com/b" ];
- var update3 = buildPhishingUpdate(
- [
- { "chunkNum" : 3,
- "urls" : addUrls3
- }]);
-
- var assertions = {
- "tableData" : "moz-phish-simple;a:1\ntest-phish-simple;a:3", // tables that should still be there.
- "mozPhishingUrlsExist" : mozAddUrls, // mozAddUrls added prior to the reset
- // but it should still exist after reset.
- "urlsExist" : addUrls3, // addUrls3 added after the reset.
- "urlsDontExist" : addUrls1 // addUrls1 added prior to the reset
- };
-
- // Use these update responses in order. The update request only
- // contains test-*-simple tables so the reset will only apply to these.
- doTest([update1, update2, update3], assertions, false);
-}
-
-
-function run_test()
-{
- runTests([
- testSimpleForward,
- testNestedForward,
- testInvalidUrlForward,
- testErrorUrlForward,
- testMultipleTables,
- testUrlInMultipleTables,
- testReset
- ]);
-}
-
-do_test_pending();
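
Several of the tests above (testFillDb, testSimpleForward, testMultipleTables, testUrlInMultipleTables) assemble their update strings the same way: an "n:" next-update line, an "i:" line selecting the table, then one "u:data:," forward per chunk whose payload is the URL-encoded inner update. A small helper capturing that repeated pattern; it is hypothetical, the tests above build the strings by hand.

    // Build a forwarding update for one table from a list of inner update
    // strings, mirroring the hand-built strings in the tests above.
    function buildForwardingUpdate(table, innerUpdates) {
      var update = "n:1000\n";
      update += "i:" + table + "\n";
      for (var inner of innerUpdates) {
        update += "u:data:," + encodeURIComponent(inner) + "\n";
      }
      return update;
    }

    // e.g. buildForwardingUpdate("test-phish-simple",
    //        [buildBareUpdate([{ "chunkNum": 1, "urls": add1Urls }])])
    // reproduces the string assembled by hand in testFillDb.
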
diff --git a/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js b/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
deleted file mode 100644
index f7c51b956..000000000
--- a/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
+++ /dev/null
@@ -1,37 +0,0 @@
-function run_test() {
- let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"]
- .getService(Ci.nsIUrlClassifierUtils);
-
- // Test list name to threat type conversion.
-
- equal(urlUtils.convertListNameToThreatType("goog-malware-proto"), 1);
- equal(urlUtils.convertListNameToThreatType("googpub-phish-proto"), 2);
- equal(urlUtils.convertListNameToThreatType("goog-unwanted-proto"), 3);
- equal(urlUtils.convertListNameToThreatType("goog-phish-proto"), 5);
-
- try {
- urlUtils.convertListNameToThreatType("bad-list-name");
- ok(false, "Bad list name should lead to exception.");
- } catch (e) {}
-
- try {
- urlUtils.convertListNameToThreatType("bad-list-name");
- ok(false, "Bad list name should lead to exception.");
- } catch (e) {}
-
- // Test threat type to list name conversion.
- equal(urlUtils.convertThreatTypeToListNames(1), "goog-malware-proto");
- equal(urlUtils.convertThreatTypeToListNames(2), "googpub-phish-proto,test-phish-proto");
- equal(urlUtils.convertThreatTypeToListNames(3), "goog-unwanted-proto,test-unwanted-proto");
- equal(urlUtils.convertThreatTypeToListNames(5), "goog-phish-proto");
-
- try {
- urlUtils.convertThreatTypeToListNames(0);
- ok(false, "Bad threat type should lead to exception.");
- } catch (e) {}
-
- try {
- urlUtils.convertThreatTypeToListNames(100);
- ok(false, "Bad threat type should lead to exception.");
- } catch (e) {}
-}
\ No newline at end of file
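
test_threat_type_conversion.js above pins down a two-way mapping between V4 list names and numeric threat types (1 = malware, 2 = public phishing, 3 = unwanted software, 5 = phishing), with exceptions for unknown values in either direction. A table-driven sketch of the same mapping, mirrored directly from the assertions in the test above (plain JavaScript, not the nsIUrlClassifierUtils implementation):

    // Forward and reverse threat-type tables; unknown inputs throw,
    // as the test expects.
    var LIST_TO_THREAT = {
      "goog-malware-proto": 1,
      "googpub-phish-proto": 2,
      "goog-unwanted-proto": 3,
      "goog-phish-proto": 5,
    };

    var THREAT_TO_LISTS = {
      1: "goog-malware-proto",
      2: "googpub-phish-proto,test-phish-proto",
      3: "goog-unwanted-proto,test-unwanted-proto",
      5: "goog-phish-proto",
    };

    function convertListNameToThreatType(name) {
      if (!(name in LIST_TO_THREAT)) {
        throw new Error("unknown list name: " + name);
      }
      return LIST_TO_THREAT[name];
    }

    function convertThreatTypeToListNames(type) {
      if (!(type in THREAT_TO_LISTS)) {
        throw new Error("unknown threat type: " + type);
      }
      return THREAT_TO_LISTS[type];
    }
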
diff --git a/toolkit/components/url-classifier/tests/unit/xpcshell.ini b/toolkit/components/url-classifier/tests/unit/xpcshell.ini
deleted file mode 100644
index c34d575c6..000000000
--- a/toolkit/components/url-classifier/tests/unit/xpcshell.ini
+++ /dev/null
@@ -1,24 +0,0 @@
-[DEFAULT]
-head = head_urlclassifier.js
-tail = tail_urlclassifier.js
-skip-if = toolkit == 'android'
-support-files =
- data/digest1.chunk
- data/digest2.chunk
-
-[test_addsub.js]
-[test_bug1274685_unowned_list.js]
-[test_backoff.js]
-[test_dbservice.js]
-[test_hashcompleter.js]
-# Bug 752243: Profile cleanup frequently fails
-#skip-if = os == "mac" || os == "linux"
-[test_partial.js]
-[test_prefixset.js]
-[test_threat_type_conversion.js]
-[test_provider_url.js]
-[test_streamupdater.js]
-[test_digest256.js]
-[test_listmanager.js]
-[test_pref.js]
-[test_safebrowsing_protobuf.js]
diff --git a/toolkit/components/url-classifier/tests/unittests.xul b/toolkit/components/url-classifier/tests/unittests.xul
deleted file mode 100644
index 0c9ce898b..000000000
--- a/toolkit/components/url-classifier/tests/unittests.xul
+++ /dev/null
@@ -1,188 +0,0 @@
-<?xml version="1.0"?>
-<window id="PROT_unittest"
- xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"
- onload="onProtUnittestLoad();"
- title="prot unittests">
-
-<script><![CDATA[
- const Cc = Components.classes;
- const Ci = Components.interfaces;
-
- function G_Debug(zone, s) {
- var label = document.createElement('label');
- var txt = "[" + zone + "] " + s;
- label.appendChild(document.createTextNode(txt));
-
- document.documentElement.appendChild(label);
- }
-
- function G_Assert(zone, cond, msg) {
- if (!cond) {
- G_Debug(zone, msg);
- throw msg;
- }
- }
-
- function ProtectionTableTests() {
- var z = "trtable UNITTEST";
-
- G_Debug(z, "Starting");
-
- var url = "http://www.yahoo.com?foo=bar";
- var url2 = "http://168.188.99.26/.secure/www.ebay.com/";
- var urlTable = Cc['@mozilla.org/url-classifier/table;1?type=url']
- .createInstance(Ci.nsIUrlClassifierTable);
- urlTable.insert(url, "1");
- urlTable.insert(url2, "1");
- G_Assert(z, urlTable.exists(url), "URL lookups broken");
- G_Assert(z, !urlTable.exists("about:config"), "about:config breaks domlook");
- G_Assert(z, urlTable.exists(url2), "URL lookups broken");
- G_Assert(z, urlTable.exists("http://%31%36%38%2e%31%38%38%2e%39%39%2e%32%36/%2E%73%65%63%75%72%65/%77%77%77%2E%65%62%61%79%2E%63%6F%6D/") == true,
- "URL Canonicalization broken");
- G_Assert(z, urlTable.count == 2, 'urlTable: wrong size');
-
- var dom1 = "bar.com";
- var dom2 = "amazon.co.uk";
- var dom3 = "127.0.0.1";
- var domainTable = Cc['@mozilla.org/url-classifier/table;1?type=domain']
- .createInstance(Ci.nsIUrlClassifierTable);
- domainTable.insert(dom1, "1");
- domainTable.insert(dom2, "1");
- domainTable.insert(dom3, "1");
- G_Assert(z, domainTable.exists("http://www.bar.com/?zaz=asdf#url"),
- "Domain lookups broken (single dot)");
- G_Assert(z, domainTable.exists("http://www.amazon.co.uk/?z=af#url"),
- "Domain lookups broken (two dots)");
- G_Assert(z, domainTable.exists("http://127.0.0.1/?z=af#url"),
- "Domain lookups broken (IP)");
- G_Assert(z, domainTable.count == 3, 'domainTable: wrong size');
-
- var site1 = "google.com/safebrowsing/";
- var site2 = "www.foo.bar/";
- var site3 = "127.0.0.1/";
- var siteTable = Cc['@mozilla.org/url-classifier/table;1?type=site']
- .createInstance(Ci.nsIUrlClassifierTable);
- siteTable.insert(site1, "1");
- siteTable.insert(site2, "1");
- siteTable.insert(site3, "1");
- G_Assert(z, siteTable.exists("http://www.google.com/safebrowsing/1.php"),
- "Site lookups broken - reducing");
- G_Assert(z, siteTable.exists("http://www.foo.bar/some/random/path"),
- "Site lookups broken - fqdn");
- G_Assert(z, siteTable.exists("http://127.0.0.1/something?hello=1"),
- "Site lookups broken - IP");
- G_Assert(z, !siteTable.exists("http://www.google.com/search/"),
- "Site lookups broken - overreaching");
- G_Assert(z, siteTable.count == 3, 'siteTable: wrong size');
-
- var url1 = "http://poseidon.marinet.gr/~eleni/eBay/index.php";
- var domainHash = "01844755C8143C4579BB28DD59C23747";
- var enchashTable = Cc['@mozilla.org/url-classifier/table;1?type=enchash']
- .createInstance(Ci.nsIUrlClassifierTable);
- enchashTable.insert(domainHash, "bGtEQWJuMl9FA3Kl5RiXMpgFU8nDJl9J0hXjUck9+"
- + "mMUQwAN6llf0gJeY5DIPPc2f+a8MSBFJN17ANGJ"
- + "Zl5oZVsQfSW4i12rlScsx4tweZAE");
- G_Assert(z, enchashTable.exists(url1), 'enchash lookup failed');
- G_Assert(z, !enchashTable.exists(url1 + '/foo'),
- "enchash lookup broken - overreaching");
- G_Assert(z, enchashTable.count == 1, 'enchashTable: wrong size');
-
- // TODO: test replace
- G_Debug(z, "PASSED");
- }
-
- function ProtectionListManagerTests() {
- var z = "listmanager UNITTEST";
- G_Debug(z, "Starting");
-
- // test lookup and register
- var listManagerInst = Cc["@mozilla.org/url-classifier/listmanager;1"]
- .createInstance(Ci.nsIUrlListManager);
- var listName = 'foo-bar-url';
- listManagerInst.registerTable(listName, false);
- listManagerInst.safeInsert(listName, 'test', '1');
- G_Assert(z, listManagerInst.safeExists(listName, 'test'),
- 'insert/exist failed');
-
- // test serialization
- var baseName = (new Date().getTime()) + ".tmp";
- var tempDir = Cc["@mozilla.org/file/directory_service;1"]
- .getService(Ci.nsIProperties)
- .get("TmpD", Ci.nsILocalFile);
- tempDir.append(baseName);
- tempDir.createUnique(tempDir.DIRECTORY_TYPE, 0744);
-
- var listManager = Cc["@mozilla.org/url-classifier/listmanager;1"]
- .getService(Ci.nsIUrlListManager);
- listManager.setAppDir(tempDir);
-
- var data = "";
-
- var set1Name = "test1-foo-domain";
- data += "[" + set1Name + " 1.2]\n";
- var set1 = {};
- for (var i = 0; i < 10; i++) {
- set1["http://" + i + ".com"] = 1;
- data += "+" + i + ".com\t1\n";
- }
-
- data += "\n";
- var set2Name = "test2-foo-domain";
- // TODO must have blank line
- data += "\n[" + set2Name + " 1.7]\n";
- var set2 = {};
- for (var i = 0; i < 5; i++) {
- set2["http://" + i + ".com"] = 1;
- data += "+" + i + ".com\t1\n";
- }
-
- function deserialized(tablesKnown, tablesData) {
- listManager.wrappedJSObject.dataReady(tablesKnown, tablesData);
-
- var file = tempDir.clone();
- file.append(set1Name + ".sst");
- G_Assert(z, file.exists() && file.isFile() && file.isReadable(),
- "Failed to write out: " + file.path);
-
- file = tempDir.clone();
- file.append(set2Name + ".sst");
- G_Assert(z, file.exists() && file.isFile() && file.isReadable(),
- "Failed to write out: " + file.path);
-
- // now try to read them back from disk
- listManager = Cc["@mozilla.org/url-classifier/listmanager;1"]
- .createInstance(Ci.nsIUrlListManager);
- listManager.setAppDir(tempDir);
- var tables = [ set1Name, set2Name ];
- listManager.enableUpdate(set1Name);
- listManager.enableUpdate(set2Name);
- listManager.wrappedJSObject.readDataFiles();
-
- // assert that the values match
- for (var prop in set1) {
- G_Assert(z,
- listManager.wrappedJSObject.tablesData[set1Name].exists(prop),
-             "Couldn't find member " + prop + " of set1 from disk.");
- }
-
- for (var prop in set2) {
- G_Assert(z,
- listManager.wrappedJSObject.tablesData[set2Name].exists(prop),
-             "Couldn't find member " + prop + " of set2 from disk.");
- }
-
- tempDir.remove(true);
-
- G_Debug(z, "PASSED");
- };
-
- // Use the unwrapped object for the unittest
- listManager.wrappedJSObject.deserialize_(data, deserialized);
- }
-
- function onProtUnittestLoad() {
- ProtectionTableTests();
- ProtectionListManagerTests();
- }
-]]></script>
-</window>
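
ProtectionTableTests above relies on URL canonicalization making the fully percent-encoded form of http://168.188.99.26/.secure/www.ebay.com/ match the plain form inserted into the table. The core of that step is repeated percent-decoding until the string stops changing; a minimal sketch under that assumption (the real canonicalizer also normalizes case, IP addresses and re-escaping):

    // Percent-decode a string repeatedly so that doubly-encoded input
    // collapses to its plain form before lookup.
    function unescapeRepeatedly(s) {
      var prev;
      do {
        prev = s;
        s = s.replace(/%([0-9a-fA-F]{2})/g,
                      (_, hex) => String.fromCharCode(parseInt(hex, 16)));
      } while (s !== prev);
      return s;
    }

    // unescapeRepeatedly("%31%36%38%2e%31%38%38%2e%39%39%2e%32%36")
    //   === "168.188.99.26"
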