| author | Matt A. Tobin <mattatobin@localhost.localdomain> | 2018-02-02 04:16:08 -0500 |
|---|---|---|
| committer | Matt A. Tobin <mattatobin@localhost.localdomain> | 2018-02-02 04:16:08 -0500 |
| commit | 5f8de423f190bbb79a62f804151bc24824fa32d8 (patch) | |
| tree | 10027f336435511475e392454359edea8e25895d /toolkit/components/url-classifier/tests/gtest | |
| parent | 49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff) | |
| download | UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar / .tar.gz / .tar.lz / .tar.xz / .zip | |
Add m-esr52 at 52.6.0
Diffstat (limited to 'toolkit/components/url-classifier/tests/gtest')
15 files changed, 2730 insertions, 0 deletions
diff --git a/toolkit/components/url-classifier/tests/gtest/Common.cpp b/toolkit/components/url-classifier/tests/gtest/Common.cpp new file mode 100644 index 000000000..b5f024b38 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/Common.cpp @@ -0,0 +1,78 @@ +#include "Common.h" +#include "HashStore.h" +#include "Classifier.h" +#include "nsAppDirectoryServiceDefs.h" +#include "nsTArray.h" +#include "nsIThread.h" +#include "nsThreadUtils.h" +#include "nsUrlClassifierUtils.h" + +using namespace mozilla; +using namespace mozilla::safebrowsing; + +template<typename Function> +void RunTestInNewThread(Function&& aFunction) { + nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(mozilla::Forward<Function>(aFunction)); + nsCOMPtr<nsIThread> testingThread; + nsresult rv = NS_NewThread(getter_AddRefs(testingThread), r); + ASSERT_EQ(rv, NS_OK); + testingThread->Shutdown(); +} + +already_AddRefed<nsIFile> +GetFile(const nsTArray<nsString>& path) +{ + nsCOMPtr<nsIFile> file; + nsresult rv = NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + if (NS_WARN_IF(NS_FAILED(rv))) { + return nullptr; + } + + for (uint32_t i = 0; i < path.Length(); i++) { + file->Append(path[i]); + } + return file.forget(); +} + +void ApplyUpdate(nsTArray<TableUpdate*>& updates) +{ + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + + UniquePtr<Classifier> classifier(new Classifier()); + classifier->Open(*file); + + { + // Force nsIUrlClassifierUtils loading on main thread + // because nsIUrlClassifierDBService will not run in advance + // in gtest. + nsresult rv; + nsCOMPtr<nsIUrlClassifierUtils> dummy = + do_GetService(NS_URLCLASSIFIERUTILS_CONTRACTID, &rv); + ASSERT_TRUE(NS_SUCCEEDED(rv)); + } + + RunTestInNewThread([&] () -> void { + classifier->ApplyUpdates(&updates); + }); +} + +void ApplyUpdate(TableUpdate* update) +{ + nsTArray<TableUpdate*> updates = { update }; + ApplyUpdate(updates); +} + +void +PrefixArrayToPrefixStringMap(const nsTArray<nsCString>& prefixArray, + PrefixStringMap& out) +{ + out.Clear(); + + for (uint32_t i = 0; i < prefixArray.Length(); i++) { + const nsCString& prefix = prefixArray[i]; + nsCString* prefixString = out.LookupOrAdd(prefix.Length()); + prefixString->Append(prefix.BeginReading(), prefix.Length()); + } +} + diff --git a/toolkit/components/url-classifier/tests/gtest/Common.h b/toolkit/components/url-classifier/tests/gtest/Common.h new file mode 100644 index 000000000..c9a9cdf7e --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/Common.h @@ -0,0 +1,26 @@ +#include "HashStore.h" +#include "nsIFile.h" +#include "nsTArray.h" +#include "gtest/gtest.h" + +using namespace mozilla; +using namespace mozilla::safebrowsing; + +template<typename Function> +void RunTestInNewThread(Function&& aFunction); + +// Return nsIFile with root directory - NS_APP_USER_PROFILE_50_DIR +// Sub-directories are passed in path argument. +already_AddRefed<nsIFile> +GetFile(const nsTArray<nsString>& path); + +// ApplyUpdate will call |ApplyUpdates| of Classifier within a new thread +void ApplyUpdate(nsTArray<TableUpdate*>& updates); + +void ApplyUpdate(TableUpdate* update); + +// This function converts lexigraphic-sorted prefixes to a hashtable +// which key is prefix size and value is concatenated prefix string. 
+void PrefixArrayToPrefixStringMap(const nsTArray<nsCString>& prefixArray, + PrefixStringMap& out); + diff --git a/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp b/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp new file mode 100644 index 000000000..dba2fc2c1 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp @@ -0,0 +1,279 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include <stdio.h> +#include <stdlib.h> +#include <set> + +#include "gtest/gtest.h" +#include "ChunkSet.h" +#include "mozilla/ArrayUtils.h" + +TEST(UrlClassifierChunkSet, Empty) +{ + mozilla::safebrowsing::ChunkSet chunkSet; + mozilla::safebrowsing::ChunkSet removeSet; + + removeSet.Set(0); + + ASSERT_FALSE(chunkSet.Has(0)); + ASSERT_FALSE(chunkSet.Has(1)); + ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK); + ASSERT_TRUE(chunkSet.Length() == 0); + + chunkSet.Set(0); + + ASSERT_TRUE(chunkSet.Has(0)); + ASSERT_TRUE(chunkSet.Length() == 1); + ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK); + ASSERT_FALSE(chunkSet.Has(0)); + ASSERT_TRUE(chunkSet.Length() == 0); +} + +TEST(UrlClassifierChunkSet, Main) +{ + static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13}; + + mozilla::safebrowsing::ChunkSet chunkSet; + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + chunkSet.Set(testVals[i]); + } + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + ASSERT_TRUE(chunkSet.Has(testVals[i])); + } + + ASSERT_FALSE(chunkSet.Has(3)); + ASSERT_FALSE(chunkSet.Has(4)); + ASSERT_FALSE(chunkSet.Has(9)); + ASSERT_FALSE(chunkSet.Has(11)); + + ASSERT_TRUE(chunkSet.Length() == MOZ_ARRAY_LENGTH(testVals)); +} + +TEST(UrlClassifierChunkSet, Merge) +{ + static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13}; + static int mergeVals[] = {9, 3, 4, 20, 14, 16}; + + mozilla::safebrowsing::ChunkSet chunkSet; + mozilla::safebrowsing::ChunkSet mergeSet; + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + chunkSet.Set(testVals[i]); + } + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + mergeSet.Set(mergeVals[i]); + } + + chunkSet.Merge(mergeSet); + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + ASSERT_TRUE(chunkSet.Has(testVals[i])); + } + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + ASSERT_TRUE(chunkSet.Has(mergeVals[i])); + } + + // -1 because 14 is duplicated in both sets + ASSERT_TRUE(chunkSet.Length() == + MOZ_ARRAY_LENGTH(testVals) + MOZ_ARRAY_LENGTH(mergeVals) - 1); + + ASSERT_FALSE(chunkSet.Has(11)); + ASSERT_FALSE(chunkSet.Has(15)); + ASSERT_FALSE(chunkSet.Has(17)); + ASSERT_FALSE(chunkSet.Has(18)); + ASSERT_FALSE(chunkSet.Has(19)); +} + +TEST(UrlClassifierChunkSet, Merge2) +{ + static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13}; + static int mergeVals[] = {9, 3, 4, 20, 14, 16}; + static int mergeVals2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}; + + mozilla::safebrowsing::ChunkSet chunkSet; + mozilla::safebrowsing::ChunkSet mergeSet; + mozilla::safebrowsing::ChunkSet mergeSet2; + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + chunkSet.Set(testVals[i]); + } + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + mergeSet.Set(mergeVals[i]); + } + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals2); i++) { + 
mergeSet2.Set(mergeVals2[i]); + } + + chunkSet.Merge(mergeSet); + chunkSet.Merge(mergeSet2); + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + ASSERT_TRUE(chunkSet.Has(testVals[i])); + } + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + ASSERT_TRUE(chunkSet.Has(mergeVals[i])); + } + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals2); i++) { + ASSERT_TRUE(chunkSet.Has(mergeVals2[i])); + } + + ASSERT_FALSE(chunkSet.Has(15)); + ASSERT_FALSE(chunkSet.Has(17)); + ASSERT_FALSE(chunkSet.Has(18)); + ASSERT_FALSE(chunkSet.Has(19)); +} + +TEST(UrlClassifierChunkSet, Stress) +{ + mozilla::safebrowsing::ChunkSet chunkSet; + mozilla::safebrowsing::ChunkSet mergeSet; + std::set<int> refSet; + std::set<int> refMergeSet; + static const int TEST_ITERS = 7000; + static const int REMOVE_ITERS = 3000; + static const int TEST_RANGE = 10000; + + // Construction by Set + for (int i = 0; i < TEST_ITERS; i++) { + int chunk = rand() % TEST_RANGE; + chunkSet.Set(chunk); + refSet.insert(chunk); + } + + // Same elements as reference set + for (auto it = refSet.begin(); it != refSet.end(); ++it) { + ASSERT_TRUE(chunkSet.Has(*it)); + } + + // Hole punching via Remove + for (int i = 0; i < REMOVE_ITERS; i++) { + int chunk = rand() % TEST_RANGE; + mozilla::safebrowsing::ChunkSet helpChunk; + helpChunk.Set(chunk); + + chunkSet.Remove(helpChunk); + refSet.erase(chunk); + + ASSERT_FALSE(chunkSet.Has(chunk)); + } + + // Should have chunks present in reference set + // Should not have chunks absent in reference set + for (int it = 0; it < TEST_RANGE; ++it) { + auto found = refSet.find(it); + if (chunkSet.Has(it)) { + ASSERT_FALSE(found == refSet.end()); + } else { + ASSERT_TRUE(found == refSet.end()); + } + } + + // Construct set to merge with + for (int i = 0; i < TEST_ITERS; i++) { + int chunk = rand() % TEST_RANGE; + mergeSet.Set(chunk); + refMergeSet.insert(chunk); + } + + // Merge set constructed correctly + for (auto it = refMergeSet.begin(); it != refMergeSet.end(); ++it) { + ASSERT_TRUE(mergeSet.Has(*it)); + } + + mozilla::safebrowsing::ChunkSet origSet; + origSet = chunkSet; + + chunkSet.Merge(mergeSet); + refSet.insert(refMergeSet.begin(), refMergeSet.end()); + + // Check for presence of elements from both source + // Should not have chunks absent in reference set + for (int it = 0; it < TEST_RANGE; ++it) { + auto found = refSet.find(it); + if (chunkSet.Has(it)) { + ASSERT_FALSE(found == refSet.end()); + } else { + ASSERT_TRUE(found == refSet.end()); + } + } + + // Unmerge + chunkSet.Remove(origSet); + for (int it = 0; it < TEST_RANGE; ++it) { + if (origSet.Has(it)) { + ASSERT_FALSE(chunkSet.Has(it)); + } else if (mergeSet.Has(it)) { + ASSERT_TRUE(chunkSet.Has(it)); + } + } +} + +TEST(UrlClassifierChunkSet, RemoveClear) +{ + static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13}; + static int mergeVals[] = {3, 4, 9, 16, 20}; + + mozilla::safebrowsing::ChunkSet chunkSet; + mozilla::safebrowsing::ChunkSet mergeSet; + mozilla::safebrowsing::ChunkSet removeSet; + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + chunkSet.Set(testVals[i]); + removeSet.Set(testVals[i]); + } + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + mergeSet.Set(mergeVals[i]); + } + + ASSERT_TRUE(chunkSet.Merge(mergeSet) == NS_OK); + ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK); + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + ASSERT_TRUE(chunkSet.Has(mergeVals[i])); + } + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + ASSERT_FALSE(chunkSet.Has(testVals[i])); 
+ } + + chunkSet.Clear(); + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + ASSERT_FALSE(chunkSet.Has(mergeVals[i])); + } +} + +TEST(UrlClassifierChunkSet, Serialize) +{ + static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13}; + static int mergeVals[] = {3, 4, 9, 16, 20}; + + mozilla::safebrowsing::ChunkSet chunkSet; + mozilla::safebrowsing::ChunkSet mergeSet; + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) { + chunkSet.Set(testVals[i]); + } + + for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) { + mergeSet.Set(mergeVals[i]); + } + + chunkSet.Merge(mergeSet); + + nsAutoCString mergeResult; + chunkSet.Serialize(mergeResult); + + printf("mergeResult: %s\n", mergeResult.get()); + + nsAutoCString expected(NS_LITERAL_CSTRING("1-10,12-14,16,20")); + + ASSERT_TRUE(mergeResult.Equals(expected)); +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp b/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp new file mode 100644 index 000000000..bdb9eebb0 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp @@ -0,0 +1,97 @@ +#include "HashStore.h" +#include "nsPrintfCString.h" +#include "string.h" +#include "gtest/gtest.h" +#include "mozilla/Unused.h" + +using namespace mozilla; +using namespace mozilla::safebrowsing; + +static const char* kFilesInV2[] = {".pset", ".sbstore"}; +static const char* kFilesInV4[] = {".pset", ".metadata"}; + +#define V2_TABLE "gtest-malware-simple" +#define V4_TABLE1 "goog-malware-proto" +#define V4_TABLE2 "goog-phish-proto" + +#define ROOT_DIR NS_LITERAL_STRING("safebrowsing") +#define SB_FILE(x, y) NS_ConvertUTF8toUTF16(nsPrintfCString("%s%s",x, y)) + +template<typename T, size_t N> +void CheckFileExist(const char* table, const T (&files)[N], bool expectExists) +{ + for (uint32_t i = 0; i < N; i++) { + // This is just a quick way to know if this is v4 table + NS_ConvertUTF8toUTF16 SUB_DIR(strstr(table, "-proto") ? "google4" : ""); + nsCOMPtr<nsIFile> file = + GetFile(nsTArray<nsString> { ROOT_DIR, SUB_DIR, SB_FILE(table, files[i]) }); + + bool exists; + file->Exists(&exists); + + nsAutoCString path; + file->GetNativePath(path); + ASSERT_EQ(expectExists, exists) << path.get(); + } +} + +TEST(FailUpdate, CheckTableReset) +{ + const bool FULL_UPDATE = true; + const bool PARTIAL_UPDATE = false; + + // Apply V2 update + { + auto update = new TableUpdateV2(NS_LITERAL_CSTRING(V2_TABLE)); + Unused << update->NewAddChunk(1); + + ApplyUpdate(update); + + // A successful V2 update should create .pset & .sbstore files + CheckFileExist(V2_TABLE, kFilesInV2, true); + } + + // Helper function to generate table update data + auto func = [](TableUpdateV4* update, bool full, const char* str) { + update->SetFullUpdate(full); + std::string prefix(str); + update->NewPrefixes(prefix.length(), prefix); + }; + + // Apply V4 update for table1 + { + auto update = new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE1)); + func(update, FULL_UPDATE, "test_prefix"); + + ApplyUpdate(update); + + // A successful V4 update should create .pset & .metadata files + CheckFileExist(V4_TABLE1, kFilesInV4, true); + } + + // Apply V4 update for table2 + { + auto update = new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE2)); + func(update, FULL_UPDATE, "test_prefix"); + + ApplyUpdate(update); + + CheckFileExist(V4_TABLE2, kFilesInV4, true); + } + + // Apply V4 update with the same prefix in previous full udpate + // This should cause an update error. 
+ { + auto update = new TableUpdateV4(NS_LITERAL_CSTRING(V4_TABLE1)); + func(update, PARTIAL_UPDATE, "test_prefix"); + + ApplyUpdate(update); + + // A fail update should remove files for that table + CheckFileExist(V4_TABLE1, kFilesInV4, false); + + // A fail update should NOT remove files for the other tables + CheckFileExist(V2_TABLE, kFilesInV2, true); + CheckFileExist(V4_TABLE2, kFilesInV4, true); + } +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp b/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp new file mode 100644 index 000000000..00525f704 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp @@ -0,0 +1,88 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "LookupCacheV4.h" +#include "Common.h" + +#define GTEST_SAFEBROWSING_DIR NS_LITERAL_CSTRING("safebrowsing") +#define GTEST_TABLE NS_LITERAL_CSTRING("gtest-malware-proto") + +typedef nsCString _Fragment; +typedef nsTArray<nsCString> _PrefixArray; + +// Generate a hash prefix from string +static const nsCString +GeneratePrefix(const _Fragment& aFragment, uint8_t aLength) +{ + Completion complete; + nsCOMPtr<nsICryptoHash> cryptoHash = do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID); + complete.FromPlaintext(aFragment, cryptoHash); + + nsCString hash; + hash.Assign((const char *)complete.buf, aLength); + return hash; +} + +static UniquePtr<LookupCacheV4> +SetupLookupCacheV4(const _PrefixArray& prefixArray) +{ + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + + file->AppendNative(GTEST_SAFEBROWSING_DIR); + + UniquePtr<LookupCacheV4> cache = MakeUnique<LookupCacheV4>(GTEST_TABLE, EmptyCString(), file); + nsresult rv = cache->Init(); + EXPECT_EQ(rv, NS_OK); + + PrefixStringMap map; + PrefixArrayToPrefixStringMap(prefixArray, map); + rv = cache->Build(map); + EXPECT_EQ(rv, NS_OK); + + return Move(cache); +} + +void +TestHasPrefix(const _Fragment& aFragment, bool aExpectedHas, bool aExpectedComplete) +{ + _PrefixArray array = { GeneratePrefix(_Fragment("bravo.com/"), 32), + GeneratePrefix(_Fragment("browsing.com/"), 8), + GeneratePrefix(_Fragment("gound.com/"), 5), + GeneratePrefix(_Fragment("small.com/"), 4) + }; + + RunTestInNewThread([&] () -> void { + UniquePtr<LookupCache> cache = SetupLookupCacheV4(array); + + Completion lookupHash; + nsCOMPtr<nsICryptoHash> cryptoHash = do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID); + lookupHash.FromPlaintext(aFragment, cryptoHash); + + bool has, complete; + nsresult rv = cache->Has(lookupHash, &has, &complete); + + EXPECT_EQ(rv, NS_OK); + EXPECT_EQ(has, aExpectedHas); + EXPECT_EQ(complete, aExpectedComplete); + + cache->ClearAll(); + }); + +} + +TEST(LookupCacheV4, HasComplete) +{ + TestHasPrefix(_Fragment("bravo.com/"), true, true); +} + +TEST(LookupCacheV4, HasPrefix) +{ + TestHasPrefix(_Fragment("browsing.com/"), true, false); +} + +TEST(LookupCacheV4, Nomatch) +{ + TestHasPrefix(_Fragment("nomatch.com/"), false, false); +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp b/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp new file mode 100644 index 000000000..72ff08a1e --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp @@ -0,0 +1,98 @@ +#include "LookupCache.h" +#include 
"LookupCacheV4.h" +#include "HashStore.h" +#include "gtest/gtest.h" +#include "nsAppDirectoryServiceDefs.h" + +namespace mozilla { +namespace safebrowsing { + +class PerProviderDirectoryTestUtils { +public: + template<typename T> + static nsIFile* InspectStoreDirectory(const T& aT) + { + return aT.mStoreDirectory; + } +}; + +} // end of namespace safebrowsing +} // end of namespace mozilla + +using namespace mozilla; +using namespace mozilla::safebrowsing; + +template<typename T> +void VerifyPrivateStorePath(const char* aTableName, + const char* aProvider, + nsIFile* aRootDir, + bool aUsePerProviderStore) +{ + nsString rootStorePath; + nsresult rv = aRootDir->GetPath(rootStorePath); + EXPECT_EQ(rv, NS_OK); + + T target(nsCString(aTableName), nsCString(aProvider), aRootDir); + + nsIFile* privateStoreDirectory = + PerProviderDirectoryTestUtils::InspectStoreDirectory(target); + + nsString privateStorePath; + rv = privateStoreDirectory->GetPath(privateStorePath); + ASSERT_EQ(rv, NS_OK); + + nsString expectedPrivateStorePath = rootStorePath; + + if (aUsePerProviderStore) { + // Use API to append "provider" to the root directoy path + nsCOMPtr<nsIFile> expectedPrivateStoreDir; + rv = aRootDir->Clone(getter_AddRefs(expectedPrivateStoreDir)); + ASSERT_EQ(rv, NS_OK); + + expectedPrivateStoreDir->AppendNative(nsCString(aProvider)); + rv = expectedPrivateStoreDir->GetPath(expectedPrivateStorePath); + ASSERT_EQ(rv, NS_OK); + } + + printf("table: %s\nprovider: %s\nroot path: %s\nprivate path: %s\n\n", + aTableName, + aProvider, + NS_ConvertUTF16toUTF8(rootStorePath).get(), + NS_ConvertUTF16toUTF8(privateStorePath).get()); + + ASSERT_TRUE(privateStorePath == expectedPrivateStorePath); +} + +TEST(PerProviderDirectory, LookupCache) +{ + RunTestInNewThread([] () -> void { + nsCOMPtr<nsIFile> rootDir; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir)); + + // For V2 tables (NOT ending with '-proto'), root directory should be + // used as the private store. + VerifyPrivateStorePath<LookupCacheV2>("goog-phish-shavar", "google", rootDir, false); + + // For V4 tables, if provider is found, use per-provider subdirectory; + // If not found, use root directory. + VerifyPrivateStorePath<LookupCacheV4>("goog-noprovider-proto", "", rootDir, false); + VerifyPrivateStorePath<LookupCacheV4>("goog-phish-proto", "google4", rootDir, true); + }); +} + +TEST(PerProviderDirectory, HashStore) +{ + RunTestInNewThread([] () -> void { + nsCOMPtr<nsIFile> rootDir; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir)); + + // For V2 tables (NOT ending with '-proto'), root directory should be + // used as the private store. + VerifyPrivateStorePath<HashStore>("goog-phish-shavar", "google", rootDir, false); + + // For V4 tables, if provider is found, use per-provider subdirectory; + // If not found, use root directory. + VerifyPrivateStorePath<HashStore>("goog-noprovider-proto", "", rootDir, false); + VerifyPrivateStorePath<HashStore>("goog-phish-proto", "google4", rootDir, true); + }); +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp b/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp new file mode 100644 index 000000000..ea6ffb5e6 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp @@ -0,0 +1,159 @@ +/* Any copyright is dedicated to the Public Domain. 
+ * http://creativecommons.org/publicdomain/zero/1.0/ */ + +#include "gtest/gtest.h" +#include "ProtocolParser.h" +#include "mozilla/EndianUtils.h" + +using namespace mozilla; +using namespace mozilla::safebrowsing; + +typedef FetchThreatListUpdatesResponse_ListUpdateResponse ListUpdateResponse; + +static bool +InitUpdateResponse(ListUpdateResponse* aUpdateResponse, + ThreatType aThreatType, + const nsACString& aState, + const nsACString& aChecksum, + bool isFullUpdate, + const nsTArray<uint32_t>& aFixedLengthPrefixes, + bool aDoPrefixEncoding); + +static void +DumpBinary(const nsACString& aBinary); + +TEST(ProtocolParser, UpdateWait) +{ + // Top level response which contains a list of update response + // for different lists. + FetchThreatListUpdatesResponse response; + + auto r = response.mutable_list_update_responses()->Add(); + InitUpdateResponse(r, SOCIAL_ENGINEERING_PUBLIC, + nsCString("sta\x00te", 6), + nsCString("check\x0sum", 9), + true, + {0, 1, 2, 3}, + false /* aDoPrefixEncoding */ ); + + // Set min wait duration. + auto minWaitDuration = response.mutable_minimum_wait_duration(); + minWaitDuration->set_seconds(8); + minWaitDuration->set_nanos(1 * 1000000000); + + std::string s; + response.SerializeToString(&s); + + DumpBinary(nsCString(s.c_str(), s.length())); + + ProtocolParser* p = new ProtocolParserProtobuf(); + p->AppendStream(nsCString(s.c_str(), s.length())); + p->End(); + ASSERT_EQ(p->UpdateWaitSec(), 9u); + delete p; +} + +TEST(ProtocolParser, SingleValueEncoding) +{ + // Top level response which contains a list of update response + // for different lists. + FetchThreatListUpdatesResponse response; + + auto r = response.mutable_list_update_responses()->Add(); + + const char* expectedPrefix = "\x00\x01\x02\x00"; + if (!InitUpdateResponse(r, SOCIAL_ENGINEERING_PUBLIC, + nsCString("sta\x00te", 6), + nsCString("check\x0sum", 9), + true, + // As per spec, we should interpret the prefix as uint32 + // in little endian before encoding. + {LittleEndian::readUint32(expectedPrefix)}, + true /* aDoPrefixEncoding */ )) { + printf("Failed to initialize update response."); + ASSERT_TRUE(false); + return; + } + + // Set min wait duration. + auto minWaitDuration = response.mutable_minimum_wait_duration(); + minWaitDuration->set_seconds(8); + minWaitDuration->set_nanos(1 * 1000000000); + + std::string s; + response.SerializeToString(&s); + + // Feed data to the protocol parser. + ProtocolParser* p = new ProtocolParserProtobuf(); + p->SetRequestedTables({ nsCString("googpub-phish-proto") }); + p->AppendStream(nsCString(s.c_str(), s.length())); + p->End(); + + auto& tus = p->GetTableUpdates(); + auto tuv4 = TableUpdate::Cast<TableUpdateV4>(tus[0]); + auto& prefixMap = tuv4->Prefixes(); + for (auto iter = prefixMap.Iter(); !iter.Done(); iter.Next()) { + // This prefix map should contain only a single 4-byte prefixe. + ASSERT_EQ(iter.Key(), 4u); + + // The fixed-length prefix string from ProtcolParser should + // exactly match the expected prefix string. 
+ auto& prefix = iter.Data()->GetPrefixString(); + ASSERT_TRUE(prefix.Equals(nsCString(expectedPrefix, 4))); + } + + delete p; +} + +static bool +InitUpdateResponse(ListUpdateResponse* aUpdateResponse, + ThreatType aThreatType, + const nsACString& aState, + const nsACString& aChecksum, + bool isFullUpdate, + const nsTArray<uint32_t>& aFixedLengthPrefixes, + bool aDoPrefixEncoding) +{ + aUpdateResponse->set_threat_type(aThreatType); + aUpdateResponse->set_new_client_state(aState.BeginReading(), aState.Length()); + aUpdateResponse->mutable_checksum()->set_sha256(aChecksum.BeginReading(), aChecksum.Length()); + aUpdateResponse->set_response_type(isFullUpdate ? ListUpdateResponse::FULL_UPDATE + : ListUpdateResponse::PARTIAL_UPDATE); + + auto additions = aUpdateResponse->mutable_additions()->Add(); + + if (!aDoPrefixEncoding) { + additions->set_compression_type(RAW); + auto rawHashes = additions->mutable_raw_hashes(); + rawHashes->set_prefix_size(4); + auto prefixes = rawHashes->mutable_raw_hashes(); + for (auto p : aFixedLengthPrefixes) { + char buffer[4]; + NativeEndian::copyAndSwapToBigEndian(buffer, &p, 1); + prefixes->append(buffer, 4); + } + return true; + } + + if (1 != aFixedLengthPrefixes.Length()) { + printf("This function only supports single value encoding.\n"); + return false; + } + + uint32_t firstValue = aFixedLengthPrefixes[0]; + additions->set_compression_type(RICE); + auto riceHashes = additions->mutable_rice_hashes(); + riceHashes->set_first_value(firstValue); + riceHashes->set_num_entries(0); + + return true; +} + +static void DumpBinary(const nsACString& aBinary) +{ + nsCString s; + for (size_t i = 0; i < aBinary.Length(); i++) { + s.AppendPrintf("\\x%.2X", (uint8_t)aBinary[i]); + } + printf("%s\n", s.get()); +}
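The SingleValueEncoding test above depends on a byte-order convention called out in its comments: the 4-byte prefix is read as a little-endian uint32 before it is stored as the Rice `first_value`, and the parser is expected to hand back the original prefix bytes. The following is a minimal standalone sketch of that round trip in plain standard C++ rather than the Mozilla `EndianUtils` API; the helper names `ReadLittleEndianU32` and `WriteLittleEndianU32` are illustrative stand-ins, not functions from the tree.

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>

// Assemble/emit a uint32 in little-endian byte order, independent of host endianness.
static uint32_t ReadLittleEndianU32(const unsigned char* p) {
  return static_cast<uint32_t>(p[0]) |
         (static_cast<uint32_t>(p[1]) << 8) |
         (static_cast<uint32_t>(p[2]) << 16) |
         (static_cast<uint32_t>(p[3]) << 24);
}

static void WriteLittleEndianU32(uint32_t v, unsigned char* out) {
  out[0] = static_cast<unsigned char>(v);
  out[1] = static_cast<unsigned char>(v >> 8);
  out[2] = static_cast<unsigned char>(v >> 16);
  out[3] = static_cast<unsigned char>(v >> 24);
}

int main() {
  // Same bytes as `expectedPrefix` in the test: "\x00\x01\x02\x00".
  const unsigned char expectedPrefix[4] = {0x00, 0x01, 0x02, 0x00};

  // Interpreted little-endian, this is the value handed to the Rice encoder.
  uint32_t firstValue = ReadLittleEndianU32(expectedPrefix);
  std::printf("first_value = 0x%08X (%u)\n",
              static_cast<unsigned>(firstValue), static_cast<unsigned>(firstValue));

  // Writing the value back in little-endian order reproduces the prefix bytes,
  // which is what the test's final prefix.Equals(...) assertion relies on.
  unsigned char roundTrip[4];
  WriteLittleEndianU32(firstValue, roundTrip);
  assert(std::memcmp(roundTrip, expectedPrefix, 4) == 0);
  return 0;
}
```

Under this reading, `first_value` for `"\x00\x01\x02\x00"` is `0x00020100`, and writing it back in little-endian order yields the original four bytes that the decoded prefix string is compared against.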
\ No newline at end of file diff --git a/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp b/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp new file mode 100644 index 000000000..f03d27358 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp @@ -0,0 +1,165 @@ +/* Any copyright is dedicated to the Public Domain. + * http://creativecommons.org/publicdomain/zero/1.0/ */ + +#include "gtest/gtest.h" +#include "RiceDeltaDecoder.h" +#include "mozilla/ArrayUtils.h" + +using namespace mozilla; +using namespace mozilla::safebrowsing; + +struct TestingData { + std::vector<uint32_t> mExpectedDecoded; + std::vector<uint8_t> mEncoded; + uint32_t mRiceParameter; +}; + +static bool runOneTest(TestingData& aData); + +TEST(RiceDeltaDecoder, SingleEncodedValue) { + TestingData td = { { 99 }, { 99 }, 0 }; + + ASSERT_TRUE(runOneTest(td)); +} + +// In this batch of tests, the encoded data would be like +// what we originally receive from the network. See comment +// in |runOneTest| for more detail. +TEST(RiceDeltaDecoder, Empty) { + + // The following structure and testing data is copied from Chromium source code: + // + // https://chromium.googlesource.com/chromium/src.git/+/950f9975599768b6a08c7146cb4befa161be87aa/components/safe_browsing_db/v4_rice_unittest.cc#75 + // + // and will be translated to our own testing format. + + struct RiceDecodingTestInfo { + uint32_t mRiceParameter; + std::vector<uint32_t> mDeltas; + std::string mEncoded; + + RiceDecodingTestInfo(uint32_t aRiceParameter, + const std::vector<uint32_t>& aDeltas, + const std::string& aEncoded) + : mRiceParameter(aRiceParameter) + , mDeltas(aDeltas) + , mEncoded(aEncoded) + { + } + }; + + // Copyright 2016 The Chromium Authors. All rights reserved. + // Use of this source code is governed by a BSD-style license that can be + // found in the media/webrtc/trunk/webrtc/LICENSE. 
+ + // ----- Start of Chromium test code ---- + const std::vector<RiceDecodingTestInfo> TESTING_DATA_CHROMIUM = { + RiceDecodingTestInfo(2, {15, 9}, "\xf7\x2"), + RiceDecodingTestInfo( + 28, {1777762129, 2093280223, 924369848}, + "\xbf\xa8\x3f\xfb\xfc\xfb\x5e\x27\xe6\xc3\x1d\xc6\x38"), + RiceDecodingTestInfo( + 28, {62763050, 1046523781, 192522171, 1800511020, 4442775, 582142548}, + "\x54\x60\x7b\xe7\x0a\x5f\xc1\xdc\xee\x69\xde" + "\xfe\x58\x3c\xa3\xd6\xa5\xf2\x10\x8c\x4a\x59" + "\x56\x00"), + RiceDecodingTestInfo( + 28, {26067715, 344823336, 8420095, 399843890, 95029378, 731622412, + 35811335, 1047558127, 1117722715, 78698892}, + "\x06\x86\x1b\x23\x14\xcb\x46\xf2\xaf\x07\x08\xc9\x88\x54\x1f\x41\x04" + "\xd5\x1a\x03\xeb\xe6\x3a\x80\x13\x91\x7b\xbf\x83\xf3\xb7\x85\xf1\x29" + "\x18\xb3\x61\x09"), + RiceDecodingTestInfo( + 27, {225846818, 328287420, 166748623, 29117720, 552397365, 350353215, + 558267528, 4738273, 567093445, 28563065, 55077698, 73091685, + 339246010, 98242620, 38060941, 63917830, 206319759, 137700744}, + "\x89\x98\xd8\x75\xbc\x44\x91\xeb\x39\x0c\x3e\x30\x9a\x78\xf3\x6a\xd4" + "\xd9\xb1\x9f\xfb\x70\x3e\x44\x3e\xa3\x08\x67\x42\xc2\x2b\x46\x69\x8e" + "\x3c\xeb\xd9\x10\x5a\x43\x9a\x32\xa5\x2d\x4e\x77\x0f\x87\x78\x20\xb6" + "\xab\x71\x98\x48\x0c\x9e\x9e\xd7\x23\x0c\x13\x43\x2c\xa9\x01"), + RiceDecodingTestInfo( + 28, {339784008, 263128563, 63871877, 69723256, 826001074, 797300228, + 671166008, 207712688}, + std::string("\x21\xc5\x02\x91\xf9\x82\xd7\x57\xb8\xe9\x3c\xf0\xc8\x4f" + "\xe8\x64\x8d\x77\x62\x04\xd6\x85\x3f\x1c\x97\x00\x04\x1b" + "\x17\xc6", + 30)), + RiceDecodingTestInfo( + 28, {471820069, 196333855, 855579133, 122737976, 203433838, 85354544, + 1307949392, 165938578, 195134475, 553930435, 49231136}, + "\x95\x9c\x7d\xb0\x8f\xe8\xd9\xbd\xfe\x8c\x7f\x81\x53\x0d\x75\xdc\x4e" + "\x40\x18\x0c\x9a\x45\x3d\xa8\xdc\xfa\x26\x59\x40\x9e\x16\x08\x43\x77" + "\xc3\x4e\x04\x01\xa4\xe6\x5d\x00"), + RiceDecodingTestInfo( + 27, {87336845, 129291033, 30906211, 433549264, 30899891, 53207875, + 11959529, 354827862, 82919275, 489637251, 53561020, 336722992, + 408117728, 204506246, 188216092, 9047110, 479817359, 230317256}, + "\x1a\x4f\x69\x2a\x63\x9a\xf6\xc6\x2e\xaf\x73\xd0\x6f\xd7\x31\xeb\x77" + "\x1d\x43\xe3\x2b\x93\xce\x67\x8b\x59\xf9\x98\xd4\xda\x4f\x3c\x6f\xb0" + "\xe8\xa5\x78\x8d\x62\x36\x18\xfe\x08\x1e\x78\xd8\x14\x32\x24\x84\x61" + "\x1c\xf3\x37\x63\xc4\xa0\x88\x7b\x74\xcb\x64\xc8\x5c\xba\x05"), + RiceDecodingTestInfo( + 28, {297968956, 19709657, 259702329, 76998112, 1023176123, 29296013, + 1602741145, 393745181, 177326295, 55225536, 75194472}, + "\xf1\x94\x0a\x87\x6c\x5f\x96\x90\xe3\xab\xf7\xc0\xcb\x2d\xe9\x76\xdb" + "\xf8\x59\x63\xc1\x6f\x7c\x99\xe3\x87\x5f\xc7\x04\xde\xb9\x46\x8e\x54" + "\xc0\xac\x4a\x03\x0d\x6c\x8f\x00"), + RiceDecodingTestInfo( + 28, {532220688, 780594691, 436816483, 163436269, 573044456, 1069604, + 39629436, 211410997, 227714491, 381562898, 75610008, 196754597, + 40310339, 15204118, 99010842}, + "\x41\x2c\xe4\xfe\x06\xdc\x0d\xbd\x31\xa5\x04\xd5\x6e\xdd\x9b\x43\xb7" + "\x3f\x11\x24\x52\x10\x80\x4f\x96\x4b\xd4\x80\x67\xb2\xdd\x52\xc9\x4e" + "\x02\xc6\xd7\x60\xde\x06\x92\x52\x1e\xdd\x35\x64\x71\x26\x2c\xfe\xcf" + "\x81\x46\xb2\x79\x01"), + RiceDecodingTestInfo( + 28, {219354713, 389598618, 750263679, 554684211, 87381124, 4523497, + 287633354, 801308671, 424169435, 372520475, 277287849}, + "\xb2\x2c\x26\x3a\xcd\x66\x9c\xdb\x5f\x07\x2e\x6f\xe6\xf9\x21\x10\x52" + "\xd5\x94\xf4\x82\x22\x48\xf9\x9d\x24\xf6\xff\x2f\xfc\x6d\x3f\x21\x65" + 
"\x1b\x36\x34\x56\xea\xc4\x21\x00"), + }; + + // ----- End of Chromium test code ---- + + for (auto tdc : TESTING_DATA_CHROMIUM) { + // Populate chromium testing data to our native testing data struct. + TestingData d; + + d.mRiceParameter = tdc.mRiceParameter; // Populate rice parameter. + + // Populate encoded data from std::string to vector<uint8>. + d.mEncoded.resize(tdc.mEncoded.size()); + memcpy(&d.mEncoded[0], tdc.mEncoded.c_str(), tdc.mEncoded.size()); + + // Populate deltas to expected decoded data. The first value would be just + // set to an arbitrary value, say 7, to avoid any assumption to the + // first value in the implementation. + d.mExpectedDecoded.resize(tdc.mDeltas.size() + 1); + for (size_t i = 0; i < d.mExpectedDecoded.size(); i++) { + if (0 == i) { + d.mExpectedDecoded[i] = 7; // "7" is an arbitrary starting value + } else { + d.mExpectedDecoded[i] = d.mExpectedDecoded[i - 1] + tdc.mDeltas[i - 1]; + } + } + + ASSERT_TRUE(runOneTest(d)); + } +} + +static bool +runOneTest(TestingData& aData) +{ + RiceDeltaDecoder decoder(&aData.mEncoded[0], aData.mEncoded.size()); + + std::vector<uint32_t> decoded(aData.mExpectedDecoded.size()); + + uint32_t firstValue = aData.mExpectedDecoded[0]; + bool rv = decoder.Decode(aData.mRiceParameter, + firstValue, + decoded.size() - 1, // # of entries (first value not included). + &decoded[0]); + + return rv && decoded == aData.mExpectedDecoded; +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp b/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp new file mode 100644 index 000000000..fe6f28960 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp @@ -0,0 +1,24 @@ +#include "safebrowsing.pb.h" +#include "gtest/gtest.h" + +TEST(SafeBrowsingProtobuf, Empty) +{ + using namespace mozilla::safebrowsing; + + const std::string CLIENT_ID = "firefox"; + + // Construct a simple update request. + FetchThreatListUpdatesRequest r; + r.set_allocated_client(new ClientInfo()); + r.mutable_client()->set_client_id(CLIENT_ID); + + // Then serialize. + std::string s; + r.SerializeToString(&s); + + // De-serialize. 
+ FetchThreatListUpdatesRequest r2; + r2.ParseFromString(s); + + ASSERT_EQ(r2.client().client_id(), CLIENT_ID); +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp b/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp new file mode 100644 index 000000000..89ed74be6 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp @@ -0,0 +1,52 @@ +#include "Entries.h" +#include "mozilla/EndianUtils.h" + +TEST(SafebrowsingHash, ToFromUint32) +{ + using namespace mozilla::safebrowsing; + + // typedef SafebrowsingHash<PREFIX_SIZE, PrefixComparator> Prefix; + // typedef nsTArray<Prefix> PrefixArray; + + const char PREFIX_RAW[4] = { 0x1, 0x2, 0x3, 0x4 }; + uint32_t PREFIX_UINT32; + memcpy(&PREFIX_UINT32, PREFIX_RAW, 4); + + Prefix p; + p.Assign(nsCString(PREFIX_RAW, 4)); + ASSERT_EQ(p.ToUint32(), PREFIX_UINT32); + + p.FromUint32(PREFIX_UINT32); + ASSERT_EQ(memcmp(PREFIX_RAW, p.buf, 4), 0); +} + +TEST(SafebrowsingHash, Compare) +{ + using namespace mozilla; + using namespace mozilla::safebrowsing; + + Prefix p1, p2, p3; + + // The order of p1,p2,p3 is "p1 == p3 < p2" +#if MOZ_LITTLE_ENDIAN + p1.Assign(nsCString("\x01\x00\x00\x00", 4)); + p2.Assign(nsCString("\x00\x00\x00\x01", 4)); + p3.Assign(nsCString("\x01\x00\x00\x00", 4)); +#else + p1.Assign(nsCString("\x00\x00\x00\x01", 4)); + p2.Assign(nsCString("\x01\x00\x00\x00", 4)); + p3.Assign(nsCString("\x00\x00\x00\x01", 4)); +#endif + + // Make sure "p1 == p3 < p2" is true + // on both little and big endian machine. + + ASSERT_EQ(p1.Compare(p2), -1); + ASSERT_EQ(p1.Compare(p1), 0); + ASSERT_EQ(p2.Compare(p1), 1); + ASSERT_EQ(p1.Compare(p3), 0); + + ASSERT_TRUE(p1 < p2); + ASSERT_TRUE(p1 == p1); + ASSERT_TRUE(p1 == p3); +}
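The Compare test above swaps its byte strings between the little- and big-endian `#if` branches; both branches are consistent with prefixes being ordered by their value when the four bytes are read as a native-endian uint32. Below is a small standalone illustration under that assumption, in plain standard C++; `AsNativeU32` is a hypothetical helper for the example, not part of the real `SafebrowsingHash`/`Prefix` implementation.

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>

// Reinterpret the first four bytes of a prefix in host byte order.
static uint32_t AsNativeU32(const char* bytes) {
  uint32_t v;
  std::memcpy(&v, bytes, 4);
  return v;
}

int main() {
  // The little-endian branch of the test: p1 should sort before p2.
  const char p1[] = "\x01\x00\x00\x00";
  const char p2[] = "\x00\x00\x00\x01";

  uint32_t v1 = AsNativeU32(p1);  // 0x00000001 on a little-endian host
  uint32_t v2 = AsNativeU32(p2);  // 0x01000000 on a little-endian host

  std::printf("p1 = 0x%08X, p2 = 0x%08X, p1 %s p2\n",
              static_cast<unsigned>(v1), static_cast<unsigned>(v2),
              v1 < v2 ? "<" : ">=");
  return 0;
}
```

On a little-endian host this prints `p1 = 0x00000001, p2 = 0x01000000, p1 < p2`, matching the `p1 < p2` (and `p1 == p3`) ordering the test asserts once the byte strings are chosen for the host's endianness.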
\ No newline at end of file diff --git a/toolkit/components/url-classifier/tests/gtest/TestTable.cpp b/toolkit/components/url-classifier/tests/gtest/TestTable.cpp new file mode 100644 index 000000000..307587459 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestTable.cpp @@ -0,0 +1,47 @@ +#include "gtest/gtest.h" +#include "nsUrlClassifierDBService.h" + +using namespace mozilla::safebrowsing; + +void +TestResponseCode(const char* table, nsresult result) +{ + nsCString tableName(table); + ASSERT_EQ(TablesToResponse(tableName), result); +} + +TEST(UrlClassifierTable, ResponseCode) +{ + // malware URIs. + TestResponseCode("goog-malware-shavar", NS_ERROR_MALWARE_URI); + TestResponseCode("test-malware-simple", NS_ERROR_MALWARE_URI); + TestResponseCode("goog-phish-shavar,test-malware-simple", NS_ERROR_MALWARE_URI); + TestResponseCode("test-malware-simple,mozstd-track-digest256,mozplugin-block-digest256", NS_ERROR_MALWARE_URI); + + // phish URIs. + TestResponseCode("goog-phish-shavar", NS_ERROR_PHISHING_URI); + TestResponseCode("test-phish-simple", NS_ERROR_PHISHING_URI); + TestResponseCode("test-phish-simple,mozplugin-block-digest256", NS_ERROR_PHISHING_URI); + TestResponseCode("mozstd-track-digest256,test-phish-simple,goog-unwanted-shavar", NS_ERROR_PHISHING_URI); + + // unwanted URIs. + TestResponseCode("goog-unwanted-shavar", NS_ERROR_UNWANTED_URI); + TestResponseCode("test-unwanted-simple", NS_ERROR_UNWANTED_URI); + TestResponseCode("mozplugin-unwanted-digest256,mozfull-track-digest256", NS_ERROR_UNWANTED_URI); + TestResponseCode("test-block-simple,mozfull-track-digest256,test-unwanted-simple", NS_ERROR_UNWANTED_URI); + + // track URIs. + TestResponseCode("test-track-simple", NS_ERROR_TRACKING_URI); + TestResponseCode("mozstd-track-digest256", NS_ERROR_TRACKING_URI); + TestResponseCode("test-block-simple,mozstd-track-digest256", NS_ERROR_TRACKING_URI); + + // block URIs + TestResponseCode("test-block-simple", NS_ERROR_BLOCKED_URI); + TestResponseCode("mozplugin-block-digest256", NS_ERROR_BLOCKED_URI); + TestResponseCode("mozplugin2-block-digest256", NS_ERROR_BLOCKED_URI); + + TestResponseCode("test-trackwhite-simple", NS_OK); + TestResponseCode("mozstd-trackwhite-digest256", NS_OK); + TestResponseCode("goog-badbinurl-shavar", NS_OK); + TestResponseCode("goog-downloadwhite-digest256", NS_OK); +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp new file mode 100644 index 000000000..470a88ba2 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp @@ -0,0 +1,755 @@ +/* Any copyright is dedicated to the Public Domain. + * http://creativecommons.org/publicdomain/zero/1.0/ */ + +#include "Common.h" +#include "Classifier.h" +#include "HashStore.h" +#include "nsAppDirectoryServiceDefs.h" +#include "nsIFile.h" +#include "nsIThread.h" +#include "string.h" +#include "gtest/gtest.h" +#include "nsThreadUtils.h" + +using namespace mozilla; +using namespace mozilla::safebrowsing; + +typedef nsCString _Prefix; +typedef nsTArray<_Prefix> _PrefixArray; + +#define GTEST_SAFEBROWSING_DIR NS_LITERAL_CSTRING("safebrowsing") +#define GTEST_TABLE NS_LITERAL_CSTRING("gtest-malware-proto") +#define GTEST_PREFIXFILE NS_LITERAL_CSTRING("gtest-malware-proto.pset") + +// This function removes common elements of inArray and outArray from +// outArray. 
This is used by partial update testcase to ensure partial update +// data won't contain prefixes we already have. +static void +RemoveIntersection(const _PrefixArray& inArray, _PrefixArray& outArray) +{ + for (uint32_t i = 0; i < inArray.Length(); i++) { + int32_t idx = outArray.BinaryIndexOf(inArray[i]); + if (idx >= 0) { + outArray.RemoveElementAt(idx); + } + } +} + +// This fucntion removes elements from outArray by index specified in +// removal array. +static void +RemoveElements(const nsTArray<uint32_t>& removal, _PrefixArray& outArray) +{ + for (int32_t i = removal.Length() - 1; i >= 0; i--) { + outArray.RemoveElementAt(removal[i]); + } +} + +static void +MergeAndSortArray(const _PrefixArray& array1, + const _PrefixArray& array2, + _PrefixArray& output) +{ + output.Clear(); + output.AppendElements(array1); + output.AppendElements(array2); + output.Sort(); +} + +static void +CalculateCheckSum(_PrefixArray& prefixArray, nsCString& checksum) +{ + prefixArray.Sort(); + + nsresult rv; + nsCOMPtr<nsICryptoHash> cryptoHash = + do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID, &rv); + + cryptoHash->Init(nsICryptoHash::SHA256); + for (uint32_t i = 0; i < prefixArray.Length(); i++) { + const _Prefix& prefix = prefixArray[i]; + cryptoHash->Update(reinterpret_cast<uint8_t*>( + const_cast<char*>(prefix.get())), prefix.Length()); + } + cryptoHash->Finish(false, checksum); +} + +// N: Number of prefixes, MIN/MAX: minimum/maximum prefix size +// This function will append generated prefixes to outArray. +static void +CreateRandomSortedPrefixArray(uint32_t N, + uint32_t MIN, + uint32_t MAX, + _PrefixArray& outArray) +{ + outArray.SetCapacity(outArray.Length() + N); + + const uint32_t range = (MAX - MIN + 1); + + for (uint32_t i = 0; i < N; i++) { + uint32_t prefixSize = (rand() % range) + MIN; + _Prefix prefix; + prefix.SetLength(prefixSize); + + while (true) { + char* dst = prefix.BeginWriting(); + for (uint32_t j = 0; j < prefixSize; j++) { + dst[j] = rand() % 256; + } + + if (!outArray.Contains(prefix)) { + outArray.AppendElement(prefix); + break; + } + } + } + + outArray.Sort(); +} + +// N: Number of removal indices, MAX: maximum index +static void +CreateRandomRemovalIndices(uint32_t N, + uint32_t MAX, + nsTArray<uint32_t>& outArray) +{ + for (uint32_t i = 0; i < N; i++) { + uint32_t idx = rand() % MAX; + if (!outArray.Contains(idx)) { + outArray.InsertElementSorted(idx); + } + } +} + +// Function to generate TableUpdateV4. +static void +GenerateUpdateData(bool fullUpdate, + PrefixStringMap& add, + nsTArray<uint32_t>* removal, + nsCString* checksum, + nsTArray<TableUpdate*>& tableUpdates) +{ + TableUpdateV4* tableUpdate = new TableUpdateV4(GTEST_TABLE); + tableUpdate->SetFullUpdate(fullUpdate); + + for (auto iter = add.ConstIter(); !iter.Done(); iter.Next()) { + nsCString* pstring = iter.Data(); + std::string str(pstring->BeginReading(), pstring->Length()); + + tableUpdate->NewPrefixes(iter.Key(), str); + } + + if (removal) { + tableUpdate->NewRemovalIndices(removal->Elements(), removal->Length()); + } + + if (checksum) { + std::string stdChecksum; + stdChecksum.assign(const_cast<char*>(checksum->BeginReading()), checksum->Length()); + + tableUpdate->NewChecksum(stdChecksum); + } + + tableUpdates.AppendElement(tableUpdate); +} + +static void +VerifyPrefixSet(PrefixStringMap& expected) +{ + // Verify the prefix set is written to disk. 
+ nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + + file->AppendNative(GTEST_SAFEBROWSING_DIR); + file->AppendNative(GTEST_PREFIXFILE); + + RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet; + load->Init(GTEST_TABLE); + + PrefixStringMap prefixesInFile; + load->LoadFromFile(file); + load->GetPrefixes(prefixesInFile); + + for (auto iter = expected.ConstIter(); !iter.Done(); iter.Next()) { + nsCString* expectedPrefix = iter.Data(); + nsCString* resultPrefix = prefixesInFile.Get(iter.Key()); + + ASSERT_TRUE(*resultPrefix == *expectedPrefix); + } +} + +static void +Clear() +{ + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + + UniquePtr<Classifier> classifier(new Classifier()); + classifier->Open(*file); + classifier->Reset(); +} + +static void +testUpdateFail(nsTArray<TableUpdate*>& tableUpdates) +{ + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + + UniquePtr<Classifier> classifier(new Classifier()); + classifier->Open(*file); + + RunTestInNewThread([&] () -> void { + nsresult rv = classifier->ApplyUpdates(&tableUpdates); + ASSERT_TRUE(NS_FAILED(rv)); + }); +} + +static void +testUpdate(nsTArray<TableUpdate*>& tableUpdates, + PrefixStringMap& expected) +{ + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + + UniquePtr<Classifier> classifier(new Classifier()); + classifier->Open(*file); + + RunTestInNewThread([&] () -> void { + nsresult rv = classifier->ApplyUpdates(&tableUpdates); + ASSERT_TRUE(rv == NS_OK); + + VerifyPrefixSet(expected); + }); +} + +static void +testFullUpdate(PrefixStringMap& add, nsCString* checksum) +{ + nsTArray<TableUpdate*> tableUpdates; + + GenerateUpdateData(true, add, nullptr, checksum, tableUpdates); + + testUpdate(tableUpdates, add); +} + +static void +testPartialUpdate(PrefixStringMap& add, + nsTArray<uint32_t>* removal, + nsCString* checksum, + PrefixStringMap& expected) +{ + nsTArray<TableUpdate*> tableUpdates; + GenerateUpdateData(false, add, removal, checksum, tableUpdates); + + testUpdate(tableUpdates, expected); +} + +static void +testOpenLookupCache() +{ + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file)); + file->AppendNative(GTEST_SAFEBROWSING_DIR); + + RunTestInNewThread([&] () -> void { + LookupCacheV4 cache(nsCString(GTEST_TABLE), EmptyCString(), file); + nsresult rv = cache.Init(); + ASSERT_EQ(rv, NS_OK); + + rv = cache.Open(); + ASSERT_EQ(rv, NS_OK); + }); +} + +// Tests start from here. 
+TEST(UrlClassifierTableUpdateV4, FixLenghtPSetFullUpdate) +{ + srand(time(NULL)); + + _PrefixArray array; + PrefixStringMap map; + nsCString checksum; + + CreateRandomSortedPrefixArray(5000, 4, 4, array); + PrefixArrayToPrefixStringMap(array, map); + CalculateCheckSum(array, checksum); + + testFullUpdate(map, &checksum); + + Clear(); +} + +TEST(UrlClassifierTableUpdateV4, VariableLenghtPSetFullUpdate) +{ + _PrefixArray array; + PrefixStringMap map; + nsCString checksum; + + CreateRandomSortedPrefixArray(5000, 5, 32, array); + PrefixArrayToPrefixStringMap(array, map); + CalculateCheckSum(array, checksum); + + testFullUpdate(map, &checksum); + + Clear(); +} + +// This test contain both variable length prefix set and fixed-length prefix set +TEST(UrlClassifierTableUpdateV4, MixedPSetFullUpdate) +{ + _PrefixArray array; + PrefixStringMap map; + nsCString checksum; + + CreateRandomSortedPrefixArray(5000, 4, 4, array); + CreateRandomSortedPrefixArray(1000, 5, 32, array); + PrefixArrayToPrefixStringMap(array, map); + CalculateCheckSum(array, checksum); + + testFullUpdate(map, &checksum); + + Clear(); +} + +TEST(UrlClassifierTableUpdateV4, PartialUpdateWithRemoval) +{ + _PrefixArray fArray; + + // Apply a full update first. + { + PrefixStringMap fMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(10000, 4, 4, fArray); + CreateRandomSortedPrefixArray(2000, 5, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + testFullUpdate(fMap, &checksum); + } + + // Apply a partial update with removal. + { + _PrefixArray pArray, mergedArray; + PrefixStringMap pMap, mergedMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(5000, 4, 4, pArray); + CreateRandomSortedPrefixArray(1000, 5, 32, pArray); + RemoveIntersection(fArray, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + // Remove 1/5 of elements of original prefix set. + nsTArray<uint32_t> removal; + CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal); + RemoveElements(removal, fArray); + + // Calculate the expected prefix map. + MergeAndSortArray(fArray, pArray, mergedArray); + PrefixArrayToPrefixStringMap(mergedArray, mergedMap); + CalculateCheckSum(mergedArray, checksum); + + testPartialUpdate(pMap, &removal, &checksum, mergedMap); + } + + Clear(); +} + +TEST(UrlClassifierTableUpdateV4, PartialUpdateWithoutRemoval) +{ + _PrefixArray fArray; + + // Apply a full update first. + { + PrefixStringMap fMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(10000, 4, 4, fArray); + CreateRandomSortedPrefixArray(2000, 5, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + testFullUpdate(fMap, &checksum); + } + + // Apply a partial update without removal + { + _PrefixArray pArray, mergedArray; + PrefixStringMap pMap, mergedMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(5000, 4, 4, pArray); + CreateRandomSortedPrefixArray(1000, 5, 32, pArray); + RemoveIntersection(fArray, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + // Calculate the expected prefix map. + MergeAndSortArray(fArray, pArray, mergedArray); + PrefixArrayToPrefixStringMap(mergedArray, mergedMap); + CalculateCheckSum(mergedArray, checksum); + + testPartialUpdate(pMap, nullptr, &checksum, mergedMap); + } + + Clear(); +} + +// Expect failure because partial update contains prefix already +// in old prefix set. 
+TEST(UrlClassifierTableUpdateV4, PartialUpdatePrefixAlreadyExist) +{ + _PrefixArray fArray; + + // Apply a full update fist. + { + PrefixStringMap fMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(1000, 4, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + testFullUpdate(fMap, &checksum); + } + + // Apply a partial update which contains a prefix in previous full update. + // This should cause an update error. + { + _PrefixArray pArray; + PrefixStringMap pMap; + nsTArray<TableUpdate*> tableUpdates; + + // Pick one prefix from full update prefix and add it to partial update. + // This should result a failure when call ApplyUpdates. + pArray.AppendElement(fArray[rand() % fArray.Length()]); + CreateRandomSortedPrefixArray(200, 4, 32, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + GenerateUpdateData(false, pMap, nullptr, nullptr, tableUpdates); + testUpdateFail(tableUpdates); + } + + Clear(); +} + +// Test apply partial update directly without applying an full update first. +TEST(UrlClassifierTableUpdateV4, OnlyPartialUpdate) +{ + _PrefixArray pArray; + PrefixStringMap pMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(5000, 4, 4, pArray); + CreateRandomSortedPrefixArray(1000, 5, 32, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + CalculateCheckSum(pArray, checksum); + + testPartialUpdate(pMap, nullptr, &checksum, pMap); + + Clear(); +} + +// Test partial update without any ADD prefixes, only removalIndices. +TEST(UrlClassifierTableUpdateV4, PartialUpdateOnlyRemoval) +{ + _PrefixArray fArray; + + // Apply a full update first. + { + PrefixStringMap fMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(5000, 4, 4, fArray); + CreateRandomSortedPrefixArray(1000, 5, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + testFullUpdate(fMap, &checksum); + } + + // Apply a partial update without add prefix, only contain removal indices. + { + _PrefixArray pArray; + PrefixStringMap pMap, mergedMap; + nsCString checksum; + + // Remove 1/5 of elements of original prefix set. + nsTArray<uint32_t> removal; + CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal); + RemoveElements(removal, fArray); + + PrefixArrayToPrefixStringMap(fArray, mergedMap); + CalculateCheckSum(fArray, checksum); + + testPartialUpdate(pMap, &removal, &checksum, mergedMap); + } + + Clear(); +} + +// Test one tableupdate array contains full update and multiple partial updates. 
+TEST(UrlClassifierTableUpdateV4, MultipleTableUpdates) +{ + _PrefixArray fArray, pArray, mergedArray; + PrefixStringMap fMap, pMap, mergedMap; + nsCString checksum; + + nsTArray<TableUpdate*> tableUpdates; + + // Generate first full udpate + CreateRandomSortedPrefixArray(10000, 4, 4, fArray); + CreateRandomSortedPrefixArray(2000, 5, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + GenerateUpdateData(true, fMap, nullptr, &checksum, tableUpdates); + + // Generate second partial update + CreateRandomSortedPrefixArray(3000, 4, 4, pArray); + CreateRandomSortedPrefixArray(1000, 5, 32, pArray); + RemoveIntersection(fArray, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + MergeAndSortArray(fArray, pArray, mergedArray); + CalculateCheckSum(mergedArray, checksum); + + GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates); + + // Generate thrid partial update + fArray.AppendElements(pArray); + fArray.Sort(); + pArray.Clear(); + CreateRandomSortedPrefixArray(3000, 4, 4, pArray); + CreateRandomSortedPrefixArray(1000, 5, 32, pArray); + RemoveIntersection(fArray, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + // Remove 1/5 of elements of original prefix set. + nsTArray<uint32_t> removal; + CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal); + RemoveElements(removal, fArray); + + MergeAndSortArray(fArray, pArray, mergedArray); + PrefixArrayToPrefixStringMap(mergedArray, mergedMap); + CalculateCheckSum(mergedArray, checksum); + + GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates); + + testUpdate(tableUpdates, mergedMap); + + Clear(); +} + +// Test apply full update first, and then apply multiple partial updates +// in one tableupdate array. +TEST(UrlClassifierTableUpdateV4, MultiplePartialUpdateTableUpdates) +{ + _PrefixArray fArray; + + // Apply a full update first + { + PrefixStringMap fMap; + nsCString checksum; + + // Generate first full udpate + CreateRandomSortedPrefixArray(10000, 4, 4, fArray); + CreateRandomSortedPrefixArray(3000, 5, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + testFullUpdate(fMap, &checksum); + } + + // Apply multiple partial updates in one table update + { + _PrefixArray pArray, mergedArray; + PrefixStringMap pMap, mergedMap; + nsCString checksum; + nsTArray<uint32_t> removal; + nsTArray<TableUpdate*> tableUpdates; + + // Generate first partial update + CreateRandomSortedPrefixArray(3000, 4, 4, pArray); + CreateRandomSortedPrefixArray(1000, 5, 32, pArray); + RemoveIntersection(fArray, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + // Remove 1/5 of elements of original prefix set. + CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal); + RemoveElements(removal, fArray); + + MergeAndSortArray(fArray, pArray, mergedArray); + CalculateCheckSum(mergedArray, checksum); + + GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates); + + fArray.AppendElements(pArray); + fArray.Sort(); + pArray.Clear(); + removal.Clear(); + + // Generate second partial update. + CreateRandomSortedPrefixArray(2000, 4, 4, pArray); + CreateRandomSortedPrefixArray(1000, 5, 32, pArray); + RemoveIntersection(fArray, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + // Remove 1/5 of elements of original prefix set. 
+ CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal); + RemoveElements(removal, fArray); + + MergeAndSortArray(fArray, pArray, mergedArray); + PrefixArrayToPrefixStringMap(mergedArray, mergedMap); + CalculateCheckSum(mergedArray, checksum); + + GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates); + + testUpdate(tableUpdates, mergedMap); + } + + Clear(); +} + +// Test removal indices are larger than the original prefix set. +TEST(UrlClassifierTableUpdateV4, RemovalIndexTooLarge) +{ + _PrefixArray fArray; + + // Apply a full update first + { + PrefixStringMap fMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(1000, 4, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + testFullUpdate(fMap, &checksum); + } + + // Apply a partial update with removal indice array larger than + // old prefix set(fArray). This should cause an error. + { + _PrefixArray pArray; + PrefixStringMap pMap; + nsTArray<uint32_t> removal; + nsTArray<TableUpdate*> tableUpdates; + + CreateRandomSortedPrefixArray(200, 4, 32, pArray); + RemoveIntersection(fArray, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + for (uint32_t i = 0; i < fArray.Length() + 1 ;i++) { + removal.AppendElement(i); + } + + GenerateUpdateData(false, pMap, &removal, nullptr, tableUpdates); + testUpdateFail(tableUpdates); + } + + Clear(); +} + +TEST(UrlClassifierTableUpdateV4, ChecksumMismatch) +{ + // Apply a full update first + { + _PrefixArray fArray; + PrefixStringMap fMap; + nsCString checksum; + + CreateRandomSortedPrefixArray(1000, 4, 32, fArray); + PrefixArrayToPrefixStringMap(fArray, fMap); + CalculateCheckSum(fArray, checksum); + + testFullUpdate(fMap, &checksum); + } + + // Apply a partial update with incorrect checksum + { + _PrefixArray pArray; + PrefixStringMap pMap; + nsCString checksum; + nsTArray<TableUpdate*> tableUpdates; + + CreateRandomSortedPrefixArray(200, 4, 32, pArray); + PrefixArrayToPrefixStringMap(pArray, pMap); + + // Checksum should be calculated with both old prefix set and add prefix set, + // here we only calculate checksum with add prefix set to check if applyUpdate + // will return failure. 
+    CalculateCheckSum(pArray, checksum);
+
+    GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates);
+    testUpdateFail(tableUpdates);
+  }
+
+  Clear();
+}
+
+TEST(UrlClassifierTableUpdateV4, ApplyUpdateThenLoad)
+{
+  // Apply update with checksum
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+    nsCString checksum;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
+
+    testFullUpdate(fMap, &checksum);
+
+    // Opening the lookup cache will load the prefix set and verify the checksum
+    testOpenLookupCache();
+  }
+
+  Clear();
+
+  // Apply update without checksum
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+
+    testFullUpdate(fMap, nullptr);
+
+    testOpenLookupCache();
+  }
+
+  Clear();
+}
+
+// This test is used to avoid an error from nsICryptoHash
+TEST(UrlClassifierTableUpdateV4, ApplyUpdateWithFixedChecksum)
+{
+  _PrefixArray fArray = { _Prefix("enus"), _Prefix("apollo"), _Prefix("mars"),
+                          _Prefix("Hecatonchires cyclopes"),
+                          _Prefix("vesta"), _Prefix("neptunus"), _Prefix("jupiter"),
+                          _Prefix("diana"), _Prefix("minerva"), _Prefix("ceres"),
+                          _Prefix("Aidos,Adephagia,Adikia,Aletheia"),
+                          _Prefix("hecatonchires"), _Prefix("alcyoneus"), _Prefix("hades"),
+                          _Prefix("vulcanus"), _Prefix("juno"), _Prefix("mercury"),
+                          _Prefix("Stheno, Euryale and Medusa")
+                        };
+  fArray.Sort();
+
+  PrefixStringMap fMap;
+  PrefixArrayToPrefixStringMap(fArray, fMap);
+
+  nsCString checksum("\xae\x18\x94\xd7\xd0\x83\x5f\xc1"
+                     "\x58\x59\x5c\x2c\x72\xb9\x6e\x5e"
+                     "\xf4\xe8\x0a\x6b\xff\x5e\x6b\x81"
+                     "\x65\x34\x06\x16\x06\x59\xa0\x67");
+
+  testFullUpdate(fMap, &checksum);
+
+  // Opening the lookup cache will load the prefix set and verify the checksum
+  testOpenLookupCache();
+
+  Clear();
+}
+
diff --git a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp
new file mode 100644
index 000000000..fa5ce4f56
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp
@@ -0,0 +1,276 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <stdio.h>
+#include <ctype.h>
+
+#include <mozilla/RefPtr.h>
+#include "nsString.h"
+#include "nsEscape.h"
+#include "nsUrlClassifierUtils.h"
+#include "stdlib.h"
+#include "gtest/gtest.h"
+
+static char int_to_hex_digit(int32_t i) {
+  NS_ASSERTION((i >= 0) && (i <= 15), "int too big in int_to_hex_digit");
+  return static_cast<char>(((i < 10) ? (i + '0') : ((i - 10) + 'A')));
+}
+
+static void CheckEquals(nsCString& expected, nsCString& actual)
+{
+  ASSERT_TRUE((expected).Equals((actual)));
+}
+
+void TestUnescapeHelper(const char* in, const char* expected)
+{
+  nsCString out, strIn(in), strExp(expected);
+
+  NS_UnescapeURL(strIn.get(), strIn.Length(), esc_AlwaysCopy, out);
+  CheckEquals(strExp, out);
+}
+
+// Make sure NS_UnescapeURL from nsEscape.h does what the server does.
+TEST(UrlClassifierUtils, Unescape)
+{
+  // Test empty string
+  TestUnescapeHelper("\0", "\0");
+
+  // Test decoding of all characters.
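+  // Build "%01".."%FF" in both upper- and lower-case hex and expect the
+  // decoded output to be the raw bytes 0x01..0xFF (e.g. "%2F" -> '/').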
+ nsCString allCharsEncoded, allCharsEncodedLowercase, allCharsAsString; + for (int32_t i = 1; i < 256; ++i) { + allCharsEncoded.Append('%'); + allCharsEncoded.Append(int_to_hex_digit(i / 16)); + allCharsEncoded.Append((int_to_hex_digit(i % 16))); + + allCharsEncodedLowercase.Append('%'); + allCharsEncodedLowercase.Append(tolower(int_to_hex_digit(i / 16))); + allCharsEncodedLowercase.Append(tolower(int_to_hex_digit(i % 16))); + + allCharsAsString.Append(static_cast<char>(i)); + } + + nsCString out; + NS_UnescapeURL(allCharsEncoded.get(), + allCharsEncoded.Length(), + esc_AlwaysCopy, + out); + + CheckEquals(allCharsAsString, out); + + out.Truncate(); + NS_UnescapeURL(allCharsEncodedLowercase.get(), + allCharsEncodedLowercase.Length(), + esc_AlwaysCopy, + out); + CheckEquals(allCharsAsString, out); + + // Test %-related edge cases + TestUnescapeHelper("%", "%"); + TestUnescapeHelper("%xx", "%xx"); + TestUnescapeHelper("%%", "%%"); + TestUnescapeHelper("%%%", "%%%"); + TestUnescapeHelper("%%%%", "%%%%"); + TestUnescapeHelper("%1", "%1"); + TestUnescapeHelper("%1z", "%1z"); + TestUnescapeHelper("a%1z", "a%1z"); + TestUnescapeHelper("abc%d%e%fg%hij%klmno%", "abc%d%e%fg%hij%klmno%"); + + // A few more tests + TestUnescapeHelper("%25", "%"); + TestUnescapeHelper("%25%32%35", "%25"); +} + +void TestEncodeHelper(const char* in, const char* expected) +{ + nsCString out, strIn(in), strExp(expected); + RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils; + utils->Init(); + + utils->SpecialEncode(strIn, true, out); + CheckEquals(strExp, out); +} + +TEST(UrlClassifierUtils, Enc) +{ + // Test empty string + TestEncodeHelper("", ""); + + // Test that all characters we shouldn't encode ([33-36],[38,126]) are not. + nsCString noenc; + for (int32_t i = 33; i < 127; i++) { + if (i != 37) { // skip % + noenc.Append(static_cast<char>(i)); + } + } + RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils; + utils->Init(); + nsCString out; + utils->SpecialEncode(noenc, false, out); + CheckEquals(noenc, out); + + // Test that all the chars that we should encode [0,32],37,[127,255] are + nsCString yesAsString, yesExpectedString; + for (int32_t i = 1; i < 256; i++) { + if (i < 33 || i == 37 || i > 126) { + yesAsString.Append(static_cast<char>(i)); + yesExpectedString.Append('%'); + yesExpectedString.Append(int_to_hex_digit(i / 16)); + yesExpectedString.Append(int_to_hex_digit(i % 16)); + } + } + + out.Truncate(); + utils->SpecialEncode(yesAsString, false, out); + CheckEquals(yesExpectedString, out); + + TestEncodeHelper("blah//blah", "blah/blah"); +} + +void TestCanonicalizeHelper(const char* in, const char* expected) +{ + nsCString out, strIn(in), strExp(expected); + RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils; + utils->Init(); + + utils->CanonicalizePath(strIn, out); + CheckEquals(strExp, out); +} + +TEST(UrlClassifierUtils, Canonicalize) +{ + // Test repeated %-decoding. 
Note: %25 --> %, %32 --> 2, %35 --> 5 + TestCanonicalizeHelper("%25", "%25"); + TestCanonicalizeHelper("%25%32%35", "%25"); + TestCanonicalizeHelper("asdf%25%32%35asd", "asdf%25asd"); + TestCanonicalizeHelper("%%%25%32%35asd%%", "%25%25%25asd%25%25"); + TestCanonicalizeHelper("%25%32%35%25%32%35%25%32%35", "%25%25%25"); + TestCanonicalizeHelper("%25", "%25"); + TestCanonicalizeHelper("%257Ea%2521b%2540c%2523d%2524e%25f%255E00%252611%252A22%252833%252944_55%252B", + "~a!b@c#d$e%25f^00&11*22(33)44_55+"); + + TestCanonicalizeHelper("", ""); + TestCanonicalizeHelper("%31%36%38%2e%31%38%38%2e%39%39%2e%32%36/%2E%73%65%63%75%72%65/%77%77%77%2E%65%62%61%79%2E%63%6F%6D/", + "168.188.99.26/.secure/www.ebay.com/"); + TestCanonicalizeHelper("195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/", + "195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/"); + // Added in bug 489455. %00 should no longer be changed to %01. + TestCanonicalizeHelper("%00", "%00"); +} + +void TestParseIPAddressHelper(const char *in, const char *expected) +{ + nsCString out, strIn(in), strExp(expected); + RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils; + utils->Init(); + + utils->ParseIPAddress(strIn, out); + CheckEquals(strExp, out); +} + +TEST(UrlClassifierUtils, ParseIPAddress) +{ + TestParseIPAddressHelper("123.123.0.0.1", ""); + TestParseIPAddressHelper("255.0.0.1", "255.0.0.1"); + TestParseIPAddressHelper("12.0x12.01234", "12.18.2.156"); + TestParseIPAddressHelper("276.2.3", "20.2.0.3"); + TestParseIPAddressHelper("012.034.01.055", "10.28.1.45"); + TestParseIPAddressHelper("0x12.0x43.0x44.0x01", "18.67.68.1"); + TestParseIPAddressHelper("167838211", "10.1.2.3"); + TestParseIPAddressHelper("3279880203", "195.127.0.11"); + TestParseIPAddressHelper("0x12434401", "18.67.68.1"); + TestParseIPAddressHelper("413960661", "24.172.137.213"); + TestParseIPAddressHelper("03053104725", "24.172.137.213"); + TestParseIPAddressHelper("030.0254.0x89d5", "24.172.137.213"); + TestParseIPAddressHelper("1.234.4.0377", "1.234.4.255"); + TestParseIPAddressHelper("1.2.3.00x0", ""); + TestParseIPAddressHelper("10.192.95.89 xy", "10.192.95.89"); + TestParseIPAddressHelper("10.192.95.89 xyz", ""); + TestParseIPAddressHelper("1.2.3.0x0", "1.2.3.0"); + TestParseIPAddressHelper("1.2.3.4", "1.2.3.4"); +} + +void TestCanonicalNumHelper(const char *in, uint32_t bytes, + bool allowOctal, const char *expected) +{ + nsCString out, strIn(in), strExp(expected); + RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils; + utils->Init(); + + utils->CanonicalNum(strIn, bytes, allowOctal, out); + CheckEquals(strExp, out); +} + +TEST(UrlClassifierUtils, CanonicalNum) +{ + TestCanonicalNumHelper("", 1, true, ""); + TestCanonicalNumHelper("10", 0, true, ""); + TestCanonicalNumHelper("45", 1, true, "45"); + TestCanonicalNumHelper("0x10", 1, true, "16"); + TestCanonicalNumHelper("367", 2, true, "1.111"); + TestCanonicalNumHelper("012345", 3, true, "0.20.229"); + TestCanonicalNumHelper("0173", 1, true, "123"); + TestCanonicalNumHelper("09", 1, false, "9"); + TestCanonicalNumHelper("0x120x34", 2, true, ""); + TestCanonicalNumHelper("0x12fc", 2, true, "18.252"); + TestCanonicalNumHelper("3279880203", 4, true, "195.127.0.11"); + TestCanonicalNumHelper("0x0000059", 1, true, "89"); + TestCanonicalNumHelper("0x00000059", 1, true, "89"); + TestCanonicalNumHelper("0x0000067", 1, true, "103"); +} + +void TestHostnameHelper(const char *in, 
const char *expected) +{ + nsCString out, strIn(in), strExp(expected); + RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils; + utils->Init(); + + utils->CanonicalizeHostname(strIn, out); + CheckEquals(strExp, out); +} + +TEST(UrlClassifierUtils, Hostname) +{ + TestHostnameHelper("abcd123;[]", "abcd123;[]"); + TestHostnameHelper("abc.123", "abc.123"); + TestHostnameHelper("abc..123", "abc.123"); + TestHostnameHelper("trailing.", "trailing"); + TestHostnameHelper("i love trailing dots....", "i%20love%20trailing%20dots"); + TestHostnameHelper(".leading", "leading"); + TestHostnameHelper("..leading", "leading"); + TestHostnameHelper(".dots.", "dots"); + TestHostnameHelper(".both.", "both"); + TestHostnameHelper(".both..", "both"); + TestHostnameHelper("..both.", "both"); + TestHostnameHelper("..both..", "both"); + TestHostnameHelper("..a.b.c.d..", "a.b.c.d"); + TestHostnameHelper("..127.0.0.1..", "127.0.0.1"); + TestHostnameHelper("asdf!@#$a", "asdf!@#$a"); + TestHostnameHelper("AB CD 12354", "ab%20cd%2012354"); + TestHostnameHelper("\1\2\3\4\112\177", "%01%02%03%04j%7F"); + TestHostnameHelper("<>.AS/-+", "<>.as/-+"); + // Added in bug 489455. %00 should no longer be changed to %01. + TestHostnameHelper("%00", "%00"); +} + +TEST(UrlClassifierUtils, LongHostname) +{ + static const int kTestSize = 1024 * 150; + char *str = static_cast<char*>(malloc(kTestSize + 1)); + memset(str, 'x', kTestSize); + str[kTestSize] = '\0'; + + RefPtr<nsUrlClassifierUtils> utils = new nsUrlClassifierUtils; + utils->Init(); + + nsAutoCString out; + nsDependentCString in(str); + PRIntervalTime clockStart = PR_IntervalNow(); + utils->CanonicalizeHostname(in, out); + PRIntervalTime clockEnd = PR_IntervalNow(); + + CheckEquals(in, out); + + printf("CanonicalizeHostname on long string (%dms)\n", + PR_IntervalToMilliseconds(clockEnd - clockStart)); +} diff --git a/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp b/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp new file mode 100644 index 000000000..9e380a9d3 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp @@ -0,0 +1,559 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include <mozilla/RefPtr.h> +#include "nsString.h" +#include "nsTArray.h" +#include "nsClassHashtable.h" +#include "VariableLengthPrefixSet.h" +#include "nsAppDirectoryServiceDefs.h" +#include "nsIFile.h" +#include "gtest/gtest.h" + +using namespace mozilla::safebrowsing; + +typedef nsCString _Prefix; +typedef nsTArray<_Prefix> _PrefixArray; + +// Create fullhash by appending random characters. +static nsCString* CreateFullHash(const nsACString& in) +{ + nsCString* out = new nsCString(in); + out->SetLength(32); + for (size_t i = in.Length(); i < 32; i++) { + out->SetCharAt(char(rand() % 256), i); + } + + return out; +} + +// This function generate N prefixes with size between MIN and MAX. 
+// The output array is not cleared; random results are appended to it.
+static void RandomPrefixes(uint32_t N, uint32_t MIN, uint32_t MAX, _PrefixArray& array)
+{
+  array.SetCapacity(array.Length() + N);
+
+  uint32_t range = (MAX - MIN + 1);
+
+  for (uint32_t i = 0; i < N; i++) {
+    uint32_t prefixSize = (rand() % range) + MIN;
+    _Prefix prefix;
+    prefix.SetLength(prefixSize);
+
+    bool added = false;
+    while(!added) {
+      char* dst = prefix.BeginWriting();
+      for (uint32_t j = 0; j < prefixSize; j++) {
+        dst[j] = rand() % 256;
+      }
+
+      if (!array.Contains(prefix)) {
+        array.AppendElement(prefix);
+        added = true;
+      }
+    }
+  }
+}
+
+static void CheckContent(VariableLengthPrefixSet* pset,
+                         PrefixStringMap& expected)
+{
+  PrefixStringMap vlPSetMap;
+  pset->GetPrefixes(vlPSetMap);
+
+  for (auto iter = vlPSetMap.Iter(); !iter.Done(); iter.Next()) {
+    nsCString* expectedPrefix = expected.Get(iter.Key());
+    nsCString* resultPrefix = iter.Data();
+
+    ASSERT_TRUE(resultPrefix->Equals(*expectedPrefix));
+  }
+}
+
+// This test loops through all the prefixes and converts each prefix to a
+// fullhash by appending random characters; each converted fullhash should
+// match at least its original prefix length in the prefixSet.
+static void DoExpectedLookup(VariableLengthPrefixSet* pset,
+                             _PrefixArray& array)
+{
+  uint32_t matchLength = 0;
+  for (uint32_t i = 0; i < array.Length(); i++) {
+    const nsCString& prefix = array[i];
+    UniquePtr<nsCString> fullhash(CreateFullHash(prefix));
+
+    // Find match for prefix-generated full hash
+    pset->Matches(*fullhash, &matchLength);
+    MOZ_ASSERT(matchLength != 0);
+
+    if (matchLength != prefix.Length()) {
+      // The returned match size is not the same as the prefix size.
+      // This can happen when the generated fullhash also matches another
+      // prefix, so check whether a prefix of that length exists.
+      bool found = false;
+
+      for (uint32_t j = 0; j < array.Length(); j++) {
+        if (array[j].Length() != matchLength) {
+          continue;
+        }
+
+        if (0 == memcmp(fullhash->BeginReading(),
+                        array[j].BeginReading(),
+                        matchLength)) {
+          found = true;
+          break;
+        }
+      }
+      ASSERT_TRUE(found);
+    }
+  }
+}
+
+static void DoRandomLookup(VariableLengthPrefixSet* pset,
+                           uint32_t N,
+                           _PrefixArray& array)
+{
+  for (uint32_t i = 0; i < N; i++) {
+    // Random 32-byte test fullhash
+    char buf[32];
+    for (uint32_t j = 0; j < 32; j++) {
+      buf[j] = (char)(rand() % 256);
+    }
+
+    // Get the expected result.
+    nsTArray<uint32_t> expected;
+    for (uint32_t j = 0; j < array.Length(); j++) {
+      const nsACString& str = array[j];
+      if (0 == memcmp(buf, str.BeginReading(), str.Length())) {
+        expected.AppendElement(str.Length());
+      }
+    }
+
+    uint32_t matchLength = 0;
+    pset->Matches(nsDependentCSubstring(buf, 32), &matchLength);
+
+    ASSERT_TRUE(expected.IsEmpty() ? !matchLength : expected.Contains(matchLength));
+  }
+}
+
+static void SetupPrefixMap(const _PrefixArray& array,
+                           PrefixStringMap& map)
+{
+  map.Clear();
+
+  // Buckets are keyed by prefix length and contain an array of
+  // all prefixes of that length.
+  nsClassHashtable<nsUint32HashKey, _PrefixArray> table;
+
+  for (uint32_t i = 0; i < array.Length(); i++) {
+    _PrefixArray* prefixes = table.Get(array[i].Length());
+    if (!prefixes) {
+      prefixes = new _PrefixArray();
+      table.Put(array[i].Length(), prefixes);
+    }
+
+    prefixes->AppendElement(array[i]);
+  }
+
+  // The resulting map entries will be a concatenation of all
+  // prefix data for the prefixes of a given size.
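+  // For example, 4-byte prefixes "abcd" and "wxyz" end up as a single entry
+  // map[4] == "abcdwxyz": sorted, then concatenated back to back.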
+ for (auto iter = table.Iter(); !iter.Done(); iter.Next()) { + uint32_t size = iter.Key(); + uint32_t count = iter.Data()->Length(); + + _Prefix* str = new _Prefix(); + str->SetLength(size * count); + + char* dst = str->BeginWriting(); + + iter.Data()->Sort(); + for (uint32_t i = 0; i < count; i++) { + memcpy(dst, iter.Data()->ElementAt(i).get(), size); + dst += size; + } + + map.Put(size, str); + } +} + + +// Test setting prefix set with only 4-bytes prefixes +TEST(VariableLengthPrefixSet, FixedLengthSet) +{ + srand(time(nullptr)); + + RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet; + pset->Init(NS_LITERAL_CSTRING("test")); + + PrefixStringMap map; + _PrefixArray array = { _Prefix("alph"), _Prefix("brav"), _Prefix("char"), + _Prefix("delt"), _Prefix("echo"), _Prefix("foxt"), + }; + + SetupPrefixMap(array, map); + pset->SetPrefixes(map); + + DoExpectedLookup(pset, array); + + DoRandomLookup(pset, 1000, array); + + CheckContent(pset, map); + + // Run random test + array.Clear(); + map.Clear(); + + RandomPrefixes(1500, 4, 4, array); + + SetupPrefixMap(array, map); + pset->SetPrefixes(map); + + DoExpectedLookup(pset, array); + + DoRandomLookup(pset, 1000, array); + + CheckContent(pset, map); +} + +// Test setting prefix set with only 5~32 bytes prefixes +TEST(VariableLengthPrefixSet, VariableLengthSet) +{ + RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet; + pset->Init(NS_LITERAL_CSTRING("test")); + + PrefixStringMap map; + _PrefixArray array = { _Prefix("bravo"), _Prefix("charlie"), _Prefix("delta"), + _Prefix("EchoEchoEchoEchoEcho"), _Prefix("foxtrot"), + _Prefix("GolfGolfGolfGolfGolfGolfGolfGolf"), + _Prefix("hotel"), _Prefix("november"), + _Prefix("oscar"), _Prefix("quebec"), _Prefix("romeo"), + _Prefix("sierrasierrasierrasierrasierra"), + _Prefix("Tango"), _Prefix("whiskey"), _Prefix("yankee"), + _Prefix("ZuluZuluZuluZulu") + }; + + SetupPrefixMap(array, map); + pset->SetPrefixes(map); + + DoExpectedLookup(pset, array); + + DoRandomLookup(pset, 1000, array); + + CheckContent(pset, map); + + // Run random test + array.Clear(); + map.Clear(); + + RandomPrefixes(1500, 5, 32, array); + + SetupPrefixMap(array, map); + pset->SetPrefixes(map); + + DoExpectedLookup(pset, array); + + DoRandomLookup(pset, 1000, array); + + CheckContent(pset, map); + +} + +// Test setting prefix set with both 4-bytes prefixes and 5~32 bytes prefixes +TEST(VariableLengthPrefixSet, MixedPrefixSet) +{ + RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet; + pset->Init(NS_LITERAL_CSTRING("test")); + + PrefixStringMap map; + _PrefixArray array = { _Prefix("enus"), _Prefix("apollo"), _Prefix("mars"), + _Prefix("Hecatonchires cyclopes"), + _Prefix("vesta"), _Prefix("neptunus"), _Prefix("jupiter"), + _Prefix("diana"), _Prefix("minerva"), _Prefix("ceres"), + _Prefix("Aidos,Adephagia,Adikia,Aletheia"), + _Prefix("hecatonchires"), _Prefix("alcyoneus"), _Prefix("hades"), + _Prefix("vulcanus"), _Prefix("juno"), _Prefix("mercury"), + _Prefix("Stheno, Euryale and Medusa") + }; + + SetupPrefixMap(array, map); + pset->SetPrefixes(map); + + DoExpectedLookup(pset, array); + + DoRandomLookup(pset, 1000, array); + + CheckContent(pset, map); + + // Run random test + array.Clear(); + map.Clear(); + + RandomPrefixes(1500, 4, 32, array); + + SetupPrefixMap(array, map); + pset->SetPrefixes(map); + + DoExpectedLookup(pset, array); + + DoRandomLookup(pset, 1000, array); + + CheckContent(pset, map); +} + +// Test resetting prefix set +TEST(VariableLengthPrefixSet, ResetPrefix) +{ + 
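+  // SetPrefixes() replaces the previous contents entirely, so after the
+  // second call none of the prefixes from the first array should still match.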
+  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
+  pset->Init(NS_LITERAL_CSTRING("test"));
+
+  // First prefix set
+  _PrefixArray array1 = { _Prefix("Iceland"), _Prefix("Peru"), _Prefix("Mexico"),
+                          _Prefix("Australia"), _Prefix("Japan"), _Prefix("Egypt"),
+                          _Prefix("America"), _Prefix("Finland"), _Prefix("Germany"),
+                          _Prefix("Italy"), _Prefix("France"), _Prefix("Taiwan"),
+                        };
+  {
+    PrefixStringMap map;
+
+    SetupPrefixMap(array1, map);
+    pset->SetPrefixes(map);
+
+    DoExpectedLookup(pset, array1);
+  }
+
+  // Second prefix set
+  _PrefixArray array2 = { _Prefix("Pikachu"), _Prefix("Bulbasaur"), _Prefix("Charmander"),
+                          _Prefix("Blastoise"), _Prefix("Pidgey"), _Prefix("Mewtwo"),
+                          _Prefix("Jigglypuff"), _Prefix("Persian"), _Prefix("Tentacool"),
+                          _Prefix("Onix"), _Prefix("Eevee"), _Prefix("Jynx"),
+                        };
+  {
+    PrefixStringMap map;
+
+    SetupPrefixMap(array2, map);
+    pset->SetPrefixes(map);
+
+    DoExpectedLookup(pset, array2);
+  }
+
+  // Should not match any of the first prefix set
+  uint32_t matchLength = 0;
+  for (uint32_t i = 0; i < array1.Length(); i++) {
+    UniquePtr<nsACString> fullhash(CreateFullHash(array1[i]));
+
+    pset->Matches(*fullhash, &matchLength);
+    ASSERT_TRUE(matchLength == 0);
+  }
+}
+
+// Test setting only one 4-byte prefix and one full-length prefix
+TEST(VariableLengthPrefixSet, TinyPrefixSet)
+{
+  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
+  pset->Init(NS_LITERAL_CSTRING("test"));
+
+  PrefixStringMap map;
+  _PrefixArray array = { _Prefix("AAAA"),
+                         _Prefix("11112222333344445555666677778888"),
+                       };
+
+  SetupPrefixMap(array, map);
+  pset->SetPrefixes(map);
+
+  DoExpectedLookup(pset, array);
+
+  DoRandomLookup(pset, 1000, array);
+
+  CheckContent(pset, map);
+}
+
+// Test empty prefix set and the IsEmpty function
+TEST(VariableLengthPrefixSet, EmptyPrefixSet)
+{
+  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
+  pset->Init(NS_LITERAL_CSTRING("test"));
+
+  bool empty;
+  pset->IsEmpty(&empty);
+  ASSERT_TRUE(empty);
+
+  PrefixStringMap map;
+  _PrefixArray array1;
+
+  // Lookups against an empty prefix set should never match
+  DoRandomLookup(pset, 100, array1);
+
+  // Insert a 4-byte prefix, then IsEmpty should return false
+  _PrefixArray array2 = { _Prefix("test") };
+  SetupPrefixMap(array2, map);
+  pset->SetPrefixes(map);
+
+  pset->IsEmpty(&empty);
+  ASSERT_TRUE(!empty);
+
+  _PrefixArray array3 = { _Prefix("test variable length") };
+
+  // Insert a 5~32 byte prefix, then IsEmpty should return false
+  SetupPrefixMap(array3, map);
+  pset->SetPrefixes(map);
+
+  pset->IsEmpty(&empty);
+  ASSERT_TRUE(!empty);
+}
+
+// Test that prefix sizes are restricted to 4~32 bytes
+TEST(VariableLengthPrefixSet, MinMaxPrefixSet)
+{
+  RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
+  pset->Init(NS_LITERAL_CSTRING("test"));
+
+  PrefixStringMap map;
+  {
+    _PrefixArray array = { _Prefix("1234"),
+                           _Prefix("ABCDEFGHIJKKMNOP"),
+                           _Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh") };
+
+    SetupPrefixMap(array, map);
+    nsresult rv = pset->SetPrefixes(map);
+    ASSERT_TRUE(rv == NS_OK);
+  }
+
+  // Prefix size less than 4 bytes should fail
+  {
+    _PrefixArray array = { _Prefix("123") };
+
+    SetupPrefixMap(array, map);
+    nsresult rv = pset->SetPrefixes(map);
+    ASSERT_TRUE(NS_FAILED(rv));
+  }
+
+  // Prefix size greater than 32 bytes should fail
+  {
+    _PrefixArray array = { _Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh9") };
+
+    SetupPrefixMap(array, map);
+    nsresult rv = pset->SetPrefixes(map);
+    ASSERT_TRUE(NS_FAILED(rv));
+  }
+}
+
+// Test save then load prefix set with
only 4-bytes prefixes +TEST(VariableLengthPrefixSet, LoadSaveFixedLengthPrefixSet) +{ + RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet; + save->Init(NS_LITERAL_CSTRING("test-save")); + + _PrefixArray array; + RandomPrefixes(10000, 4, 4, array); + + PrefixStringMap map; + SetupPrefixMap(array, map); + save->SetPrefixes(map); + + DoExpectedLookup(save, array); + + DoRandomLookup(save, 1000, array); + + CheckContent(save, map); + + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, + getter_AddRefs(file)); + file->Append(NS_LITERAL_STRING("test.vlpset")); + + save->StoreToFile(file); + + RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet; + load->Init(NS_LITERAL_CSTRING("test-load")); + + load->LoadFromFile(file); + + DoExpectedLookup(load, array); + + DoRandomLookup(load, 1000, array); + + CheckContent(load, map); + + file->Remove(false); +} + +// Test save then load prefix set with only 5~32 bytes prefixes +TEST(VariableLengthPrefixSet, LoadSaveVariableLengthPrefixSet) +{ + RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet; + save->Init(NS_LITERAL_CSTRING("test-save")); + + _PrefixArray array; + RandomPrefixes(10000, 5, 32, array); + + PrefixStringMap map; + SetupPrefixMap(array, map); + save->SetPrefixes(map); + + DoExpectedLookup(save, array); + + DoRandomLookup(save, 1000, array); + + CheckContent(save, map); + + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, + getter_AddRefs(file)); + file->Append(NS_LITERAL_STRING("test.vlpset")); + + save->StoreToFile(file); + + RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet; + load->Init(NS_LITERAL_CSTRING("test-load")); + + load->LoadFromFile(file); + + DoExpectedLookup(load, array); + + DoRandomLookup(load, 1000, array); + + CheckContent(load, map); + + file->Remove(false); +} + +// Test save then load prefix with both 4 bytes prefixes and 5~32 bytes prefixes +TEST(VariableLengthPrefixSet, LoadSavePrefixSet) +{ + RefPtr<VariableLengthPrefixSet> save = new VariableLengthPrefixSet; + save->Init(NS_LITERAL_CSTRING("test-save")); + + // Try to simulate the real case that most prefixes are 4bytes + _PrefixArray array; + RandomPrefixes(20000, 4, 4, array); + RandomPrefixes(1000, 5, 32, array); + + PrefixStringMap map; + SetupPrefixMap(array, map); + save->SetPrefixes(map); + + DoExpectedLookup(save, array); + + DoRandomLookup(save, 1000, array); + + CheckContent(save, map); + + nsCOMPtr<nsIFile> file; + NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, + getter_AddRefs(file)); + file->Append(NS_LITERAL_STRING("test.vlpset")); + + save->StoreToFile(file); + + RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet; + load->Init(NS_LITERAL_CSTRING("test-load")); + + load->LoadFromFile(file); + + DoExpectedLookup(load, array); + + DoRandomLookup(load, 1000, array); + + CheckContent(load, map); + + file->Remove(false); +} diff --git a/toolkit/components/url-classifier/tests/gtest/moz.build b/toolkit/components/url-classifier/tests/gtest/moz.build new file mode 100644 index 000000000..e66af9024 --- /dev/null +++ b/toolkit/components/url-classifier/tests/gtest/moz.build @@ -0,0 +1,27 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
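+
+# Gtest sources for the url-classifier component; they are linked into the
+# xul-gtest library (see FINAL_LIBRARY below).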
+
+LOCAL_INCLUDES += [
+    '../..',
+]
+
+UNIFIED_SOURCES += [
+    'Common.cpp',
+    'TestChunkSet.cpp',
+    'TestFailUpdate.cpp',
+    'TestLookupCacheV4.cpp',
+    'TestPerProviderDirectory.cpp',
+    'TestProtocolParser.cpp',
+    'TestRiceDeltaDecoder.cpp',
+    'TestSafebrowsingHash.cpp',
+    'TestSafeBrowsingProtobuf.cpp',
+    'TestTable.cpp',
+    'TestUrlClassifierTableUpdateV4.cpp',
+    'TestUrlClassifierUtils.cpp',
+    'TestVariableLengthPrefixSet.cpp',
+]
+
+FINAL_LIBRARY = 'xul-gtest'