author    mattm@chromium.org <mattm@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2011-04-27 01:26:25 +0000
committer mattm@chromium.org <mattm@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2011-04-27 01:26:25 +0000
commit    031b72bf7e6d4ae675631cdd1958f20dcda6f6cb (patch)
tree      d1a2426d31c34afe9878494756223452bab238eb /chrome
parent    c46e187a8537f14d0af3f1879033c129a70ba0e2 (diff)
Safe Browsing URL matching should use the standard algorithm, not a direct comparison.
BUG=80509
TEST=unittest, try downloading http://download.safebrowsingtest.com/download/test?foo

Review URL: http://codereview.chromium.org/6904019

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@83112 0039d316-1c4b-4281-b951-d872f2087c98
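For context, the "standard algorithm" referenced above is the Safe Browsing host-suffix / path-prefix expansion that BrowseFullHashesToCheck() already performs for browse URLs. The sketch below illustrates that expansion in isolation; it is a simplified stand-in written under the usual protocol rules, not the actual Chromium helper.

// Minimal sketch of the Safe Browsing host-suffix / path-prefix expansion,
// assuming the usual protocol limits (exact host plus up to four trailing
// host components; full path with query, full path, each directory prefix,
// and "/"). This is an illustration, not the Chromium implementation.
#include <iostream>
#include <string>
#include <vector>

std::vector<std::string> HostPathCombinations(const std::string& host,
                                              const std::string& path,
                                              const std::string& query) {
  // Host suffixes: the exact host, then progressively shorter suffixes,
  // stopping before the bare TLD.
  std::vector<std::string> hosts;
  hosts.push_back(host);
  size_t pos = 0;
  int extra = 0;
  while (extra < 4 && (pos = host.find('.', pos)) != std::string::npos) {
    ++pos;
    const std::string suffix = host.substr(pos);
    if (suffix.find('.') == std::string::npos)
      break;  // Skip the bare TLD.
    hosts.push_back(suffix);
    ++extra;
  }

  // Path prefixes: path+query, the full path, each directory prefix, and "/".
  std::vector<std::string> paths;
  if (!query.empty())
    paths.push_back(path + "?" + query);
  paths.push_back(path);
  std::string prefix = "/";
  for (size_t i = 1; i < path.size(); ++i) {
    prefix += path[i];
    if (path[i] == '/' && prefix != path)
      paths.push_back(prefix);
  }
  if (path != "/")
    paths.push_back("/");

  // Every host suffix is combined with every path prefix.
  std::vector<std::string> combinations;
  for (size_t h = 0; h < hosts.size(); ++h)
    for (size_t p = 0; p < paths.size(); ++p)
      combinations.push_back(hosts[h] + paths[p]);
  return combinations;
}

int main() {
  // "http://www.evil1.com/download1/foo/bar?blah" expands to candidates that
  // include "www.evil1.com/download1/", so a listed download1/ prefix matches
  // even though the full URL differs.
  std::vector<std::string> c =
      HostPathCombinations("www.evil1.com", "/download1/foo/bar", "blah");
  for (size_t i = 0; i < c.size(); ++i)
    std::cout << c[i] << "\n";
  return 0;
}

Hashing each combination with SHA-256 and keeping the first four bytes yields the prefixes checked against the download list; that is what the reworked GetDownloadUrlPrefixes() in the diff below does, instead of hashing hostname + path + query directly.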
Diffstat (limited to 'chrome')
-rw-r--r--  chrome/browser/safe_browsing/safe_browsing_database.cc           | 28
-rw-r--r--  chrome/browser/safe_browsing/safe_browsing_database_unittest.cc  | 14
2 files changed, 24 insertions(+), 18 deletions(-)
diff --git a/chrome/browser/safe_browsing/safe_browsing_database.cc b/chrome/browser/safe_browsing/safe_browsing_database.cc
index 4437099..16c4b38 100644
--- a/chrome/browser/safe_browsing/safe_browsing_database.cc
+++ b/chrome/browser/safe_browsing/safe_browsing_database.cc
@@ -63,23 +63,6 @@ int EncodeChunkId(const int chunk, const int list_id) {
return chunk << 1 | list_id % 2;
}
-// Get the prefixes matching the download |urls|.
-void GetDownloadUrlPrefixes(const std::vector<GURL>& urls,
- std::vector<SBPrefix>* prefixes) {
- for (size_t i = 0; i < urls.size(); ++i) {
- const GURL& url = urls[i];
- std::string hostname;
- std::string path;
- std::string query;
- safe_browsing_util::CanonicalizeUrl(url, &hostname, &path, &query);
-
- SBFullHash full_hash;
- crypto::SHA256HashString(hostname + path + query, &full_hash,
- sizeof(full_hash));
- prefixes->push_back(full_hash.prefix);
- }
-}
-
// Generate the set of full hashes to check for |url|. If
// |include_whitelist_hashes| is true we will generate additional path-prefixes
// to match against the csd whitelist. E.g., if the path-prefix /foo is on the
@@ -125,6 +108,17 @@ void BrowseFullHashesToCheck(const GURL& url,
}
}
+// Get the prefixes matching the download |urls|.
+void GetDownloadUrlPrefixes(const std::vector<GURL>& urls,
+ std::vector<SBPrefix>* prefixes) {
+ std::vector<SBFullHash> full_hashes;
+ for (size_t i = 0; i < urls.size(); ++i)
+ BrowseFullHashesToCheck(urls[i], false, &full_hashes);
+
+ for (size_t i = 0; i < full_hashes.size(); ++i)
+ prefixes->push_back(full_hashes[i].prefix);
+}
+
// Find the entries in |full_hashes| with prefix in |prefix_hits|, and
// add them to |full_hits| if not expired. "Not expired" is when
// either |last_update| was recent enough, or the item has been
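Read as a call-site sketch, the reworked helper behaves roughly as follows; the invocation from ContainsDownloadUrl() is an assumption here, since only GetDownloadUrlPrefixes() itself appears in the hunk above.

// Hypothetical call-site sketch; the caller is assumed, not shown above.
std::vector<GURL> urls;
urls.push_back(GURL("http://www.evil1.com/download1/foo/bar?blah"));

std::vector<SBPrefix> prefixes;
GetDownloadUrlPrefixes(urls, &prefixes);
// |prefixes| now holds one 4-byte prefix per host-suffix/path-prefix
// combination of the URL, including the prefix of "www.evil1.com/download1/",
// instead of a single hash of hostname + path + query as before.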
diff --git a/chrome/browser/safe_browsing/safe_browsing_database_unittest.cc b/chrome/browser/safe_browsing/safe_browsing_database_unittest.cc
index cea7a19..9a446a0 100644
--- a/chrome/browser/safe_browsing/safe_browsing_database_unittest.cc
+++ b/chrome/browser/safe_browsing/safe_browsing_database_unittest.cc
@@ -1128,7 +1128,7 @@ TEST_F(SafeBrowsingDatabaseTest, ContainsDownloadUrl) {
database_->Init(database_filename_);
const char kEvil1Host[] = "www.evil1.com/";
- const char kEvil1Url1[] = "www.evil1.com/download1.html";
+ const char kEvil1Url1[] = "www.evil1.com/download1/";
const char kEvil1Url2[] = "www.evil1.com/download2.html";
SBChunkList chunks;
@@ -1168,6 +1168,18 @@ TEST_F(SafeBrowsingDatabaseTest, ContainsDownloadUrl) {
urls[0] = GURL("http://www.randomevil.com");
EXPECT_FALSE(database_->ContainsDownloadUrl(urls, &prefix_hits));
+ // Should match with query args stripped.
+ urls[0] = GURL(std::string("http://") + kEvil1Url2 + "?blah");
+ EXPECT_TRUE(database_->ContainsDownloadUrl(urls, &prefix_hits));
+ ASSERT_EQ(prefix_hits.size(), 1U);
+ EXPECT_EQ(prefix_hits[0], Sha256Prefix(kEvil1Url2));
+
+ // Should match with extra path stuff and query args stripped.
+ urls[0] = GURL(std::string("http://") + kEvil1Url1 + "foo/bar?blah");
+ EXPECT_TRUE(database_->ContainsDownloadUrl(urls, &prefix_hits));
+ ASSERT_EQ(prefix_hits.size(), 1U);
+ EXPECT_EQ(prefix_hits[0], Sha256Prefix(kEvil1Url1));
+
// First hit in redirect chain is malware.
urls.clear();
urls.push_back(GURL(std::string("http://") + kEvil1Url1));
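The expectations above compare against Sha256Prefix(). A plausible shape for that unittest helper, assuming it simply truncates the SHA-256 full hash to an SBPrefix in the same way the removed production code did, is:

// Sketch of a Sha256Prefix()-style helper: hash the canonical host+path
// string and keep only the leading sizeof(SBPrefix) bytes. Assumed to match
// the unittest helper of the same name; not copied from the tree.
#include <string>
#include "chrome/browser/safe_browsing/safe_browsing_util.h"  // Assumed header for SBPrefix.
#include "crypto/sha2.h"

SBPrefix Sha256Prefix(const std::string& str) {
  SBPrefix prefix;
  crypto::SHA256HashString(str, &prefix, sizeof(prefix));
  return prefix;
}

With kEvil1Url1 now ending in "download1/", the expanded lookup for the longer URL with extra path components and query arguments produces exactly that path-prefix, which is why the new assertions expect Sha256Prefix(kEvil1Url1).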