author     thakis@chromium.org <thakis@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2010-07-09 17:50:49 +0000
committer  thakis@chromium.org <thakis@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2010-07-09 17:50:49 +0000
commit     d565f6a80589670b0b81496de11d2a2e0d363a2f (patch)
tree       72dcd6f127dcbfde549b68accda7ce15e9714bb8 /chrome
parent     25de9c108cb35a2cc5eb1209b65fc38060097bea (diff)
Remove unnecessary explicit blacklist.
The heuristic should catch these. Keeping the list just makes it harder to find problems with the heuristic.

BUG=none
TEST=Still no infobars for single-word queries on any networks.

Review URL: http://codereview.chromium.org/2917007

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@51979 0039d316-1c4b-4281-b951-d872f2087c98
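The "heuristic" the message refers to is the redirect-origin check that survives in the patch below: a navigation is treated as a provider-controlled redirect when it lands on the same registry-controlled domain as the origin IntranetRedirectDetector observed when probing random hostnames. A minimal sketch of that check as a standalone helper follows; the helper name and include paths are illustrative assumptions, while SameDomainOrHost() and RedirectOrigin() are the calls actually used in the diff.

    // Sketch only: the helper name IsProviderRedirect and the include paths
    // are assumptions; the two calls below are the ones used in the patch.
    #include "chrome/browser/intranet_redirect_detector.h"
    #include "googleurl/src/gurl.h"
    #include "net/base/registry_controlled_domain.h"

    bool IsProviderRedirect(const GURL& url) {
      // True when |url| shares a registry-controlled domain (TLD+1) with the
      // origin that IntranetRedirectDetector saw when probing random hosts,
      // i.e. the page an ISP/DNS provider serves for nonexistent names.
      return net::RegistryControlledDomainService::SameDomainOrHost(
          url, IntranetRedirectDetector::RedirectOrigin());
    }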
Diffstat (limited to 'chrome')
-rw-r--r--  chrome/browser/alternate_nav_url_fetcher.cc  34
1 file changed, 9 insertions(+), 25 deletions(-)
diff --git a/chrome/browser/alternate_nav_url_fetcher.cc b/chrome/browser/alternate_nav_url_fetcher.cc
index fc21226..ebe136e 100644
--- a/chrome/browser/alternate_nav_url_fetcher.cc
+++ b/chrome/browser/alternate_nav_url_fetcher.cc
@@ -137,36 +137,20 @@ void AlternateNavURLFetcher::SetStatusFromURLFetch(
// HTTP 2xx, 401, and 407 all indicate that the target address exists.
(((response_code / 100) != 2) &&
(response_code != 401) && (response_code != 407)) ||
- // Fail if we're redirected to a common location. This is the "automatic
- // heuristic" version of the explicit blacklist below; see comments there.
+ // Fail if we're redirected to a common location.
+ // This happens for ISPs/DNS providers/etc. who return
+ // provider-controlled pages to arbitrary user navigation attempts.
+ // Because this can result in infobars on large fractions of user
+ // searches, we don't show automatic infobars for these. Note that users
+ // can still choose to explicitly navigate to or search for pages in
+ // these domains, and can still get infobars for cases that wind up on
+ // other domains (e.g. legit intranet sites); we're just trying to avoid
+ // erroneously harassing the user with our own UI prompts.
net::RegistryControlledDomainService::SameDomainOrHost(url,
IntranetRedirectDetector::RedirectOrigin())) {
state_ = FAILED;
return;
}
-
- // The following TLD+1s are used as destinations by ISPs/DNS providers/etc.
- // who return provider-controlled pages to arbitrary user navigation attempts.
- // Because this can result in infobars on large fractions of user searches, we
- // don't show automatic infobars for these. Note that users can still choose
- // to explicitly navigate to or search for pages in these domains, and can
- // still get infobars for cases that wind up on other domains (e.g. legit
- // intranet sites), we're just trying to avoid erroneously harassing the user
- // with our own UI prompts.
- const char* kBlacklistedSites[] = {
- // NOTE: Use complete URLs, because GURL() doesn't do fixup!
- "http://comcast.com/",
- "http://opendns.com/",
- "http://verizon.net/",
- };
- for (size_t i = 0; i < arraysize(kBlacklistedSites); ++i) {
- if (net::RegistryControlledDomainService::SameDomainOrHost(url,
- GURL(kBlacklistedSites[i]))) {
- state_ = FAILED;
- return;
- }
- }
-
state_ = SUCCEEDED;
}
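After this change, SetStatusFromURLFetch() succeeds only when the response code indicates a real host answered (any 2xx, or 401/407, which still prove an authenticating server exists) and the landing page is not on the provider redirect origin. A hedged sketch of the response-code half of that test, under an assumed helper name:

    // Sketch only: LooksLikeRealHost is an assumed name; the classification
    // mirrors the comment in the hunk above ("HTTP 2xx, 401, and 407 all
    // indicate that the target address exists").
    bool LooksLikeRealHost(int response_code) {
      return (response_code / 100) == 2 ||
             response_code == 401 ||
             response_code == 407;
    }

    // Example classifications under this sketch:
    //   200 -> true  (page exists; state_ can become SUCCEEDED)
    //   401 -> true  (auth challenge still implies a live server)
    //   404 -> false (treated as nonexistent; state_ becomes FAILED)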