author | joi@chromium.org <joi@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-11-25 02:29:06 +0000
committer | joi@chromium.org <joi@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-11-25 02:29:06 +0000
commit | 6b3f964f5de20d1d5d567bf67d16f5b246ac0299 (patch)
tree | dc0061b10ce3b9a3e52a2eb2e9784d1bad141da8 /net/url_request/url_request_throttler_manager.cc
parent | 9b41006d15b05b373724cce02c8b458cf173c9b9 (diff)
Implement exponential back-off mechanism.
Contributed by yzshen@google.com, original review http://codereview.chromium.org/4194001/
The back-off is enforced at the URLRequestHttpJob level for all outgoing HTTP requests.
The motivation for this change is that we need back-off logic at a low enough level to manage all outgoing HTTP traffic, so that the browser cannot inadvertently mount a DDoS attack against a server.
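For reference, this is the shape of an exponential back-off computation; a minimal sketch with illustrative names and constants (the committed policy lives in URLRequestThrottlerEntry, whose actual parameters may differ):

#include <algorithm>
#include <cstdint>

// Sketch: each consecutive failure multiplies the release delay by a
// constant factor, capped at a maximum. All names and values here are
// assumptions for illustration, not the committed constants.
int64_t ComputeBackoffDelayMs(int num_failures) {
  const int64_t kInitialDelayMs = 700;             // Assumed initial delay.
  const double kMultiplyFactor = 1.4;              // Assumed growth factor.
  const int64_t kMaximumDelayMs = 15 * 60 * 1000;  // Assumed 15-minute cap.

  if (num_failures <= 0)
    return 0;  // No recent failures: release the request immediately.

  double delay_ms = kInitialDelayMs;
  for (int i = 1; i < num_failures; ++i)
    delay_ms *= kMultiplyFactor;
  return std::min(static_cast<int64_t>(delay_ms), kMaximumDelayMs);
}

A request that arrives while the computed release time is still in the future gets rejected or deferred; that is the enforcement URLRequestHttpJob performs.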
This change:
1) applies the patch at http://codereview.chromium.org/2487001/show, which contains the exponential back-off implementation.
2) resolves conflicts with URLFetcher by removing its own back-off logic:
-- removes url_fetcher_protect.{h,cc};
-- integrates the sliding window mechanism of URLFetcherProtectEntry into RequestThrottlerEntry (a sketch of this mechanism follows the list).
3) resolves conflicts with CloudPrintURLFetcher.
4) updates the unit tests for CloudPrintURLFetcher, URLFetcher and URLRequest so they pass.
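Below is a minimal sketch of the sliding-window mechanism mentioned in 2); the class and member names are hypothetical, not the committed code:

#include <algorithm>
#include <cstdint>
#include <queue>

// Sketch: allow at most |max_sends| requests within any |window_ms|
// interval; later requests are deferred until a slot frees up.
class SlidingWindowSketch {
 public:
  SlidingWindowSketch(int max_sends, int64_t window_ms)
      : max_sends_(max_sends), window_ms_(window_ms) {}

  // Registers a request arriving at |now_ms| and returns how many
  // milliseconds it must wait before it may be sent.
  int64_t RegisterSend(int64_t now_ms) {
    int64_t send_time = now_ms;
    if (static_cast<int>(send_times_.size()) == max_sends_) {
      // The oldest of the last |max_sends_| sends bounds the next slot.
      send_time = std::max(send_time, send_times_.front() + window_ms_);
      send_times_.pop();
    }
    send_times_.push(send_time);
    return send_time - now_ms;
  }

 private:
  const int max_sends_;
  const int64_t window_ms_;
  std::queue<int64_t> send_times_;  // Scheduled release times of recent sends.
};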
BUG=none
TEST=all existing tests pass, along with the newly added request_throttler_unittest.cc
Review URL: http://codereview.chromium.org/5276007
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@67375 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request/url_request_throttler_manager.cc')
-rw-r--r-- | net/url_request/url_request_throttler_manager.cc | 107
1 file changed, 107 insertions, 0 deletions
diff --git a/net/url_request/url_request_throttler_manager.cc b/net/url_request/url_request_throttler_manager.cc
new file mode 100644
index 0000000..5428d9a
--- /dev/null
+++ b/net/url_request/url_request_throttler_manager.cc
@@ -0,0 +1,107 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "net/url_request/url_request_throttler_manager.h"
+
+#include "base/string_util.h"
+
+namespace net {
+
+const unsigned int URLRequestThrottlerManager::kMaximumNumberOfEntries = 1500;
+const unsigned int URLRequestThrottlerManager::kRequestsBetweenCollecting = 200;
+
+URLRequestThrottlerManager* URLRequestThrottlerManager::GetInstance() {
+  return Singleton<URLRequestThrottlerManager>::get();
+}
+
+scoped_refptr<URLRequestThrottlerEntryInterface>
+    URLRequestThrottlerManager::RegisterRequestUrl(const GURL& url) {
+  // Normalize the url.
+  std::string url_id = GetIdFromUrl(url);
+
+  // Periodically garbage collect old entries.
+  GarbageCollectEntriesIfNecessary();
+
+  // Find the entry in the map or create it.
+  scoped_refptr<URLRequestThrottlerEntry>& entry = url_entries_[url_id];
+  if (entry == NULL)
+    entry = new URLRequestThrottlerEntry();
+
+  return entry;
+}
+
+URLRequestThrottlerManager::URLRequestThrottlerManager()
+    : requests_since_last_gc_(0) {
+}
+
+URLRequestThrottlerManager::~URLRequestThrottlerManager() {
+  // Delete all entries.
+  url_entries_.clear();
+}
+
+std::string URLRequestThrottlerManager::GetIdFromUrl(const GURL& url) const {
+  if (!url.is_valid())
+    return url.possibly_invalid_spec();
+
+  if (url_id_replacements_ == NULL) {
+    url_id_replacements_.reset(new GURL::Replacements());
+
+    url_id_replacements_->ClearPassword();
+    url_id_replacements_->ClearUsername();
+    url_id_replacements_->ClearQuery();
+    url_id_replacements_->ClearRef();
+  }
+
+  GURL id = url.ReplaceComponents(*url_id_replacements_);
+  return StringToLowerASCII(id.spec());
+}
+
+void URLRequestThrottlerManager::GarbageCollectEntries() {
+  UrlEntryMap::iterator i = url_entries_.begin();
+
+  while (i != url_entries_.end()) {
+    if ((i->second)->IsEntryOutdated()) {
+      url_entries_.erase(i++);
+    } else {
+      ++i;
+    }
+  }
+
+  // In case something broke we want to make sure not to grow indefinitely.
+  while (url_entries_.size() > kMaximumNumberOfEntries) {
+    url_entries_.erase(url_entries_.begin());
+  }
+}
+
+void URLRequestThrottlerManager::GarbageCollectEntriesIfNecessary() {
+  requests_since_last_gc_++;
+  if (requests_since_last_gc_ < kRequestsBetweenCollecting)
+    return;
+
+  requests_since_last_gc_ = 0;
+  GarbageCollectEntries();
+}
+
+void URLRequestThrottlerManager::OverrideEntryForTests(
+    const GURL& url,
+    URLRequestThrottlerEntry* entry) {
+  if (entry == NULL)
+    return;
+
+  // Normalize the url.
+  std::string url_id = GetIdFromUrl(url);
+
+  // Periodically garbage collect old entries.
+  GarbageCollectEntriesIfNecessary();
+
+  url_entries_[url_id] = entry;
+}
+
+void URLRequestThrottlerManager::EraseEntryForTests(const GURL& url) {
+  // Normalize the url.
+  std::string url_id = GetIdFromUrl(url);
+  url_entries_.erase(url_id);
+}
+
+}  // namespace net
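For context, a hypothetical caller-side sketch of how a network job might consult the manager before sending. GetInstance and RegisterRequestUrl come from the file above; the entry method shown is an assumption about the companion URLRequestThrottlerEntry interface, not code in this patch:

// Look up (or create) the throttling entry for this URL. The manager
// normalizes the URL and garbage collects stale entries as a side effect.
scoped_refptr<net::URLRequestThrottlerEntryInterface> throttling_entry =
    net::URLRequestThrottlerManager::GetInstance()->RegisterRequestUrl(url);

// Assumed interface method: true while the exponential back-off release
// time is still in the future.
if (throttling_entry->IsDuringExponentialBackoff()) {
  // Fail or defer the request locally instead of hitting the server again.
}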