author    | joi@chromium.org <joi@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-08-17 17:24:00 +0000
committer | joi@chromium.org <joi@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-08-17 17:24:00 +0000
commit    | 41d3846d2f324a140e5e841bad5f53e156b40d15 (patch)
tree      | 00ba2ed55c8c9e5a01e98fec82673a64b34cfd33 /net/url_request
parent    | d18be496b6e7160ef944c88f765ba977eb12ca03 (diff)
This CL introduces a new way to do exponential back-off on failure within
Chromium. It is a network-level implementation and should constitute a good
enough bottleneck to manage every outgoing HTTP request.
Committing for malavv@google.com.
Original review: http://codereview.chromium.org/2487001/show
BUG=none
TEST=unit tests
Review URL: http://codereview.chromium.org/3005049
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@56376 0039d316-1c4b-4281-b951-d872f2087c98
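
For readers unfamiliar with the scheme, the back-off the commit message describes can be summarized in a few lines: each consecutive failure pushes the earliest allowed retry further out, up to a cap. The sketch below is illustrative only; the class name, the constants, and the assumption that 5xx responses count as failures are choices made for the example, not the RequestThrottlerEntry code added by this CL.

```cpp
// Minimal standalone sketch of exponential back-off on failure.
// Names, constants, and the 5xx-equals-failure rule are illustrative;
// this is not the RequestThrottlerEntry implementation from the CL.
#include <algorithm>
#include <cstdio>

namespace {

const double kInitialDelayMs = 700;             // First delay after a failure.
const double kMultiplyFactor = 1.4;             // Growth per extra failure.
const double kMaximumDelayMs = 15 * 60 * 1000;  // Cap the back-off at 15 min.

class BackoffEntry {
 public:
  BackoffEntry() : consecutive_failures_(0) {}

  // Called once per completed request; treat server errors as failures.
  void OnResponse(int http_status_code) {
    if (http_status_code >= 500)
      ++consecutive_failures_;
    else
      consecutive_failures_ = 0;
  }

  // Milliseconds to wait before the next request should be released.
  int ReleaseDelayMs() const {
    if (consecutive_failures_ == 0)
      return 0;
    double delay = kInitialDelayMs;
    for (int i = 1; i < consecutive_failures_; ++i)
      delay *= kMultiplyFactor;
    return static_cast<int>(std::min(delay, kMaximumDelayMs));
  }

 private:
  int consecutive_failures_;
};

}  // namespace

int main() {
  BackoffEntry entry;
  for (int i = 0; i < 5; ++i) {
    entry.OnResponse(503);  // Simulate a run of "503 Service Unavailable".
    std::printf("after %d failures: wait %d ms\n", i + 1,
                entry.ReleaseDelayMs());
  }
  return 0;
}
```
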
Diffstat (limited to 'net/url_request')
-rw-r--r-- | net/url_request/url_request_http_job.cc | 33
-rw-r--r-- | net/url_request/url_request_http_job.h  |  5
2 files changed, 29 insertions, 9 deletions
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index cbccff2..5564628 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -27,6 +27,7 @@
 #include "net/http/http_transaction.h"
 #include "net/http/http_transaction_factory.h"
 #include "net/http/http_util.h"
+#include "net/request_throttler/request_throttler_header_adapter.h"
 #include "net/url_request/https_prober.h"
 #include "net/url_request/url_request.h"
 #include "net/url_request/url_request_context.h"
@@ -91,6 +92,8 @@ URLRequestHttpJob::URLRequestHttpJob(URLRequest* request)
           this, &URLRequestHttpJob::OnReadCompleted)),
       read_in_progress_(false),
       transaction_(NULL),
+      throttling_entry_(Singleton<RequestThrottlerManager>::get()->
+          RegisterRequestUrl(request->url())),
       sdch_dictionary_advertised_(false),
       sdch_test_activated_(false),
       sdch_test_control_(false),
@@ -569,6 +572,11 @@ void URLRequestHttpJob::NotifyHeadersComplete() {
   // also need this info.
   is_cached_content_ = response_info_->was_cached;
 
+  if (!is_cached_content_) {
+    RequestThrottlerHeaderAdapter response_adapter(response_info_->headers);
+    throttling_entry_->UpdateWithResponse(&response_adapter);
+  }
+
   ProcessStrictTransportSecurityHeader();
 
   if (SdchManager::Global() &&
@@ -616,30 +624,37 @@ void URLRequestHttpJob::StartTransaction() {
   // If we already have a transaction, then we should restart the transaction
   // with auth provided by username_ and password_.
 
-  int rv;
+  int return_value;
+
   if (transaction_.get()) {
-    rv = transaction_->RestartWithAuth(username_, password_, &start_callback_);
+    return_value = transaction_->RestartWithAuth(username_,
+                                                 password_, &start_callback_);
     username_.clear();
     password_.clear();
   } else {
     DCHECK(request_->context());
     DCHECK(request_->context()->http_transaction_factory());
 
-    rv = request_->context()->http_transaction_factory()->CreateTransaction(
-        &transaction_);
-    if (rv == net::OK) {
-      rv = transaction_->Start(
-          &request_info_, &start_callback_, request_->net_log());
+    return_value = request_->context()->http_transaction_factory()->
+        CreateTransaction(&transaction_);
+    if (return_value == net::OK) {
+      if (throttling_entry_->IsRequestAllowed()) {
+        return_value = transaction_->Start(
+            &request_info_, &start_callback_, request_->net_log());
+      } else {
+        // Special error code for the exponential back-off module.
+        return_value = net::ERR_TEMPORARILY_THROTTLED_BY_DDOS;
+      }
     }
   }
 
-  if (rv == net::ERR_IO_PENDING)
+  if (return_value == net::ERR_IO_PENDING)
     return;
 
   // The transaction started synchronously, but we need to notify the
   // URLRequest delegate via the message loop.
   MessageLoop::current()->PostTask(FROM_HERE, NewRunnableMethod(
-      this, &URLRequestHttpJob::OnStartCompleted, rv));
+      this, &URLRequestHttpJob::OnStartCompleted, return_value));
 }
 
 void URLRequestHttpJob::AddExtraHeaders() {
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index ea2e544..79511f2 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -11,10 +11,12 @@
 #include <vector>
 
 #include "base/scoped_ptr.h"
+#include "base/string_util.h"
 #include "base/string16.h"
 #include "net/base/auth.h"
 #include "net/base/completion_callback.h"
 #include "net/http/http_request_info.h"
+#include "net/request_throttler/request_throttler_manager.h"
 #include "net/url_request/url_request_job.h"
 
 namespace net {
@@ -113,6 +115,9 @@ class URLRequestHttpJob : public URLRequestJob {
 
   scoped_ptr<net::HttpTransaction> transaction_;
 
+  // This is used to supervise traffic and enforce exponential back-off.
+  scoped_refptr<RequestThrottlerEntryInterface> throttling_entry_;
+
   // Indicated if an SDCH dictionary was advertised, and hence an SDCH
   // compressed response is expected. We use this to help detect (accidental?)
   // proxy corruption of a response, which sometimes marks SDCH content as
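
For orientation, the .cc changes above wire three calls into the job's lifetime: an entry is obtained per URL when the job is constructed, consulted via IsRequestAllowed() before the transaction starts (failing with ERR_TEMPORARILY_THROTTLED_BY_DDOS when back-off is in effect), and updated with the response once non-cached headers arrive. The stand-in types below only mirror that call sequence under those assumptions; they are not the net/request_throttler interfaces, and the back-off rule they apply is a toy placeholder.

```cpp
// Simplified stand-in types mirroring the call sequence URLRequestHttpJob
// follows in the diff above. None of these are the real net/ classes.
#include <ctime>
#include <iostream>
#include <map>
#include <string>

class FakeThrottlerEntry {
 public:
  FakeThrottlerEntry() : release_time_(0) {}

  // Analogous to IsRequestAllowed(): allow once the release time has passed.
  bool IsRequestAllowed() const { return std::time(NULL) >= release_time_; }

  // Analogous to UpdateWithResponse(): push the release time out on failure.
  void UpdateWithResponse(int http_status_code) {
    if (http_status_code >= 500)
      release_time_ = std::time(NULL) + 2;  // Toy two-second back-off.
    else
      release_time_ = 0;
  }

 private:
  std::time_t release_time_;
};

// Analogous to RequestThrottlerManager::RegisterRequestUrl(): one shared
// entry per request URL.
FakeThrottlerEntry* RegisterRequestUrl(const std::string& url) {
  static std::map<std::string, FakeThrottlerEntry> entries;
  return &entries[url];
}

int main() {
  // 1. Constructor: register the URL and keep a handle to its entry.
  FakeThrottlerEntry* entry = RegisterRequestUrl("http://example.com/api");

  // 2. StartTransaction(): bail out early if back-off is in effect.
  if (!entry->IsRequestAllowed()) {
    std::cout << "would fail with ERR_TEMPORARILY_THROTTLED_BY_DDOS\n";
    return 0;
  }

  // 3. NotifyHeadersComplete(): feed the (non-cached) response back in.
  entry->UpdateWithResponse(503);
  std::cout << "allowed immediately after a 503? "
            << (entry->IsRequestAllowed() ? "yes" : "no") << "\n";
  return 0;
}
```
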