Diffstat (limited to 'chrome/browser/net')
-rw-r--r--  chrome/browser/net/url_fetcher.cc           16
-rw-r--r--  chrome/browser/net/url_fetcher.h            25
-rw-r--r--  chrome/browser/net/url_fetcher_unittest.cc  76
3 files changed, 112 insertions, 5 deletions
diff --git a/chrome/browser/net/url_fetcher.cc b/chrome/browser/net/url_fetcher.cc
index a0a96eb..350dd50 100644
--- a/chrome/browser/net/url_fetcher.cc
+++ b/chrome/browser/net/url_fetcher.cc
@@ -110,7 +110,8 @@ URLFetcher::URLFetcher(const GURL& url,
RequestType request_type,
Delegate* d)
: ALLOW_THIS_IN_INITIALIZER_LIST(
- core_(new Core(this, url, request_type, d))) {
+ core_(new Core(this, url, request_type, d))),
+ automatically_retry_on_5xx_(true) {
}
URLFetcher::~URLFetcher() {
@@ -275,21 +276,24 @@ void URLFetcher::Core::OnCompletedURLRequest(const URLRequestStatus& status) {
if (response_code_ >= 500) {
// When encountering a server error, we will send the request again
// after backoff time.
- const int64 wait =
+ int64 back_off_time =
protect_entry_->UpdateBackoff(URLFetcherProtectEntry::FAILURE);
+ fetcher_->backoff_delay_ = base::TimeDelta::FromMilliseconds(back_off_time);
++num_retries_;
// Restarts the request if we still need to notify the delegate.
if (delegate_) {
- if (num_retries_ <= protect_entry_->max_retries()) {
+ if (fetcher_->automatically_retry_on_5xx_ &&
+ num_retries_ <= protect_entry_->max_retries()) {
ChromeThread::PostDelayedTask(
ChromeThread::IO, FROM_HERE,
- NewRunnableMethod(this, &Core::StartURLRequest), wait);
+ NewRunnableMethod(this, &Core::StartURLRequest), back_off_time);
} else {
delegate_->OnURLFetchComplete(fetcher_, url_, status, response_code_,
cookies_, data_);
}
}
} else {
+ fetcher_->backoff_delay_ = base::TimeDelta();
protect_entry_->UpdateBackoff(URLFetcherProtectEntry::SUCCESS);
if (delegate_)
delegate_->OnURLFetchComplete(fetcher_, url_, status, response_code_,
@@ -325,6 +329,10 @@ void URLFetcher::set_request_context(
core_->request_context_getter_ = request_context_getter;
}
+void URLFetcher::set_automatically_retry_on_5xx(bool retry) {
+ automatically_retry_on_5xx_ = retry;
+}
+
net::HttpResponseHeaders* URLFetcher::response_headers() const {
return core_->response_headers_;
}
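The retry decision above is easier to see outside the Chromium scaffolding. Below is a minimal standalone sketch of the logic this hunk introduces, not the actual implementation: every type and name (FetcherModel, UpdateBackoffOnFailure, the 100 ms seed) is a simplified stand-in, and the real code posts a delayed StartURLRequest task to the IO thread instead of printing.

#include <cstdio>

// Simplified stand-ins for the Chromium types involved; illustrative only.
struct Delegate {
  void OnURLFetchComplete(int response_code) {
    std::printf("delegate notified, response_code=%d\n", response_code);
  }
};

struct FetcherModel {
  bool automatically_retry_on_5xx;  // new flag, true by default
  long long backoff_delay_ms;       // new field, exposed to callers
  int num_retries;
  int max_retries;
  Delegate* delegate;

  explicit FetcherModel(Delegate* d)
      : automatically_retry_on_5xx(true), backoff_delay_ms(0),
        num_retries(0), max_retries(11), delegate(d) {}

  // Stand-in for UpdateBackoff(FAILURE): doubles the delay from a 100 ms seed.
  long long UpdateBackoffOnFailure() {
    backoff_delay_ms = backoff_delay_ms == 0 ? 100 : backoff_delay_ms * 2;
    return backoff_delay_ms;
  }

  void OnCompleted(int response_code) {
    if (response_code >= 500) {
      long long wait_ms = UpdateBackoffOnFailure();
      ++num_retries;
      if (delegate) {
        if (automatically_retry_on_5xx && num_retries <= max_retries) {
          // The real code posts a delayed StartURLRequest task here.
          std::printf("retrying in %lld ms\n", wait_ms);
        } else {
          delegate->OnURLFetchComplete(response_code);  // propagate the 5xx
        }
      }
    } else {
      backoff_delay_ms = 0;  // success resets the recorded delay
      if (delegate)
        delegate->OnURLFetchComplete(response_code);
    }
  }
};

int main() {
  Delegate d;
  FetcherModel fetcher(&d);
  fetcher.automatically_retry_on_5xx = false;  // pass 5xx straight through
  fetcher.OnCompleted(503);  // delegate sees the 5xx immediately
  fetcher.OnCompleted(200);  // success path resets backoff_delay_ms
  return 0;
}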
diff --git a/chrome/browser/net/url_fetcher.h b/chrome/browser/net/url_fetcher.h
index 695f808..b585124 100644
--- a/chrome/browser/net/url_fetcher.h
+++ b/chrome/browser/net/url_fetcher.h
@@ -15,6 +15,7 @@
#include "base/leak_tracker.h"
#include "base/message_loop.h"
#include "base/ref_counted.h"
+#include "base/time.h"
class GURL;
typedef std::vector<std::string> ResponseCookies;
@@ -135,6 +136,22 @@ class URLFetcher {
// request is started.
void set_request_context(URLRequestContextGetter* request_context_getter);
+ // If |retry| is false, 5xx responses will be propagated to the observer;
+ // if it is true, URLFetcher will automatically re-execute the request
+ // after backoff_delay() elapses. URLFetcher sets it to true by default.
+ void set_automatically_retry_on_5xx(bool retry);
+
+ // Returns the back-off delay before the request will be retried when a
+ // 5xx response is received.
+ base::TimeDelta backoff_delay() const { return backoff_delay_; }
+
+ // Sets the back-off delay, allowing unit tests to mock 5xx responses.
+#if defined(UNIT_TEST)
+ void set_backoff_delay(base::TimeDelta backoff_delay) {
+ backoff_delay_ = backoff_delay;
+ }
+#endif // defined(UNIT_TEST)
+
// Retrieve the response headers from the request. Must only be called after
// the OnURLFetchComplete callback has run.
virtual net::HttpResponseHeaders* response_headers() const;
@@ -162,6 +179,14 @@ class URLFetcher {
base::LeakTracker<URLFetcher> leak_tracker_;
+ // If |automatically_retry_on_5xx_| is false, 5xx responses will be
+ // propagated to the observer; if it is true, URLFetcher will automatically
+ // re-execute the request after the back-off delay has expired.
+ // True by default.
+ bool automatically_retry_on_5xx_;
+ // Back-off delay before the next retry. Zero by default.
+ base::TimeDelta backoff_delay_;
+
static bool g_interception_enabled;
DISALLOW_EVIL_CONSTRUCTORS(URLFetcher);
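Taken together, the new accessors let a caller implement its own retry policy. A hedged usage sketch follows, assuming the period-appropriate Chromium headers; MyContextGetter is a hypothetical URLRequestContextGetter implementation, not something this patch provides.

#include "chrome/browser/net/url_fetcher.h"

// Sketch of a delegate that opts out of automatic 5xx retries and applies
// its own policy based on the back-off delay the fetcher suggests.
class ManualRetryDelegate : public URLFetcher::Delegate {
 public:
  virtual void OnURLFetchComplete(const URLFetcher* source,
                                  const GURL& url,
                                  const URLRequestStatus& status,
                                  int response_code,
                                  const ResponseCookies& cookies,
                                  const std::string& data) {
    if (response_code >= 500) {
      // With automatic retries disabled, the 5xx arrives here directly;
      // backoff_delay() still carries the suggested wait before retrying.
      base::TimeDelta wait = source->backoff_delay();
      // ... schedule our own retry after |wait|, or give up.
    } else {
      // ... handle the successful response.
    }
  }
};

void StartFetch(const GURL& url, ManualRetryDelegate* delegate) {
  URLFetcher* fetcher = new URLFetcher(url, URLFetcher::GET, delegate);
  fetcher->set_request_context(new MyContextGetter());  // hypothetical
  fetcher->set_automatically_retry_on_5xx(false);       // propagate 5xx
  fetcher->Start();
}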
diff --git a/chrome/browser/net/url_fetcher_unittest.cc b/chrome/browser/net/url_fetcher_unittest.cc
index 664b319..079bad5 100644
--- a/chrome/browser/net/url_fetcher_unittest.cc
+++ b/chrome/browser/net/url_fetcher_unittest.cc
@@ -96,7 +96,7 @@ class URLFetcherHeadersTest : public URLFetcherTest {
const std::string& data);
};
-// Version of URLFetcherTest that tests overload proctection.
+// Version of URLFetcherTest that tests overload protection.
class URLFetcherProtectTest : public URLFetcherTest {
public:
virtual void CreateFetcher(const GURL& url);
@@ -111,6 +111,22 @@ class URLFetcherProtectTest : public URLFetcherTest {
Time start_time_;
};
+// Version of URLFetcherTest that tests overload protection when 5xx
+// responses are passed through to the delegate.
+class URLFetcherProtectTestPassedThrough : public URLFetcherTest {
+ public:
+ virtual void CreateFetcher(const GURL& url);
+ // URLFetcher::Delegate
+ virtual void OnURLFetchComplete(const URLFetcher* source,
+ const GURL& url,
+ const URLRequestStatus& status,
+ int response_code,
+ const ResponseCookies& cookies,
+ const std::string& data);
+ private:
+ Time start_time_;
+};
+
// Version of URLFetcherTest that tests bad HTTPS requests.
class URLFetcherBadHTTPSTest : public URLFetcherTest {
public:
@@ -286,6 +302,41 @@ void URLFetcherProtectTest::OnURLFetchComplete(const URLFetcher* source,
}
}
+void URLFetcherProtectTestPassedThrough::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcher(url, URLFetcher::GET, this);
+ fetcher_->set_request_context(new TestURLRequestContextGetter());
+ fetcher_->set_automatically_retry_on_5xx(false);
+ start_time_ = Time::Now();
+ fetcher_->Start();
+}
+
+void URLFetcherProtectTestPassedThrough::OnURLFetchComplete(
+ const URLFetcher* source,
+ const GURL& url,
+ const URLRequestStatus& status,
+ int response_code,
+ const ResponseCookies& cookies,
+ const std::string& data) {
+ const TimeDelta one_minute = TimeDelta::FromMilliseconds(60000);
+ if (response_code >= 500) {
+ // Now running the ServerUnavailable test.
+ // Control should get here on the first attempt, so almost immediately,
+ // and *not* after attempting to execute all 11 requests (2.5 minutes).
+ EXPECT_TRUE(Time::Now() - start_time_ < one_minute);
+ EXPECT_TRUE(status.is_success());
+ // Check that the suggested back-off time is greater than 0.
+ EXPECT_GT(fetcher_->backoff_delay().InMicroseconds(), 0);
+ EXPECT_FALSE(data.empty());
+ delete fetcher_;
+ ChromeThread::PostTask(
+ ChromeThread::IO, FROM_HERE, new MessageLoop::QuitTask());
+ } else {
+ // We should not get here!
+ FAIL();
+ }
+}
+
+
URLFetcherBadHTTPSTest::URLFetcherBadHTTPSTest() {
PathService::Get(base::DIR_SOURCE_ROOT, &cert_dir_);
cert_dir_ = cert_dir_.AppendASCII("chrome");
@@ -439,6 +490,29 @@ TEST_F(URLFetcherProtectTest, ServerUnavailable) {
MessageLoop::current()->Run();
}
+TEST_F(URLFetcherProtectTestPassedThrough, ServerUnavailablePropagateResponse) {
+ scoped_refptr<HTTPTestServer> server =
+ HTTPTestServer::CreateServer(L"chrome/test/data", NULL);
+ ASSERT_TRUE(NULL != server.get());
+ GURL url = GURL(server->TestServerPage("files/server-unavailable.html"));
+
+ // Registers an entry for the test URL. The backoff time is calculated by:
+ //   new_backoff = 2.0 * old_backoff + 0
+ // with a maximum backoff time of 150000 milliseconds.
+ // The maximum number of retries allowed is set to 11.
+ URLFetcherProtectManager* manager = URLFetcherProtectManager::GetInstance();
+ // Were automatic retries not disabled, the total time would be 150s.
+ // In reality it should finish as soon as the server responds.
+ URLFetcherProtectEntry* entry =
+ new URLFetcherProtectEntry(200, 3, 11, 100, 2.0, 0, 150000);
+ manager->Register(url.host(), entry);
+
+ CreateFetcher(url);
+
+ MessageLoop::current()->Run();
+}
+
+
TEST_F(URLFetcherBadHTTPSTest, BadHTTPSTest) {
scoped_refptr<HTTPSTestServer> server =
HTTPSTestServer::CreateExpiredServer(kDocRoot);
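As a closing aside, the back-off series driving these timings can be reproduced in a few lines. A minimal sketch, assuming the URLFetcherProtectEntry arguments above map to max_retries = 11, initial timeout = 100 ms, multiplier = 2.0, constant = 0, and maximum timeout = 150000 ms; that mapping is inferred from the test comments, not from the header.

#include <algorithm>
#include <cstdio>

int main() {
  // new_backoff = multiplier * old_backoff + constant, clamped to a maximum.
  double backoff_ms = 100;       // initial timeout from the test entry
  const double multiplier = 2.0;
  const double constant_ms = 0;
  const double max_ms = 150000;  // last constructor argument in the test
  double total_ms = 0;
  for (int retry = 1; retry <= 11; ++retry) {
    total_ms += backoff_ms;
    std::printf("retry %2d: wait %6.0f ms (cumulative %6.0f ms)\n",
                retry, backoff_ms, total_ms);
    backoff_ms = std::min(multiplier * backoff_ms + constant_ms, max_ms);
  }
  return 0;
}

Run to completion, the cumulative wait grows into the minutes range, which is why the pass-through test above must return on the first response rather than exhaust its retries.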