author    akalin@chromium.org <akalin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2012-06-15 07:51:46 +0000
committer akalin@chromium.org <akalin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2012-06-15 07:51:46 +0000
commit    8368a0a348e5e445cefbee7fec023a988f21fa75 (patch)
tree      78edcedcb80eb1e0a7099f0abee8b02719d5858b /net/url_request
parent    c470e1b90271e7347962ea9641b91fe87a7ea034 (diff)
Move URLFetcherImpl to net/
Add some TODOs for follow-up tasks. Leave URLFetcher::Create in content, but
move it to url_fetcher.cc.

BUG=118220
TEST=

Review URL: https://chromiumcodereview.appspot.com/10534154

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@142355 0039d316-1c4b-4281-b951-d872f2087c98
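As context for the TODOs above: the content-side URLFetcher::Create (now in url_fetcher.cc) is expected to dispatch through the factory hook that URLFetcherImpl keeps public below. The following is only a hedged sketch under that assumption; the function name, the id argument, and the exact URLFetcherFactory::CreateURLFetcher() signature are illustrative, not the code that was actually moved.

#include "googleurl/src/gurl.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_fetcher_factory.h"
#include "net/url_request/url_fetcher_impl.h"

// Hypothetical factory dispatch: a test-installed factory (see
// URLFetcherImpl::set_factory()) takes precedence; a NULL factory means
// "create a real URLFetcherImpl directly".
net::URLFetcher* CreateFetcher(const GURL& url,
                               net::URLFetcher::RequestType request_type,
                               net::URLFetcherDelegate* d) {
  net::URLFetcherFactory* factory = net::URLFetcherImpl::factory();
  if (factory) {
    // The id argument (0) and this call signature are assumptions made for
    // illustration only.
    return factory->CreateURLFetcher(0, url, request_type, d);
  }
  return new net::URLFetcherImpl(url, request_type, d);
}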
Diffstat (limited to 'net/url_request')
-rw-r--r--  net/url_request/url_fetcher_core.h             4
-rw-r--r--  net/url_request/url_fetcher_impl.cc          201
-rw-r--r--  net/url_request/url_fetcher_impl.h           123
-rw-r--r--  net/url_request/url_fetcher_impl_unittest.cc 1211
4 files changed, 1536 insertions(+), 3 deletions(-)
diff --git a/net/url_request/url_fetcher_core.h b/net/url_request/url_fetcher_core.h
index a8532c2..f431d90 100644
--- a/net/url_request/url_fetcher_core.h
+++ b/net/url_request/url_fetcher_core.h
@@ -21,7 +21,6 @@
#include "base/timer.h"
#include "googleurl/src/gurl.h"
#include "net/base/host_port_pair.h"
-#include "net/base/net_export.h"
#include "net/http/http_request_headers.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request.h"
@@ -38,8 +37,7 @@ class URLFetcherDelegate;
class URLRequestContextGetter;
class URLRequestThrottlerEntryInterface;
-// TODO(akalin): Remove NET_EXPORT once URLFetcherImpl is in net/.
-class NET_EXPORT URLFetcherCore
+class URLFetcherCore
: public base::RefCountedThreadSafe<URLFetcherCore>,
public URLRequest::Delegate {
public:
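The NET_EXPORT annotation removed above only matters in the component (shared-library) build: it marks symbols that must be visible outside net. Now that URLFetcherImpl, the only non-test consumer of URLFetcherCore, lives inside net/, the export is no longer needed. Below is a sketch of the usual component-export macro pattern, assuming a component build; it is illustrative and not the literal contents of net/base/net_export.h.

// Illustrative component-export pattern (see net/base/net_export.h for the
// real definition used by the net component).
#if defined(COMPONENT_BUILD)
#if defined(WIN32)
#if defined(NET_IMPLEMENTATION)
#define NET_EXPORT __declspec(dllexport)   // building the net DLL itself
#else
#define NET_EXPORT __declspec(dllimport)   // consuming the net DLL
#endif
#else  // non-Windows
#define NET_EXPORT __attribute__((visibility("default")))
#endif
#else  // static build: the annotation expands to nothing
#define NET_EXPORT
#endif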
diff --git a/net/url_request/url_fetcher_impl.cc b/net/url_request/url_fetcher_impl.cc
new file mode 100644
index 0000000..da07522
--- /dev/null
+++ b/net/url_request/url_fetcher_impl.cc
@@ -0,0 +1,201 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "net/url_request/url_fetcher_impl.h"
+
+#include "base/bind.h"
+#include "base/message_loop_proxy.h"
+#include "net/url_request/url_fetcher_core.h"
+#include "net/url_request/url_fetcher_factory.h"
+
+namespace net {
+
+static URLFetcherFactory* g_factory = NULL;
+
+URLFetcherImpl::URLFetcherImpl(const GURL& url,
+ RequestType request_type,
+ URLFetcherDelegate* d)
+ : ALLOW_THIS_IN_INITIALIZER_LIST(
+ core_(new URLFetcherCore(this, url, request_type, d))) {
+}
+
+URLFetcherImpl::~URLFetcherImpl() {
+ core_->Stop();
+}
+
+void URLFetcherImpl::SetUploadData(const std::string& upload_content_type,
+ const std::string& upload_content) {
+ core_->SetUploadData(upload_content_type, upload_content);
+}
+
+void URLFetcherImpl::SetChunkedUpload(const std::string& content_type) {
+ core_->SetChunkedUpload(content_type);
+}
+
+void URLFetcherImpl::AppendChunkToUpload(const std::string& data,
+ bool is_last_chunk) {
+ DCHECK(data.length());
+ core_->AppendChunkToUpload(data, is_last_chunk);
+}
+
+void URLFetcherImpl::SetReferrer(const std::string& referrer) {
+ core_->SetReferrer(referrer);
+}
+
+void URLFetcherImpl::SetLoadFlags(int load_flags) {
+ core_->SetLoadFlags(load_flags);
+}
+
+int URLFetcherImpl::GetLoadFlags() const {
+ return core_->GetLoadFlags();
+}
+
+void URLFetcherImpl::SetExtraRequestHeaders(
+ const std::string& extra_request_headers) {
+ core_->SetExtraRequestHeaders(extra_request_headers);
+}
+
+void URLFetcherImpl::AddExtraRequestHeader(const std::string& header_line) {
+ core_->AddExtraRequestHeader(header_line);
+}
+
+void URLFetcherImpl::GetExtraRequestHeaders(
+ HttpRequestHeaders* headers) const {
+ core_->GetExtraRequestHeaders(headers);
+}
+
+void URLFetcherImpl::SetRequestContext(
+ URLRequestContextGetter* request_context_getter) {
+ core_->SetRequestContext(request_context_getter);
+}
+
+void URLFetcherImpl::SetFirstPartyForCookies(
+ const GURL& first_party_for_cookies) {
+ core_->SetFirstPartyForCookies(first_party_for_cookies);
+}
+
+void URLFetcherImpl::SetURLRequestUserData(
+ const void* key,
+ const CreateDataCallback& create_data_callback) {
+ core_->SetURLRequestUserData(key, create_data_callback);
+}
+
+void URLFetcherImpl::SetStopOnRedirect(bool stop_on_redirect) {
+ core_->SetStopOnRedirect(stop_on_redirect);
+}
+
+void URLFetcherImpl::SetAutomaticallyRetryOn5xx(bool retry) {
+ core_->SetAutomaticallyRetryOn5xx(retry);
+}
+
+void URLFetcherImpl::SetMaxRetries(int max_retries) {
+ core_->SetMaxRetries(max_retries);
+}
+
+int URLFetcherImpl::GetMaxRetries() const {
+ return core_->GetMaxRetries();
+}
+
+
+base::TimeDelta URLFetcherImpl::GetBackoffDelay() const {
+ return core_->GetBackoffDelay();
+}
+
+void URLFetcherImpl::SaveResponseToFileAtPath(
+ const FilePath& file_path,
+ scoped_refptr<base::MessageLoopProxy> file_message_loop_proxy) {
+ core_->SaveResponseToFileAtPath(file_path, file_message_loop_proxy);
+}
+
+void URLFetcherImpl::SaveResponseToTemporaryFile(
+ scoped_refptr<base::MessageLoopProxy> file_message_loop_proxy) {
+ core_->SaveResponseToTemporaryFile(file_message_loop_proxy);
+}
+
+HttpResponseHeaders* URLFetcherImpl::GetResponseHeaders() const {
+ return core_->GetResponseHeaders();
+}
+
+HostPortPair URLFetcherImpl::GetSocketAddress() const {
+ return core_->GetSocketAddress();
+}
+
+bool URLFetcherImpl::WasFetchedViaProxy() const {
+ return core_->WasFetchedViaProxy();
+}
+
+void URLFetcherImpl::Start() {
+ core_->Start();
+}
+
+const GURL& URLFetcherImpl::GetOriginalURL() const {
+ return core_->GetOriginalURL();
+}
+
+const GURL& URLFetcherImpl::GetURL() const {
+ return core_->GetURL();
+}
+
+const URLRequestStatus& URLFetcherImpl::GetStatus() const {
+ return core_->GetStatus();
+}
+
+int URLFetcherImpl::GetResponseCode() const {
+ return core_->GetResponseCode();
+}
+
+const ResponseCookies& URLFetcherImpl::GetCookies() const {
+ return core_->GetCookies();
+}
+
+bool URLFetcherImpl::FileErrorOccurred(
+ base::PlatformFileError* out_error_code) const {
+ return core_->FileErrorOccurred(out_error_code);
+}
+
+void URLFetcherImpl::ReceivedContentWasMalformed() {
+ core_->ReceivedContentWasMalformed();
+}
+
+bool URLFetcherImpl::GetResponseAsString(
+ std::string* out_response_string) const {
+ return core_->GetResponseAsString(out_response_string);
+}
+
+bool URLFetcherImpl::GetResponseAsFilePath(
+ bool take_ownership,
+ FilePath* out_response_path) const {
+ return core_->GetResponseAsFilePath(take_ownership, out_response_path);
+}
+
+// static
+void URLFetcherImpl::CancelAll() {
+ URLFetcherCore::CancelAll();
+}
+
+// static
+void URLFetcherImpl::SetEnableInterceptionForTests(bool enabled) {
+ URLFetcherCore::SetEnableInterceptionForTests(enabled);
+}
+
+// static
+int URLFetcherImpl::GetNumFetcherCores() {
+ return URLFetcherCore::GetNumFetcherCores();
+}
+
+URLFetcherDelegate* URLFetcherImpl::delegate() const {
+ return core_->delegate();
+}
+
+// static
+URLFetcherFactory* URLFetcherImpl::factory() {
+ return g_factory;
+}
+
+// static
+void URLFetcherImpl::set_factory(URLFetcherFactory* factory) {
+ g_factory = factory;
+}
+
+} // namespace net
diff --git a/net/url_request/url_fetcher_impl.h b/net/url_request/url_fetcher_impl.h
new file mode 100644
index 0000000..9076691
--- /dev/null
+++ b/net/url_request/url_fetcher_impl.h
@@ -0,0 +1,123 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains URLFetcher, a wrapper around URLRequest that handles
+// low-level details like thread safety, ref counting, and incremental buffer
+// reading. This is useful for callers who simply want to get the data from a
+// URL and don't care about all the nitty-gritty details.
+//
+// NOTE(willchan): Only one "IO" thread is supported for URLFetcher. This is a
+// temporary situation. We will work on allowing support for multiple "IO"
+// threads per process.
+
+#ifndef NET_URL_REQUEST_URL_FETCHER_IMPL_H_
+#define NET_URL_REQUEST_URL_FETCHER_IMPL_H_
+#pragma once
+
+#include "base/basictypes.h"
+#include "base/compiler_specific.h"
+#include "net/base/net_export.h"
+#include "net/url_request/url_fetcher.h"
+
+namespace net {
+class URLFetcherCore;
+class URLFetcherDelegate;
+class URLFetcherFactory;
+
+// TODO(akalin): Remove NET_EXPORT once URLFetcher::Create is in net/.
+class NET_EXPORT URLFetcherImpl : public URLFetcher {
+ public:
+ // |url| is the URL to send the request to.
+ // |request_type| is the type of request to make.
+ // |d| is the object that will receive the callback on fetch completion.
+ URLFetcherImpl(const GURL& url,
+ RequestType request_type,
+ URLFetcherDelegate* d);
+ virtual ~URLFetcherImpl();
+
+ // URLFetcher implementation:
+ virtual void SetUploadData(const std::string& upload_content_type,
+ const std::string& upload_content) OVERRIDE;
+ virtual void SetChunkedUpload(
+ const std::string& upload_content_type) OVERRIDE;
+ virtual void AppendChunkToUpload(const std::string& data,
+ bool is_last_chunk) OVERRIDE;
+ virtual void SetLoadFlags(int load_flags) OVERRIDE;
+ virtual int GetLoadFlags() const OVERRIDE;
+ virtual void SetReferrer(const std::string& referrer) OVERRIDE;
+ virtual void SetExtraRequestHeaders(
+ const std::string& extra_request_headers) OVERRIDE;
+ virtual void AddExtraRequestHeader(const std::string& header_line) OVERRIDE;
+ virtual void GetExtraRequestHeaders(
+ HttpRequestHeaders* headers) const OVERRIDE;
+ virtual void SetRequestContext(
+ URLRequestContextGetter* request_context_getter) OVERRIDE;
+ virtual void SetFirstPartyForCookies(
+ const GURL& first_party_for_cookies) OVERRIDE;
+ virtual void SetURLRequestUserData(
+ const void* key,
+ const CreateDataCallback& create_data_callback) OVERRIDE;
+ virtual void SetStopOnRedirect(bool stop_on_redirect) OVERRIDE;
+ virtual void SetAutomaticallyRetryOn5xx(bool retry) OVERRIDE;
+ virtual void SetMaxRetries(int max_retries) OVERRIDE;
+ virtual int GetMaxRetries() const OVERRIDE;
+ virtual base::TimeDelta GetBackoffDelay() const OVERRIDE;
+ virtual void SaveResponseToFileAtPath(
+ const FilePath& file_path,
+ scoped_refptr<base::MessageLoopProxy> file_message_loop_proxy) OVERRIDE;
+ virtual void SaveResponseToTemporaryFile(
+ scoped_refptr<base::MessageLoopProxy> file_message_loop_proxy) OVERRIDE;
+ virtual HttpResponseHeaders* GetResponseHeaders() const OVERRIDE;
+ virtual HostPortPair GetSocketAddress() const OVERRIDE;
+ virtual bool WasFetchedViaProxy() const OVERRIDE;
+ virtual void Start() OVERRIDE;
+ virtual const GURL& GetOriginalURL() const OVERRIDE;
+ virtual const GURL& GetURL() const OVERRIDE;
+ virtual const URLRequestStatus& GetStatus() const OVERRIDE;
+ virtual int GetResponseCode() const OVERRIDE;
+ virtual const ResponseCookies& GetCookies() const OVERRIDE;
+ virtual bool FileErrorOccurred(
+ base::PlatformFileError* out_error_code) const OVERRIDE;
+ virtual void ReceivedContentWasMalformed() OVERRIDE;
+ virtual bool GetResponseAsString(
+ std::string* out_response_string) const OVERRIDE;
+ virtual bool GetResponseAsFilePath(
+ bool take_ownership,
+ FilePath* out_response_path) const OVERRIDE;
+
+ static void CancelAll();
+
+ static void SetEnableInterceptionForTests(bool enabled);
+
+ // TODO(akalin): Make these private again once URLFetcher::Create()
+ // is in net/.
+
+ static URLFetcherFactory* factory();
+
+ // Sets the factory used by the static method Create to create a URLFetcher.
+ // URLFetcher does not take ownership of |factory|. A value of NULL results
+ // in a URLFetcher being created directly.
+ //
+ // NOTE: for safety, this should only be used through ScopedURLFetcherFactory!
+ static void set_factory(URLFetcherFactory* factory);
+
+ protected:
+ // Returns the delegate.
+ URLFetcherDelegate* delegate() const;
+
+ private:
+ friend class URLFetcherTest;
+
+ // Only used by URLFetcherTest; returns the number of URLFetcherCore
+ // objects actively running.
+ static int GetNumFetcherCores();
+
+ const scoped_refptr<URLFetcherCore> core_;
+
+ DISALLOW_COPY_AND_ASSIGN(URLFetcherImpl);
+};
+
+} // namespace net
+
+#endif // NET_URL_REQUEST_URL_FETCHER_IMPL_H_
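To make the header comment concrete, here is a minimal usage sketch of this interface as the tests below exercise it; ExampleDelegate and http://example.com/ are illustrative names, and the URLRequestContextGetter is assumed to be supplied by the embedder.

#include <string>

#include "googleurl/src/gurl.h"
#include "net/url_request/url_fetcher_delegate.h"
#include "net/url_request/url_fetcher_impl.h"
#include "net/url_request/url_request_context_getter.h"

// Owns the fetcher and deletes it when the fetch completes, mirroring the
// ownership pattern used by URLFetcherTest in the unit tests below.
class ExampleDelegate : public net::URLFetcherDelegate {
 public:
  explicit ExampleDelegate(net::URLRequestContextGetter* context_getter)
      : fetcher_(new net::URLFetcherImpl(GURL("http://example.com/"),
                                         net::URLFetcher::GET, this)) {
    fetcher_->SetRequestContext(context_getter);
    fetcher_->Start();  // OnURLFetchComplete() fires when the fetch is done.
  }

  virtual void OnURLFetchComplete(const net::URLFetcher* source) OVERRIDE {
    std::string body;
    if (source->GetStatus().is_success() &&
        source->GetResponseCode() == 200 &&
        source->GetResponseAsString(&body)) {
      // Use |body| here.
    }
    delete fetcher_;  // The creator owns the fetcher, as in the tests.
    fetcher_ = NULL;
  }

 private:
  net::URLFetcherImpl* fetcher_;
};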
diff --git a/net/url_request/url_fetcher_impl_unittest.cc b/net/url_request/url_fetcher_impl_unittest.cc
new file mode 100644
index 0000000..419ec4f
--- /dev/null
+++ b/net/url_request/url_fetcher_impl_unittest.cc
@@ -0,0 +1,1211 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "net/url_request/url_fetcher_impl.h"
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/file_util.h"
+#include "base/message_loop_proxy.h"
+#include "base/scoped_temp_dir.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/thread.h"
+#include "build/build_config.h"
+#include "crypto/nss_util.h"
+#include "net/http/http_response_headers.h"
+#include "net/test/test_server.h"
+#include "net/url_request/url_fetcher_delegate.h"
+#include "net/url_request/url_request_context_getter.h"
+#include "net/url_request/url_request_test_util.h"
+#include "net/url_request/url_request_throttler_manager.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#if defined(USE_NSS)
+#include "net/ocsp/nss_ocsp.h"
+#endif
+
+namespace net {
+
+using base::Time;
+using base::TimeDelta;
+
+// TODO(eroman): Add a regression test for http://crbug.com/40505.
+
+namespace {
+
+// TODO(akalin): Move all the test data to somewhere under net/.
+const FilePath::CharType kDocRoot[] = FILE_PATH_LITERAL("chrome/test/data");
+const char kTestServerFilePrefix[] = "files/";
+
+class ThrottlingTestURLRequestContext : public TestURLRequestContext {
+ public:
+ ThrottlingTestURLRequestContext() : TestURLRequestContext(true) {
+ set_throttler_manager(&throttler_manager_);
+ Init();
+ DCHECK(throttler_manager() != NULL);
+ }
+
+ private:
+ URLRequestThrottlerManager throttler_manager_;
+};
+
+class ThrottlingTestURLRequestContextGetter
+ : public TestURLRequestContextGetter {
+ public:
+ ThrottlingTestURLRequestContextGetter(
+ base::MessageLoopProxy* io_message_loop_proxy,
+ TestURLRequestContext* request_context)
+ : TestURLRequestContextGetter(io_message_loop_proxy),
+ context_(request_context) {
+ }
+
+ virtual TestURLRequestContext* GetURLRequestContext() OVERRIDE {
+ return context_;
+ }
+
+ protected:
+ virtual ~ThrottlingTestURLRequestContextGetter() {}
+
+ TestURLRequestContext* const context_;
+};
+
+} // namespace
+
+class URLFetcherTest : public testing::Test,
+ public URLFetcherDelegate {
+ public:
+ URLFetcherTest()
+ : fetcher_(NULL),
+ context_(new ThrottlingTestURLRequestContext()) {
+ }
+
+ static int GetNumFetcherCores() {
+ return URLFetcherImpl::GetNumFetcherCores();
+ }
+
+ // Creates a URLFetcher, using the program's main thread to do IO.
+ virtual void CreateFetcher(const GURL& url);
+
+ // URLFetcherDelegate
+ // Subclasses that override this should either call this function or
+ // CleanupAfterFetchComplete() at the end of their processing, depending on
+ // whether they want to check for a non-empty HTTP 200 response or not.
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+
+ // Deletes |fetcher| and terminates the message loop.
+ void CleanupAfterFetchComplete();
+
+ scoped_refptr<base::MessageLoopProxy> io_message_loop_proxy() {
+ return io_message_loop_proxy_;
+ }
+
+ TestURLRequestContext* request_context() {
+ return context_.get();
+ }
+
+ protected:
+ virtual void SetUp() OVERRIDE {
+ testing::Test::SetUp();
+
+ io_message_loop_proxy_ = base::MessageLoopProxy::current();
+
+#if defined(USE_NSS)
+ crypto::EnsureNSSInit();
+ EnsureNSSHttpIOInit();
+#endif
+ }
+
+ virtual void TearDown() OVERRIDE {
+#if defined(USE_NSS)
+ ShutdownNSSHttpIO();
+#endif
+ }
+
+ // URLFetcher is designed to run on the main UI thread, but in our tests
+ // we assume that the current thread is the IO thread to which the
+ // URLFetcher dispatches its requests. When we wish to simulate use from
+ // a UI thread, we spawn a separate worker thread to act as it.
+ MessageLoopForIO io_loop_;
+ scoped_refptr<base::MessageLoopProxy> io_message_loop_proxy_;
+
+ URLFetcherImpl* fetcher_;
+ const scoped_ptr<TestURLRequestContext> context_;
+};
+
+void URLFetcherTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ fetcher_->Start();
+}
+
+void URLFetcherTest::OnURLFetchComplete(const URLFetcher* source) {
+ EXPECT_TRUE(source->GetStatus().is_success());
+ EXPECT_EQ(200, source->GetResponseCode()); // HTTP OK
+
+ std::string data;
+ EXPECT_TRUE(source->GetResponseAsString(&data));
+ EXPECT_FALSE(data.empty());
+
+ CleanupAfterFetchComplete();
+}
+
+void URLFetcherTest::CleanupAfterFetchComplete() {
+ // Delete the fetcher here rather than in the destructor, because the
+ // destructor won't necessarily run on the same thread as CreateFetcher().
+ delete fetcher_;
+
+ io_message_loop_proxy()->PostTask(FROM_HERE, MessageLoop::QuitClosure());
+ // If the current message loop is not the IO loop, it will be shut down when
+ // the main loop returns and this thread subsequently goes out of scope.
+}
+
+namespace {
+
+// Version of URLFetcherTest that does a POST instead of a GET.
+class URLFetcherPostTest : public URLFetcherTest {
+ public:
+ // URLFetcherTest override.
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+};
+
+// Version of URLFetcherTest that tests download progress reports.
+class URLFetcherDownloadProgressTest : public URLFetcherTest {
+ public:
+ // URLFetcherTest override.
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+
+ // URLFetcherDelegate
+ virtual void OnURLFetchDownloadProgress(const URLFetcher* source,
+ int64 current, int64 total) OVERRIDE;
+ protected:
+ int64 previous_progress_;
+ int64 expected_total_;
+};
+
+// Version of URLFetcherTest that tests progress reports at cancellation.
+class URLFetcherDownloadProgressCancelTest : public URLFetcherTest {
+ public:
+ // URLFetcherTest override.
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+ virtual void OnURLFetchDownloadProgress(const URLFetcher* source,
+ int64 current, int64 total) OVERRIDE;
+ protected:
+ bool cancelled_;
+};
+
+// Version of URLFetcherTest that tests upload progress reports.
+class URLFetcherUploadProgressTest : public URLFetcherTest {
+ public:
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+
+ // URLFetcherDelegate
+ virtual void OnURLFetchUploadProgress(const URLFetcher* source,
+ int64 current, int64 total) OVERRIDE;
+ protected:
+ int64 previous_progress_;
+ std::string chunk_;
+ int64 number_of_chunks_added_;
+};
+
+// Version of URLFetcherTest that tests headers.
+class URLFetcherHeadersTest : public URLFetcherTest {
+ public:
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+};
+
+// Version of URLFetcherTest that tests SocketAddress.
+class URLFetcherSocketAddressTest : public URLFetcherTest {
+ public:
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+ protected:
+ std::string expected_host_;
+ uint16 expected_port_;
+};
+
+// Version of URLFetcherTest that tests stopping on a redirect.
+class URLFetcherStopOnRedirectTest : public URLFetcherTest {
+ public:
+ URLFetcherStopOnRedirectTest();
+ virtual ~URLFetcherStopOnRedirectTest();
+
+ // URLFetcherTest override.
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+
+ protected:
+ // The URL we should be redirected to.
+ static const char* kRedirectTarget;
+
+ bool callback_called_; // Set to true in OnURLFetchComplete().
+};
+
+// Version of URLFetcherTest that tests overload protection.
+class URLFetcherProtectTest : public URLFetcherTest {
+ public:
+ // URLFetcherTest override.
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+ private:
+ Time start_time_;
+};
+
+// Version of URLFetcherTest that tests overload protection, when responses
+// passed through.
+class URLFetcherProtectTestPassedThrough : public URLFetcherTest {
+ public:
+ // URLFetcherTest override.
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+ private:
+ Time start_time_;
+};
+
+// Version of URLFetcherTest that tests bad HTTPS requests.
+class URLFetcherBadHTTPSTest : public URLFetcherTest {
+ public:
+ URLFetcherBadHTTPSTest();
+
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+
+ private:
+ FilePath cert_dir_;
+};
+
+// Version of URLFetcherTest that tests request cancellation on shutdown.
+class URLFetcherCancelTest : public URLFetcherTest {
+ public:
+ // URLFetcherTest override.
+ virtual void CreateFetcher(const GURL& url) OVERRIDE;
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+
+ void CancelRequest();
+};
+
+// Version of TestURLRequestContext that posts a Quit task to the IO
+// thread once it is deleted.
+class CancelTestURLRequestContext : public ThrottlingTestURLRequestContext {
+ public:
+ CancelTestURLRequestContext() {
+ }
+
+ private:
+ virtual ~CancelTestURLRequestContext() {
+ // The destructor should execute on the IO thread. Post the quit task to
+ // the current thread.
+ MessageLoop::current()->PostTask(FROM_HERE, MessageLoop::QuitClosure());
+ }
+};
+
+class CancelTestURLRequestContextGetter
+ : public TestURLRequestContextGetter {
+ public:
+ CancelTestURLRequestContextGetter(
+ base::MessageLoopProxy* io_message_loop_proxy,
+ const GURL& throttle_for_url)
+ : TestURLRequestContextGetter(io_message_loop_proxy),
+ io_message_loop_proxy_(io_message_loop_proxy),
+ context_created_(false, false),
+ throttle_for_url_(throttle_for_url) {
+ }
+ virtual TestURLRequestContext* GetURLRequestContext() OVERRIDE {
+ if (!context_.get()) {
+ context_.reset(new CancelTestURLRequestContext());
+ DCHECK(context_->throttler_manager());
+
+ // Registers an entry for test url. The backoff time is calculated by:
+ // new_backoff = 2.0 * old_backoff + 0
+ // The initial backoff is 2 seconds and maximum backoff is 4 seconds.
+ // Maximum retries allowed is set to 2.
+ scoped_refptr<URLRequestThrottlerEntry> entry(
+ new URLRequestThrottlerEntry(
+ context_->throttler_manager(),
+ "", 200, 3, 2000, 2.0, 0.0, 4000));
+ context_->throttler_manager()->OverrideEntryForTests(
+ throttle_for_url_, entry);
+
+ context_created_.Signal();
+ }
+ return context_.get();
+ }
+ virtual scoped_refptr<base::MessageLoopProxy> GetIOMessageLoopProxy() const {
+ return io_message_loop_proxy_;
+ }
+ void WaitForContextCreation() {
+ context_created_.Wait();
+ }
+
+ protected:
+ virtual ~CancelTestURLRequestContextGetter() {}
+
+ private:
+ scoped_ptr<TestURLRequestContext> context_;
+ scoped_refptr<base::MessageLoopProxy> io_message_loop_proxy_;
+ base::WaitableEvent context_created_;
+ GURL throttle_for_url_;
+};
+
+// Version of URLFetcherTest that tests retrying the same request twice.
+class URLFetcherMultipleAttemptTest : public URLFetcherTest {
+ public:
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+ private:
+ std::string data_;
+};
+
+class URLFetcherFileTest : public URLFetcherTest {
+ public:
+ URLFetcherFileTest() : take_ownership_of_file_(false),
+ expected_file_error_(base::PLATFORM_FILE_OK) {}
+
+ void CreateFetcherForFile(const GURL& url, const FilePath& file_path);
+ void CreateFetcherForTempFile(const GURL& url);
+
+ // URLFetcherDelegate
+ virtual void OnURLFetchComplete(const URLFetcher* source) OVERRIDE;
+
+ protected:
+ FilePath expected_file_;
+ FilePath file_path_;
+
+ // Set by the test. Used in OnURLFetchComplete() to decide if
+ // the URLFetcher should own the temp file, so that we can test
+ // disowning prevents the file from being deleted.
+ bool take_ownership_of_file_;
+
+ // Expected file error code for the test.
+ // PLATFORM_FILE_OK when expecting success.
+ base::PlatformFileError expected_file_error_;
+};
+
+void URLFetcherPostTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::POST, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ fetcher_->SetUploadData("application/x-www-form-urlencoded",
+ "bobsyeruncle");
+ fetcher_->Start();
+}
+
+void URLFetcherPostTest::OnURLFetchComplete(const URLFetcher* source) {
+ std::string data;
+ EXPECT_TRUE(source->GetResponseAsString(&data));
+ EXPECT_EQ(std::string("bobsyeruncle"), data);
+ URLFetcherTest::OnURLFetchComplete(source);
+}
+
+void URLFetcherDownloadProgressTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ previous_progress_ = 0;
+ fetcher_->Start();
+}
+
+void URLFetcherDownloadProgressTest::OnURLFetchDownloadProgress(
+ const URLFetcher* source, int64 current, int64 total) {
+ // Increasing between 0 and total.
+ EXPECT_LE(0, current);
+ EXPECT_GE(total, current);
+ EXPECT_LE(previous_progress_, current);
+ previous_progress_ = current;
+ EXPECT_EQ(expected_total_, total);
+}
+
+void URLFetcherDownloadProgressCancelTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ cancelled_ = false;
+ fetcher_->Start();
+}
+
+void URLFetcherDownloadProgressCancelTest::OnURLFetchDownloadProgress(
+ const URLFetcher* source, int64 current, int64 total) {
+ EXPECT_FALSE(cancelled_);
+ if (!cancelled_) {
+ cancelled_ = true;
+ CleanupAfterFetchComplete();
+ }
+}
+
+void URLFetcherDownloadProgressCancelTest::OnURLFetchComplete(
+ const URLFetcher* source) {
+ // Should have been cancelled.
+ ADD_FAILURE();
+ CleanupAfterFetchComplete();
+}
+
+void URLFetcherUploadProgressTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::POST, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ previous_progress_ = 0;
+ // Large enough data to require more than one read from UploadDataStream.
+ chunk_.assign(1<<16, 'a');
+ // Use chunked upload to wait for a timer event of progress notification.
+ fetcher_->SetChunkedUpload("application/x-www-form-urlencoded");
+ fetcher_->Start();
+ number_of_chunks_added_ = 1;
+ fetcher_->AppendChunkToUpload(chunk_, false);
+}
+
+void URLFetcherUploadProgressTest::OnURLFetchUploadProgress(
+ const URLFetcher* source, int64 current, int64 total) {
+ // Increasing between 0 and total.
+ EXPECT_LE(0, current);
+ EXPECT_GE(static_cast<int64>(chunk_.size()) * number_of_chunks_added_,
+ current);
+ EXPECT_LE(previous_progress_, current);
+ previous_progress_ = current;
+ EXPECT_EQ(-1, total);
+
+ if (number_of_chunks_added_ < 2) {
+ number_of_chunks_added_ += 1;
+ fetcher_->AppendChunkToUpload(chunk_, true);
+ }
+}
+
+void URLFetcherHeadersTest::OnURLFetchComplete(
+ const URLFetcher* source) {
+ std::string header;
+ EXPECT_TRUE(source->GetResponseHeaders()->GetNormalizedHeader("cache-control",
+ &header));
+ EXPECT_EQ("private", header);
+ URLFetcherTest::OnURLFetchComplete(source);
+}
+
+void URLFetcherSocketAddressTest::OnURLFetchComplete(
+ const URLFetcher* source) {
+ EXPECT_EQ("127.0.0.1", source->GetSocketAddress().host());
+ EXPECT_EQ(expected_port_, source->GetSocketAddress().port());
+ URLFetcherTest::OnURLFetchComplete(source);
+}
+
+// static
+const char* URLFetcherStopOnRedirectTest::kRedirectTarget =
+ "http://redirect.target.com";
+
+URLFetcherStopOnRedirectTest::URLFetcherStopOnRedirectTest()
+ : callback_called_(false) {
+}
+
+URLFetcherStopOnRedirectTest::~URLFetcherStopOnRedirectTest() {
+}
+
+void URLFetcherStopOnRedirectTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ fetcher_->SetStopOnRedirect(true);
+ fetcher_->Start();
+}
+
+void URLFetcherStopOnRedirectTest::OnURLFetchComplete(
+ const URLFetcher* source) {
+ callback_called_ = true;
+ EXPECT_EQ(GURL(kRedirectTarget), source->GetURL());
+ EXPECT_EQ(URLRequestStatus::CANCELED, source->GetStatus().status());
+ EXPECT_EQ(301, source->GetResponseCode());
+ CleanupAfterFetchComplete();
+}
+
+void URLFetcherProtectTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ start_time_ = Time::Now();
+ fetcher_->SetMaxRetries(11);
+ fetcher_->Start();
+}
+
+void URLFetcherProtectTest::OnURLFetchComplete(const URLFetcher* source) {
+ const TimeDelta one_second = TimeDelta::FromMilliseconds(1000);
+ if (source->GetResponseCode() >= 500) {
+ // Now running ServerUnavailable test.
+ // It takes more than 1 second to finish all 11 requests.
+ EXPECT_TRUE(Time::Now() - start_time_ >= one_second);
+ EXPECT_TRUE(source->GetStatus().is_success());
+ std::string data;
+ EXPECT_TRUE(source->GetResponseAsString(&data));
+ EXPECT_FALSE(data.empty());
+ CleanupAfterFetchComplete();
+ } else {
+ // Now running Overload test.
+ static int count = 0;
+ count++;
+ if (count < 20) {
+ fetcher_->SetRequestContext(
+ new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ fetcher_->Start();
+ } else {
+ // We have already sent 20 requests continuously. And we expect that
+ // it takes more than 1 second due to the overload protection settings.
+ EXPECT_TRUE(Time::Now() - start_time_ >= one_second);
+ URLFetcherTest::OnURLFetchComplete(source);
+ }
+ }
+}
+
+void URLFetcherProtectTestPassedThrough::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ fetcher_->SetAutomaticallyRetryOn5xx(false);
+ start_time_ = Time::Now();
+ fetcher_->SetMaxRetries(11);
+ fetcher_->Start();
+}
+
+void URLFetcherProtectTestPassedThrough::OnURLFetchComplete(
+ const URLFetcher* source) {
+ const TimeDelta one_minute = TimeDelta::FromMilliseconds(60000);
+ if (source->GetResponseCode() >= 500) {
+ // Now running ServerUnavailable test.
+ // It should get here on the first attempt, almost immediately, and *not*
+ // attempt to execute all 11 requests (which would take about 2.5 minutes).
+ EXPECT_TRUE(Time::Now() - start_time_ < one_minute);
+ EXPECT_TRUE(source->GetStatus().is_success());
+ // Check that the suggested backoff time is greater than 0.
+ EXPECT_GT(fetcher_->GetBackoffDelay().InMicroseconds(), 0);
+ std::string data;
+ EXPECT_TRUE(source->GetResponseAsString(&data));
+ EXPECT_FALSE(data.empty());
+ } else {
+ // We should not get here!
+ ADD_FAILURE();
+ }
+
+ CleanupAfterFetchComplete();
+}
+
+
+URLFetcherBadHTTPSTest::URLFetcherBadHTTPSTest() {
+ PathService::Get(base::DIR_SOURCE_ROOT, &cert_dir_);
+ cert_dir_ = cert_dir_.AppendASCII("chrome");
+ cert_dir_ = cert_dir_.AppendASCII("test");
+ cert_dir_ = cert_dir_.AppendASCII("data");
+ cert_dir_ = cert_dir_.AppendASCII("ssl");
+ cert_dir_ = cert_dir_.AppendASCII("certificates");
+}
+
+// The "server certificate expired" error should result in automatic
+// cancellation of the request by
+// URLRequest::Delegate::OnSSLCertificateError.
+void URLFetcherBadHTTPSTest::OnURLFetchComplete(
+ const URLFetcher* source) {
+ // This part is different from URLFetcherTest::OnURLFetchComplete
+ // because this test expects the request to be cancelled.
+ EXPECT_EQ(URLRequestStatus::CANCELED, source->GetStatus().status());
+ EXPECT_EQ(ERR_ABORTED, source->GetStatus().error());
+ EXPECT_EQ(-1, source->GetResponseCode());
+ EXPECT_TRUE(source->GetCookies().empty());
+ std::string data;
+ EXPECT_TRUE(source->GetResponseAsString(&data));
+ EXPECT_TRUE(data.empty());
+ CleanupAfterFetchComplete();
+}
+
+void URLFetcherCancelTest::CreateFetcher(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ CancelTestURLRequestContextGetter* context_getter =
+ new CancelTestURLRequestContextGetter(io_message_loop_proxy(),
+ url);
+ fetcher_->SetRequestContext(context_getter);
+ fetcher_->SetMaxRetries(2);
+ fetcher_->Start();
+ // We need to wait for the creation of the URLRequestContext, since we
+ // rely on it being destroyed as a signal to end the test.
+ context_getter->WaitForContextCreation();
+ CancelRequest();
+}
+
+void URLFetcherCancelTest::OnURLFetchComplete(
+ const URLFetcher* source) {
+ // We should have cancelled the request before completion.
+ ADD_FAILURE();
+ CleanupAfterFetchComplete();
+}
+
+void URLFetcherCancelTest::CancelRequest() {
+ delete fetcher_;
+ // The URLFetcher's test context will post a Quit task once it is
+ // deleted. So if this test simply hangs, it means cancellation
+ // did not work.
+}
+
+void URLFetcherMultipleAttemptTest::OnURLFetchComplete(
+ const URLFetcher* source) {
+ EXPECT_TRUE(source->GetStatus().is_success());
+ EXPECT_EQ(200, source->GetResponseCode()); // HTTP OK
+ std::string data;
+ EXPECT_TRUE(source->GetResponseAsString(&data));
+ EXPECT_FALSE(data.empty());
+ if (!data.empty() && data_.empty()) {
+ data_ = data;
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+ fetcher_->Start();
+ } else {
+ EXPECT_EQ(data, data_);
+ CleanupAfterFetchComplete();
+ }
+}
+
+void URLFetcherFileTest::CreateFetcherForFile(const GURL& url,
+ const FilePath& file_path) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+
+ // Use the IO message loop to do the file operations in this test.
+ fetcher_->SaveResponseToFileAtPath(file_path, io_message_loop_proxy());
+ fetcher_->Start();
+}
+
+void URLFetcherFileTest::CreateFetcherForTempFile(const GURL& url) {
+ fetcher_ = new URLFetcherImpl(url, URLFetcher::GET, this);
+ fetcher_->SetRequestContext(new ThrottlingTestURLRequestContextGetter(
+ io_message_loop_proxy(), request_context()));
+
+ // Use the IO message loop to do the file operations in this test.
+ fetcher_->SaveResponseToTemporaryFile(io_message_loop_proxy());
+ fetcher_->Start();
+}
+
+void URLFetcherFileTest::OnURLFetchComplete(const URLFetcher* source) {
+ if (expected_file_error_ == base::PLATFORM_FILE_OK) {
+ EXPECT_TRUE(source->GetStatus().is_success());
+ EXPECT_EQ(source->GetResponseCode(), 200);
+
+ base::PlatformFileError error_code = base::PLATFORM_FILE_OK;
+ EXPECT_FALSE(fetcher_->FileErrorOccurred(&error_code));
+
+ EXPECT_TRUE(source->GetResponseAsFilePath(
+ take_ownership_of_file_, &file_path_));
+
+ EXPECT_TRUE(file_util::ContentsEqual(expected_file_, file_path_));
+ } else {
+ base::PlatformFileError error_code = base::PLATFORM_FILE_OK;
+ EXPECT_TRUE(fetcher_->FileErrorOccurred(&error_code));
+ EXPECT_EQ(expected_file_error_, error_code);
+ }
+ CleanupAfterFetchComplete();
+}
+
+TEST_F(URLFetcherTest, SameThreadsTest) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Create the fetcher on the main thread. Since IO will happen on the main
+ // thread, this will test URLFetcher's ability to do everything on one
+ // thread.
+ CreateFetcher(test_server.GetURL("defaultresponse"));
+
+ MessageLoop::current()->Run();
+}
+
+#if defined(OS_MACOSX)
+// SIGSEGV on Mac: http://crbug.com/60426
+TEST_F(URLFetcherTest, DISABLED_DifferentThreadsTest) {
+#else
+TEST_F(URLFetcherTest, DifferentThreadsTest) {
+#endif
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Create a separate thread that will create the URLFetcher. The current
+ // (main) thread will do the IO, and when the fetch is complete it will
+ // terminate the main thread's message loop; then the other thread's
+ // message loop will be shut down automatically as the thread goes out of
+ // scope.
+ base::Thread t("URLFetcher test thread");
+ ASSERT_TRUE(t.Start());
+ t.message_loop()->PostTask(
+ FROM_HERE,
+ base::Bind(&URLFetcherTest::CreateFetcher,
+ base::Unretained(this),
+ test_server.GetURL("defaultresponse")));
+
+ MessageLoop::current()->Run();
+}
+
+void CancelAllOnIO() {
+ EXPECT_EQ(1, URLFetcherTest::GetNumFetcherCores());
+ URLFetcherImpl::CancelAll();
+ EXPECT_EQ(0, URLFetcherTest::GetNumFetcherCores());
+}
+
+// Tests to make sure CancelAll() will successfully cancel existing URLFetchers.
+TEST_F(URLFetcherTest, CancelAll) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+ EXPECT_EQ(0, GetNumFetcherCores());
+
+ CreateFetcher(test_server.GetURL("defaultresponse"));
+ io_message_loop_proxy()->PostTaskAndReply(
+ FROM_HERE,
+ base::Bind(&CancelAllOnIO),
+ MessageLoop::QuitClosure());
+ MessageLoop::current()->Run();
+ EXPECT_EQ(0, GetNumFetcherCores());
+ delete fetcher_;
+}
+
+#if defined(OS_MACOSX)
+// SIGSEGV on Mac: http://crbug.com/60426
+TEST_F(URLFetcherPostTest, DISABLED_Basic) {
+#else
+TEST_F(URLFetcherPostTest, Basic) {
+#endif
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ CreateFetcher(test_server.GetURL("echo"));
+ MessageLoop::current()->Run();
+}
+
+#if defined(OS_MACOSX)
+// SIGSEGV on Mac: http://crbug.com/60426
+TEST_F(URLFetcherUploadProgressTest, DISABLED_Basic) {
+#else
+TEST_F(URLFetcherUploadProgressTest, Basic) {
+#endif
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ CreateFetcher(test_server.GetURL("echo"));
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherDownloadProgressTest, Basic) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Get a file large enough to require more than one read into
+ // URLFetcher::Core's IOBuffer.
+ static const char kFileToFetch[] = "animate1.gif";
+ file_util::GetFileSize(test_server.document_root().AppendASCII(kFileToFetch),
+ &expected_total_);
+ CreateFetcher(test_server.GetURL(
+ std::string(kTestServerFilePrefix) + kFileToFetch));
+
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherDownloadProgressCancelTest, CancelWhileProgressReport) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Get a file large enough to require more than one read into
+ // URLFetcher::Core's IOBuffer.
+ static const char kFileToFetch[] = "animate1.gif";
+ CreateFetcher(test_server.GetURL(
+ std::string(kTestServerFilePrefix) + kFileToFetch));
+
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherHeadersTest, Headers) {
+ TestServer test_server(
+ TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(FILE_PATH_LITERAL("net/data/url_request_unittest")));
+ ASSERT_TRUE(test_server.Start());
+
+ CreateFetcher(test_server.GetURL("files/with-headers.html"));
+ MessageLoop::current()->Run();
+ // The actual tests are in the URLFetcherHeadersTest fixture.
+}
+
+TEST_F(URLFetcherSocketAddressTest, SocketAddress) {
+ TestServer test_server(
+ TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(FILE_PATH_LITERAL("net/data/url_request_unittest")));
+ ASSERT_TRUE(test_server.Start());
+ expected_port_ = test_server.host_port_pair().port();
+
+ // Reusing "with-headers.html"; the exact file doesn't really matter.
+ CreateFetcher(test_server.GetURL("files/with-headers.html"));
+ MessageLoop::current()->Run();
+ // The actual tests are in the URLFetcherSocketAddressTest fixture.
+}
+
+TEST_F(URLFetcherStopOnRedirectTest, StopOnRedirect) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ CreateFetcher(
+ test_server.GetURL(std::string("server-redirect?") + kRedirectTarget));
+ MessageLoop::current()->Run();
+ EXPECT_TRUE(callback_called_);
+}
+
+TEST_F(URLFetcherProtectTest, Overload) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ GURL url(test_server.GetURL("defaultresponse"));
+
+ // Registers an entry for test url. It only allows 3 requests to be sent
+ // in 200 milliseconds.
+ scoped_refptr<URLRequestThrottlerEntry> entry(
+ new URLRequestThrottlerEntry(
+ request_context()->throttler_manager(),
+ "", 200, 3, 1, 2.0, 0.0, 256));
+ request_context()->throttler_manager()->OverrideEntryForTests(url, entry);
+
+ CreateFetcher(url);
+
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherProtectTest, ServerUnavailable) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ GURL url(test_server.GetURL("files/server-unavailable.html"));
+
+ // Registers an entry for test url. The backoff time is calculated by:
+ // new_backoff = 2.0 * old_backoff + 0
+ // and maximum backoff time is 256 milliseconds.
+ // Maximum retries allowed is set to 11.
+ scoped_refptr<URLRequestThrottlerEntry> entry(
+ new URLRequestThrottlerEntry(
+ request_context()->throttler_manager(),
+ "", 200, 3, 1, 2.0, 0.0, 256));
+ request_context()->throttler_manager()->OverrideEntryForTests(url, entry);
+
+ CreateFetcher(url);
+
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherProtectTestPassedThrough, ServerUnavailablePropagateResponse) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ GURL url(test_server.GetURL("files/server-unavailable.html"));
+
+ // Registers an entry for test url. The backoff time is calculated by:
+ // new_backoff = 2.0 * old_backoff + 0
+ // and maximum backoff time is 150000 milliseconds.
+ // Maximum retries allowed is set to 11.
+ scoped_refptr<URLRequestThrottlerEntry> entry(
+ new URLRequestThrottlerEntry(
+ request_context()->throttler_manager(),
+ "", 200, 3, 100, 2.0, 0.0, 150000));
+ // If automatic retry on 5xx were enabled, the backoff would make the total
+ // time about 150 s. In reality it should be "as soon as the server responds".
+ request_context()->throttler_manager()->OverrideEntryForTests(url, entry);
+
+ CreateFetcher(url);
+
+ MessageLoop::current()->Run();
+}
+
+#if defined(OS_MACOSX)
+// SIGSEGV on Mac: http://crbug.com/60426
+TEST_F(URLFetcherBadHTTPSTest, DISABLED_BadHTTPSTest) {
+#else
+TEST_F(URLFetcherBadHTTPSTest, BadHTTPSTest) {
+#endif
+ TestServer::HTTPSOptions https_options(
+ TestServer::HTTPSOptions::CERT_EXPIRED);
+ TestServer test_server(https_options, FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ CreateFetcher(test_server.GetURL("defaultresponse"));
+ MessageLoop::current()->Run();
+}
+
+#if defined(OS_MACOSX)
+// SIGSEGV on Mac: http://crbug.com/60426
+TEST_F(URLFetcherCancelTest, DISABLED_ReleasesContext) {
+#else
+TEST_F(URLFetcherCancelTest, ReleasesContext) {
+#endif
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ GURL url(test_server.GetURL("files/server-unavailable.html"));
+
+ // Create a separate thread that will create the URLFetcher. The current
+ // (main) thread will do the IO, and when the fetch is complete it will
+ // terminate the main thread's message loop; then the other thread's
+ // message loop will be shut down automatically as the thread goes out of
+ // scope.
+ base::Thread t("URLFetcher test thread");
+ ASSERT_TRUE(t.Start());
+ t.message_loop()->PostTask(
+ FROM_HERE,
+ base::Bind(&URLFetcherCancelTest::CreateFetcher,
+ base::Unretained(this), url));
+
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherCancelTest, CancelWhileDelayedStartTaskPending) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ GURL url(test_server.GetURL("files/server-unavailable.html"));
+
+ // Register an entry for test url.
+ // Using a sliding window of 4 seconds, and max of 1 request, under a fast
+ // run we expect to have a 4 second delay when posting the Start task.
+ scoped_refptr<URLRequestThrottlerEntry> entry(
+ new URLRequestThrottlerEntry(
+ request_context()->throttler_manager(),
+ "", 4000, 1, 2000, 2.0, 0.0, 4000));
+ request_context()->throttler_manager()->OverrideEntryForTests(url, entry);
+ // Fake that a request has just started.
+ entry->ReserveSendingTimeForNextRequest(base::TimeTicks());
+
+ // The next request we try to send will be delayed by ~4 seconds.
+ // The slower the test runs, the less the delay will be (since it takes the
+ // time difference from now).
+
+ base::Thread t("URLFetcher test thread");
+ ASSERT_TRUE(t.Start());
+ t.message_loop()->PostTask(
+ FROM_HERE,
+ base::Bind(&URLFetcherTest::CreateFetcher, base::Unretained(this), url));
+
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherMultipleAttemptTest, SameData) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Create the fetcher on the main thread. Since IO will happen on the main
+ // thread, this will test URLFetcher's ability to do everything on one
+ // thread.
+ CreateFetcher(test_server.GetURL("defaultresponse"));
+
+ MessageLoop::current()->Run();
+}
+
+TEST_F(URLFetcherFileTest, SmallGet) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ ScopedTempDir temp_dir;
+ ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+
+ // Get a small file.
+ static const char kFileToFetch[] = "simple.html";
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ CreateFetcherForFile(
+ test_server.GetURL(std::string(kTestServerFilePrefix) + kFileToFetch),
+ temp_dir.path().AppendASCII(kFileToFetch));
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+
+ ASSERT_FALSE(file_util::PathExists(file_path_))
+ << file_path_.value() << " not removed.";
+}
+
+TEST_F(URLFetcherFileTest, LargeGet) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ ScopedTempDir temp_dir;
+ ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+
+ // Get a file large enough to require more than one read into
+ // URLFetcher::Core's IOBuffer.
+ static const char kFileToFetch[] = "animate1.gif";
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ CreateFetcherForFile(
+ test_server.GetURL(std::string(kTestServerFilePrefix) + kFileToFetch),
+ temp_dir.path().AppendASCII(kFileToFetch));
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+}
+
+TEST_F(URLFetcherFileTest, CanTakeOwnershipOfFile) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ ScopedTempDir temp_dir;
+ ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+
+ // Get a small file.
+ static const char kFileToFetch[] = "simple.html";
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ CreateFetcherForFile(
+ test_server.GetURL(std::string(kTestServerFilePrefix) + kFileToFetch),
+ temp_dir.path().AppendASCII(kFileToFetch));
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+
+ MessageLoop::current()->RunAllPending();
+ ASSERT_FALSE(file_util::PathExists(file_path_))
+ << file_path_.value() << " not removed.";
+}
+
+
+TEST_F(URLFetcherFileTest, OverwriteExistingFile) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ ScopedTempDir temp_dir;
+ ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+
+ // Create a file before trying to fetch.
+ static const char kFileToFetch[] = "simple.html";
+ static const char kData[] = "abcdefghijklmnopqrstuvwxyz";
+ file_path_ = temp_dir.path().AppendASCII(kFileToFetch);
+ const int data_size = arraysize(kData);
+ ASSERT_EQ(file_util::WriteFile(file_path_, kData, data_size), data_size);
+ ASSERT_TRUE(file_util::PathExists(file_path_));
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ ASSERT_FALSE(file_util::ContentsEqual(file_path_, expected_file_));
+
+ // Get a small file.
+ CreateFetcherForFile(
+ test_server.GetURL(std::string(kTestServerFilePrefix) + kFileToFetch),
+ file_path_);
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+}
+
+TEST_F(URLFetcherFileTest, TryToOverwriteDirectory) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ ScopedTempDir temp_dir;
+ ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+
+ // Create a directory before trying to fetch.
+ static const char kFileToFetch[] = "simple.html";
+ file_path_ = temp_dir.path().AppendASCII(kFileToFetch);
+ ASSERT_TRUE(file_util::CreateDirectory(file_path_));
+ ASSERT_TRUE(file_util::PathExists(file_path_));
+
+ // Get a small file.
+ expected_file_error_ = base::PLATFORM_FILE_ERROR_ACCESS_DENIED;
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ CreateFetcherForFile(
+ test_server.GetURL(std::string(kTestServerFilePrefix) + kFileToFetch),
+ file_path_);
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+
+ MessageLoop::current()->RunAllPending();
+}
+
+TEST_F(URLFetcherFileTest, SmallGetToTempFile) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Get a small file.
+ static const char kFileToFetch[] = "simple.html";
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ CreateFetcherForTempFile(
+ test_server.GetURL(std::string(kTestServerFilePrefix) + kFileToFetch));
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+
+ ASSERT_FALSE(file_util::PathExists(file_path_))
+ << file_path_.value() << " not removed.";
+}
+
+TEST_F(URLFetcherFileTest, LargeGetToTempFile) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Get a file large enough to require more than one read into
+ // URLFetcher::Core's IOBuffer.
+ static const char kFileToFetch[] = "animate1.gif";
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ CreateFetcherForTempFile(test_server.GetURL(
+ std::string(kTestServerFilePrefix) + kFileToFetch));
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+}
+
+TEST_F(URLFetcherFileTest, CanTakeOwnershipOfTempFile) {
+ TestServer test_server(TestServer::TYPE_HTTP,
+ TestServer::kLocalhost,
+ FilePath(kDocRoot));
+ ASSERT_TRUE(test_server.Start());
+
+ // Get a small file.
+ static const char kFileToFetch[] = "simple.html";
+ expected_file_ = test_server.document_root().AppendASCII(kFileToFetch);
+ CreateFetcherForTempFile(test_server.GetURL(
+ std::string(kTestServerFilePrefix) + kFileToFetch));
+
+ MessageLoop::current()->Run(); // OnURLFetchComplete() will Quit().
+
+ MessageLoop::current()->RunAllPending();
+ ASSERT_FALSE(file_util::PathExists(file_path_))
+ << file_path_.value() << " not removed.";
+}
+
+} // namespace
+
+} // namespace net
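The throttler entries used throughout these tests follow the formula quoted in their comments, new_backoff = multiply_factor * old_backoff + additive_constant, clamped to a maximum. Here is a small standalone sketch of that progression using the CancelTest parameters (2 s initial backoff, factor 2.0, constant 0, 4 s cap); it only illustrates the arithmetic, not URLRequestThrottlerEntry itself.

#include <algorithm>
#include <cstdio>

int main() {
  // Parameters taken from the CancelTest throttler comment above.
  double backoff_ms = 2000.0;             // initial backoff
  const double kMultiplyFactor = 2.0;
  const double kAdditiveConstantMs = 0.0;
  const double kMaximumBackoffMs = 4000.0;
  for (int failure = 1; failure <= 4; ++failure) {
    std::printf("after failure %d: wait %.0f ms\n", failure, backoff_ms);
    backoff_ms = std::min(kMultiplyFactor * backoff_ms + kAdditiveConstantMs,
                          kMaximumBackoffMs);
  }
  return 0;  // Prints 2000, 4000, 4000, 4000 ms: doubling, then clamped.
}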