author    ericroman@google.com <ericroman@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2008-12-10 09:03:15 +0000
committer ericroman@google.com <ericroman@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2008-12-10 09:03:15 +0000
commit    677c9057fafb4c263f001ded0e173075a945a4c2 (patch)
tree      ca4b253831c082fe4a7ccaba2ba3cae6f264615a /net/proxy
parent    1b9cad81c98ec37ce2f6b4c3ef47571efc61bd3d (diff)
Add a ProxyScriptFetcher class for doing asynch downloads of PAC scripts.

This object will be owned by ProxyService. It will be used to manage the
fetching of PAC scripts (on the IO thread, using the primary
URLRequestContext).

BUG=74,2764 (partial)
Review URL: http://codereview.chromium.org/13251

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@6699 0039d316-1c4b-4281-b951-d872f2087c98
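[Editor's note] For orientation, a minimal sketch of how a consumer might drive the new interface. The |PacLoader| class and its members are hypothetical; the fetcher API comes from net/proxy/proxy_script_fetcher.h in this change, and the callback plumbing mirrors SynchFetcherThreadHelper in the unittest below.

#include <string>

#include "base/compiler_specific.h"
#include "base/scoped_ptr.h"
#include "net/base/completion_callback.h"
#include "net/proxy/proxy_script_fetcher.h"

class PacLoader {
 public:
  // |context| must outlive this object, per the ProxyScriptFetcher contract.
  explicit PacLoader(URLRequestContext* context)
      : fetcher_(net::ProxyScriptFetcher::Create(context)),
        ALLOW_THIS_IN_INITIALIZER_LIST(
            callback_(this, &PacLoader::OnFetched)) {}

  // Only one fetch may be outstanding at a time.
  void Load(const GURL& pac_url) {
    fetcher_->Fetch(pac_url, &pac_bytes_, &callback_);
  }

 private:
  void OnFetched(int result) {
    // On net::OK, |pac_bytes_| holds the script text; on any error the
    // fetcher has already cleared it.
  }

  scoped_ptr<net::ProxyScriptFetcher> fetcher_;
  std::string pac_bytes_;
  net::CompletionCallbackImpl<PacLoader> callback_;
};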
Diffstat (limited to 'net/proxy')
-rw-r--r--  net/proxy/proxy_script_fetcher.cc           301
-rw-r--r--  net/proxy/proxy_script_fetcher.h             67
-rw-r--r--  net/proxy/proxy_script_fetcher_unittest.cc  294
3 files changed, 662 insertions, 0 deletions
diff --git a/net/proxy/proxy_script_fetcher.cc b/net/proxy/proxy_script_fetcher.cc
new file mode 100644
index 0000000..4f9f603
--- /dev/null
+++ b/net/proxy/proxy_script_fetcher.cc
@@ -0,0 +1,301 @@
+// Copyright (c) 2008 The Chromium Authors. All rights reserved. Use of this
+// source code is governed by a BSD-style license that can be found in the
+// LICENSE file.
+
+#include "net/proxy/proxy_script_fetcher.h"
+
+#include "base/compiler_specific.h"
+#include "base/message_loop.h"
+#include "base/string_util.h"
+#include "net/base/load_flags.h"
+#include "net/url_request/url_request.h"
+
+// TODO(eroman):
+// - Support auth-prompts.
+
+namespace net {
+
+namespace {
+
+// The maximum size (in bytes) allowed for a PAC script. Responses exceeding
+// this will fail with ERR_FILE_TOO_BIG.
+int max_response_bytes = 1048576; // 1 megabyte
+
+// The maximum duration (in milliseconds) allowed for fetching the PAC script.
+// Responses exceeding this will fail with ERR_TIMED_OUT.
+int max_duration_ms = 300000; // 5 minutes
+
+} // namespace
+
+class ProxyScriptFetcherImpl : public ProxyScriptFetcher,
+ public URLRequest::Delegate {
+ public:
+ // Creates a ProxyScriptFetcher that issues requests through
+ // |url_request_context|. |url_request_context| must remain valid for the
+ // lifetime of ProxyScriptFetcherImpl.
+ explicit ProxyScriptFetcherImpl(URLRequestContext* url_request_context);
+
+ virtual ~ProxyScriptFetcherImpl();
+
+ // ProxyScriptFetcher methods:
+
+ virtual void Fetch(const GURL& url, std::string* bytes,
+ CompletionCallback* callback);
+ virtual void Cancel();
+
+ // URLRequest::Delegate methods:
+
+ virtual void OnAuthRequired(URLRequest* request,
+ AuthChallengeInfo* auth_info);
+ virtual void OnSSLCertificateError(URLRequest* request, int cert_error,
+ X509Certificate* cert);
+ virtual void OnReceivedRedirect(URLRequest* request, const GURL& to_url);
+ virtual void OnResponseStarted(URLRequest* request);
+ virtual void OnReadCompleted(URLRequest* request, int num_bytes);
+ virtual void OnResponseCompleted(URLRequest* request);
+
+ private:
+ // Read more bytes from the response.
+ void ReadBody(URLRequest* request);
+
+ // Called once the request has completed to notify the caller of
+ // |response_code_| and |response_bytes_|.
+ void FetchCompleted();
+
+ // Clear out the state for the current request.
+ void ResetCurRequestState();
+
+ // Callback for time-out task of request with id |id|.
+ void OnTimeout(int id);
+
+ // Factory for creating the time-out task. This takes care of revoking
+ // outstanding tasks when |this| is deleted.
+ ScopedRunnableMethodFactory<ProxyScriptFetcherImpl> task_factory_;
+
+ // The context used for making network requests.
+ URLRequestContext* url_request_context_;
+
+ // Buffer that URLRequest writes into.
+ enum { kBufSize = 4096 };
+ char buf_[kBufSize];
+
+ // The next ID to use for |cur_request_| (monotonically increasing).
+ int next_id_;
+
+ // The current (in progress) request, or NULL.
+ scoped_ptr<URLRequest> cur_request_;
+
+ // State for current request (only valid when |cur_request_| is not NULL):
+
+ // Unique ID for the current request.
+ int cur_request_id_;
+
+ // Callback to invoke on completion of the fetch.
+ CompletionCallback* callback_;
+
+ // Holds the error condition that was hit on the current request, or OK.
+ int result_code_;
+
+ // Holds the bytes read so far. Will not exceed |max_response_bytes|. This
+ // buffer is owned by the owner of |callback|.
+ std::string* result_bytes_;
+};
+
+ProxyScriptFetcherImpl::ProxyScriptFetcherImpl(
+ URLRequestContext* url_request_context)
+ : ALLOW_THIS_IN_INITIALIZER_LIST(task_factory_(this)),
+ url_request_context_(url_request_context),
+ next_id_(0),
+ cur_request_(NULL),
+ cur_request_id_(0),
+ callback_(NULL),
+ result_code_(OK),
+ result_bytes_(NULL) {
+ DCHECK(url_request_context);
+}
+
+ProxyScriptFetcherImpl::~ProxyScriptFetcherImpl() {
+ // The URLRequest's destructor will cancel the outstanding request, and
+ // ensure that the delegate (this) is not called again.
+}
+
+void ProxyScriptFetcherImpl::Fetch(const GURL& url,
+ std::string* bytes,
+ CompletionCallback* callback) {
+ // It is invalid to call Fetch() while a request is already in progress.
+ DCHECK(!cur_request_.get());
+
+ DCHECK(callback);
+ DCHECK(bytes);
+
+ cur_request_.reset(new URLRequest(url, this));
+ cur_request_->set_context(url_request_context_);
+ cur_request_->set_method("GET");
+
+ // Make sure that the PAC script is downloaded using a direct connection,
+ // to avoid circular dependencies (fetching is a part of proxy resolution).
+ cur_request_->set_load_flags(LOAD_BYPASS_PROXY);
+
+ // Save the caller's info for notification on completion.
+ callback_ = callback;
+ result_bytes_ = bytes;
+ result_bytes_->clear();
+
+ // Post a task to timeout this request if it takes too long.
+ cur_request_id_ = ++next_id_;
+ MessageLoop::current()->PostDelayedTask(FROM_HERE,
+ task_factory_.NewRunnableMethod(&ProxyScriptFetcherImpl::OnTimeout,
+ cur_request_id_),
+ static_cast<int>(max_duration_ms));
+
+ // Start the request.
+ cur_request_->Start();
+}
+
+void ProxyScriptFetcherImpl::Cancel() {
+ // ResetCurRequestState will free the URLRequest, which will cause
+ // cancellation.
+ ResetCurRequestState();
+}
+
+void ProxyScriptFetcherImpl::OnAuthRequired(URLRequest* request,
+ AuthChallengeInfo* auth_info) {
+ DCHECK(request == cur_request_.get());
+ // TODO(eroman):
+ result_code_ = ERR_NOT_IMPLEMENTED;
+ request->CancelAuth();
+}
+
+void ProxyScriptFetcherImpl::OnSSLCertificateError(URLRequest* request,
+ int cert_error,
+ X509Certificate* cert) {
+ DCHECK(request == cur_request_.get());
+ // Certificate errors are in the same space as net errors.
+ result_code_ = cert_error;
+ request->Cancel();
+}
+
+void ProxyScriptFetcherImpl::OnReceivedRedirect(URLRequest* request,
+ const GURL& to_url) {
+ DCHECK(request == cur_request_.get());
+ // OK, thanks for telling.
+}
+
+void ProxyScriptFetcherImpl::OnResponseStarted(URLRequest* request) {
+ DCHECK(request == cur_request_.get());
+
+ if (!request->status().is_success()) {
+ OnResponseCompleted(request);
+ return;
+ }
+
+ // Require HTTP responses to have a success status code.
+ if (request->url().SchemeIs("http") || request->url().SchemeIs("https")) {
+ // NOTE about mime types: We do not enforce mime types on PAC files.
+ // This is for compatibility with {IE 7, Firefox 3, Opera 9.5}
+
+ // NOTE about status codes: We are like Firefox 3 in this respect.
+ // {IE 7, Safari 3, Opera 9.5} do not care about the status code.
+ if (request->GetResponseCode() != 200) {
+ result_code_ = ERR_PAC_STATUS_NOT_OK;
+ request->Cancel();
+ return;
+ }
+ }
+
+ ReadBody(request);
+}
+
+void ProxyScriptFetcherImpl::OnReadCompleted(URLRequest* request,
+ int num_bytes) {
+ DCHECK(request == cur_request_.get());
+ if (num_bytes > 0) {
+ // Enforce maximum size bound.
+ if (num_bytes + result_bytes_->size() >
+ static_cast<size_t>(max_response_bytes)) {
+ result_code_ = ERR_FILE_TOO_BIG;
+ request->Cancel();
+ return;
+ }
+ result_bytes_->append(buf_, num_bytes);
+ ReadBody(request);
+ } else { // Error while reading, or EOF
+ OnResponseCompleted(request);
+ }
+}
+
+void ProxyScriptFetcherImpl::OnResponseCompleted(URLRequest* request) {
+ DCHECK(request == cur_request_.get());
+
+ // Use |result_code_| as the request's error if we have already set it to
+ // something specific.
+ if (result_code_ == OK && !request->status().is_success())
+ result_code_ = request->status().os_error();
+
+ FetchCompleted();
+}
+
+void ProxyScriptFetcherImpl::ReadBody(URLRequest* request) {
+ int num_bytes;
+ if (request->Read(buf_, kBufSize, &num_bytes)) {
+ OnReadCompleted(request, num_bytes);
+ } else if (!request->status().is_io_pending()) {
+ // Read failed synchronously.
+ OnResponseCompleted(request);
+ }
+}
+
+void ProxyScriptFetcherImpl::FetchCompleted() {
+ // On error, the caller expects empty string for bytes.
+ if (result_code_ != OK)
+ result_bytes_->clear();
+
+ int result_code = result_code_;
+ CompletionCallback* callback = callback_;
+
+ ResetCurRequestState();
+
+ callback->Run(result_code);
+}
+
+void ProxyScriptFetcherImpl::ResetCurRequestState() {
+ cur_request_.reset();
+ cur_request_id_ = 0;
+ callback_ = NULL;
+ result_code_ = OK;
+ result_bytes_ = NULL;
+}
+
+void ProxyScriptFetcherImpl::OnTimeout(int id) {
+ // Timeout tasks may outlive the URLRequest they reference. Make sure it
+ // is still applicable.
+ if (cur_request_id_ != id)
+ return;
+
+ DCHECK(cur_request_.get());
+ result_code_ = ERR_TIMED_OUT;
+ cur_request_->Cancel();
+}
+
+// static
+ProxyScriptFetcher* ProxyScriptFetcher::Create(
+ URLRequestContext* url_request_context) {
+ return new ProxyScriptFetcherImpl(url_request_context);
+}
+
+// static
+int ProxyScriptFetcher::SetTimeoutConstraintForUnittest(
+ int timeout_ms) {
+ int prev = max_duration_ms;
+ max_duration_ms = timeout_ms;
+ return prev;
+}
+
+// static
+size_t ProxyScriptFetcher::SetSizeConstraintForUnittest(size_t size_bytes) {
+ size_t prev = max_response_bytes;
+ max_response_bytes = size_bytes;
+ return prev;
+}
+
+} // namespace net
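[Editor's note] The timeout handling above is worth calling out: rather than revoking the delayed task when a fetch finishes early, each fetch is stamped with a monotonically increasing id, and a timeout task that fires for a stale id simply no-ops. A distilled sketch of that pattern, with illustrative names that are not part of the change:

// Guard for delayed "deadline" tasks: a task captures the id handed out by
// Start(), and StillCurrent() tells a firing task whether its operation is
// still the live one.
class DeadlineGuard {
 public:
  DeadlineGuard() : next_id_(0), cur_id_(0) {}

  // Begin an operation; returns the id the timeout task should capture.
  int Start() { return cur_id_ = ++next_id_; }

  // End the current operation (success, failure, or cancellation).
  void Finish() { cur_id_ = 0; }

  // Called when the delayed task fires; true if the timeout still applies.
  bool StillCurrent(int id) const { return cur_id_ == id; }

 private:
  int next_id_;  // Monotonically increasing, so ids are never reused.
  int cur_id_;   // Id of the in-flight operation, or 0 if none.
};

In the fetcher itself, ScopedRunnableMethodFactory additionally revokes any tasks still pending when the object is destroyed, so a timeout task never touches a deleted fetcher.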
diff --git a/net/proxy/proxy_script_fetcher.h b/net/proxy/proxy_script_fetcher.h
new file mode 100644
index 0000000..fddbd7b
--- /dev/null
+++ b/net/proxy/proxy_script_fetcher.h
@@ -0,0 +1,67 @@
+// Copyright (c) 2008 The Chromium Authors. All rights reserved. Use of this
+// source code is governed by a BSD-style license that can be found in the
+// LICENSE file.
+
+// ProxyScriptFetcher is an async interface for fetching a proxy auto config
+// script. It is specific to fetching PAC scripts: it enforces a timeout, a
+// maximum response size, and an HTTP status-code check.
+
+#ifndef NET_PROXY_PROXY_SCRIPT_FETCHER_H_
+#define NET_PROXY_PROXY_SCRIPT_FETCHER_H_
+
+#include "net/base/completion_callback.h"
+#include "testing/gtest/include/gtest/gtest_prod.h"
+
+class GURL;
+class URLRequestContext;
+
+namespace net {
+
+class ProxyScriptFetcher {
+ public:
+ // Destruction should cancel any outstanding requests.
+ virtual ~ProxyScriptFetcher() {}
+
+ // Downloads the given PAC URL, and invokes |callback| on completion.
+ // On success |callback| is executed with a result code of OK, and a
+ // string of the response bytes. On failure, |bytes| is set to the empty
+ // string, and the result code is a network error. Some special network
+ // errors that may occur are:
+ //
+ // ERR_TIMED_OUT -- the fetch took too long to complete.
+ // ERR_FILE_TOO_BIG -- the response's body was too large.
+ // ERR_PAC_STATUS_NOT_OK -- non-200 HTTP status code.
+ // ERR_NOT_IMPLEMENTED -- the response required authentication.
+ //
+ // If the request is cancelled (either using the "Cancel()" method or by
+ // deleting |this|), then no callback is invoked.
+ //
+ // Only one fetch is allowed to be outstanding at a time.
+ virtual void Fetch(const GURL& url, std::string* bytes,
+ CompletionCallback* callback) = 0;
+
+ // Aborts the in-progress fetch (if any).
+ virtual void Cancel() = 0;
+
+ // Create a ProxyScriptFetcher that uses |url_request_context|.
+ static ProxyScriptFetcher* Create(URLRequestContext* url_request_context);
+
+ // --------------------------------------------------------------------------
+ // Testing helpers (only available to unit-tests).
+ // --------------------------------------------------------------------------
+ private:
+ FRIEND_TEST(ProxyScriptFetcherTest, Hang);
+ FRIEND_TEST(ProxyScriptFetcherTest, TooLarge);
+
+ // Sets the maximum duration for a fetch to |timeout_ms|. Returns the previous
+ // bound.
+ static int SetTimeoutConstraintForUnittest(int timeout_ms);
+
+ // Sets the maximum response size for a fetch to |size_bytes|. Returns the
+ // previous bound.
+ static size_t SetSizeConstraintForUnittest(size_t size_bytes);
+};
+
+} // namespace net
+
+#endif // NET_PROXY_PROXY_SCRIPT_FETCHER_H_
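[Editor's note] Since ProxyScriptFetcher is otherwise a pure interface, code that consumes one can be unit-tested with a stub. A possible fake, shown only as an illustration; it is not part of this change, and unlike the real fetcher it completes synchronously:

#include <string>

#include "net/base/completion_callback.h"
#include "net/base/net_errors.h"
#include "net/proxy/proxy_script_fetcher.h"

// Hypothetical fake that completes immediately with canned bytes and net::OK.
class FakeProxyScriptFetcher : public net::ProxyScriptFetcher {
 public:
  explicit FakeProxyScriptFetcher(const std::string& script)
      : script_(script) {}

  virtual void Fetch(const GURL& url, std::string* bytes,
                     net::CompletionCallback* callback) {
    *bytes = script_;
    callback->Run(net::OK);
  }

  virtual void Cancel() {}

 private:
  std::string script_;
};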
diff --git a/net/proxy/proxy_script_fetcher_unittest.cc b/net/proxy/proxy_script_fetcher_unittest.cc
new file mode 100644
index 0000000..73476ed
--- /dev/null
+++ b/net/proxy/proxy_script_fetcher_unittest.cc
@@ -0,0 +1,294 @@
+// Copyright (c) 2008 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "net/proxy/proxy_script_fetcher.h"
+
+#include "base/file_path.h"
+#include "base/compiler_specific.h"
+#include "base/path_service.h"
+#include "net/base/net_util.h"
+#include "net/url_request/url_request_unittest.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+// TODO(eroman):
+// - Test canceling an outstanding request.
+// - Test deleting ProxyScriptFetcher while a request is in progress.
+
+const wchar_t kDocRoot[] = L"net/data/proxy_script_fetcher_unittest";
+
+struct FetchResult {
+ int code;
+ std::string bytes;
+};
+
+// A non-mock URLRequestContext which can access http:// and file:// URLs.
+class RequestContext : public URLRequestContext {
+ public:
+ RequestContext() {
+ net::ProxyInfo no_proxy;
+ proxy_service_ = net::ProxyService::Create(&no_proxy);
+ http_transaction_factory_ = net::HttpNetworkLayer::CreateFactory(
+ proxy_service_);
+ }
+ ~RequestContext() {
+ delete http_transaction_factory_;
+ delete proxy_service_;
+ }
+};
+
+// Helper for doing synch fetches. This object lives in SynchFetcher's
+// |io_thread_| and communicates with SynchFetcher through (|result|, |event|).
+class SynchFetcherThreadHelper {
+ public:
+ SynchFetcherThreadHelper(base::WaitableEvent* event, FetchResult* result)
+ : event_(event),
+ fetch_result_(result),
+ url_request_context_(NULL),
+ fetcher_(NULL),
+ ALLOW_THIS_IN_INITIALIZER_LIST(
+ callback_(this, &SynchFetcherThreadHelper::OnFetchCompletion)) {
+ url_request_context_ = new RequestContext;
+ fetcher_.reset(net::ProxyScriptFetcher::Create(url_request_context_.get()));
+ }
+
+ // Starts fetching the script at |url|. Upon completion |event_| will be
+ // signalled, and the bytes read will have been written to |fetch_result_|.
+ void Start(const GURL& url) {
+ fetcher_->Fetch(url, &fetch_result_->bytes, &callback_);
+ }
+
+ void OnFetchCompletion(int result) {
+ fetch_result_->code = result;
+ event_->Signal();
+ }
+
+ private:
+ base::WaitableEvent* event_;
+ FetchResult* fetch_result_;
+
+ scoped_refptr<URLRequestContext> url_request_context_;
+
+ scoped_ptr<net::ProxyScriptFetcher> fetcher_;
+ net::CompletionCallbackImpl<SynchFetcherThreadHelper> callback_;
+};
+
+// Helper that wraps ProxyScriptFetcher::Fetch() with a synchronous interface.
+// It executes Fetch() on a helper thread (IO_Thread).
+class SynchFetcher {
+ public:
+ SynchFetcher()
+ : event_(false, false),
+ io_thread_("IO_Thread"),
+ thread_helper_(NULL) {
+ // Start an IO thread.
+ base::Thread::Options options;
+ options.message_loop_type = MessageLoop::TYPE_IO;
+ io_thread_.StartWithOptions(options);
+
+ // Initialize the state in |io_thread_|.
+ io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
+ this, &SynchFetcher::Init));
+ Wait();
+ }
+
+ ~SynchFetcher() {
+ // Tear down the state in |io_thread_|.
+ io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
+ this, &SynchFetcher::Cleanup));
+ Wait();
+ }
+
+ // Synchronously fetch the url.
+ FetchResult Fetch(const GURL& url) {
+ io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
+ this, &SynchFetcher::AsynchFetch, url));
+ Wait();
+ return fetch_result_;
+ }
+
+ private:
+ // [Runs on |io_thread_|] Allocates the URLRequestContext and the
+ // ProxyScriptFetcher, which live inside |thread_helper_|.
+ void Init() {
+ thread_helper_ = new SynchFetcherThreadHelper(&event_, &fetch_result_);
+ event_.Signal();
+ }
+
+ // [Runs on |io_thread_|] Signals |event_| on completion.
+ void AsynchFetch(const GURL& url) {
+ thread_helper_->Start(url);
+ }
+
+ // [Runs on |io_thread_|] Signals |event_| on cleanup completion.
+ void Cleanup() {
+ delete thread_helper_;
+ thread_helper_ = NULL;
+ MessageLoop::current()->RunAllPending();
+ event_.Signal();
+ }
+
+ void Wait() {
+ event_.Wait();
+ event_.Reset();
+ }
+
+ base::WaitableEvent event_;
+ base::Thread io_thread_;
+ FetchResult fetch_result_;
+ // Holds all the state that lives on the IO thread, for easy cleanup.
+ SynchFetcherThreadHelper* thread_helper_;
+};
+
+// Template specialization so SynchFetcher does not have to be refcounted.
+template<>
+void RunnableMethodTraits<SynchFetcher>::RetainCallee(SynchFetcher* remover) {}
+template<>
+void RunnableMethodTraits<SynchFetcher>::ReleaseCallee(SynchFetcher* remover) {}
+
+// Required to be in net namespace by FRIEND_TEST.
+namespace net {
+
+// Get a file:// URL relative to net/data/proxy_script_fetcher_unittest.
+GURL GetTestFileUrl(const std::string& relpath) {
+ FilePath path;
+ PathService::Get(base::DIR_SOURCE_ROOT, &path);
+ path = path.Append(FILE_PATH_LITERAL("net"));
+ path = path.Append(FILE_PATH_LITERAL("data"));
+ path = path.Append(FILE_PATH_LITERAL("proxy_script_fetcher_unittest"));
+ GURL base_url = net::FilePathToFileURL(path);
+ return GURL(base_url.spec() + "/" + relpath);
+}
+
+TEST(ProxyScriptFetcherTest, FileUrl) {
+ SynchFetcher pac_fetcher;
+
+ { // Fetch a non-existent file.
+ FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"));
+ EXPECT_EQ(net::ERR_FILE_NOT_FOUND, result.code);
+ EXPECT_TRUE(result.bytes.empty());
+ }
+ { // Fetch a file that exists.
+ FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"));
+ EXPECT_EQ(net::OK, result.code);
+ EXPECT_EQ("-pac.txt-\n", result.bytes);
+ }
+}
+
+// Note that all mime types are allowed for PAC files, to be consistent
+// with other browsers.
+TEST(ProxyScriptFetcherTest, HttpMimeType) {
+ TestServer server(kDocRoot);
+ SynchFetcher pac_fetcher;
+
+ { // Fetch a PAC with mime type "text/plain"
+ GURL url = server.TestServerPage("files/pac.txt");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::OK, result.code);
+ EXPECT_EQ("-pac.txt-\n", result.bytes);
+ }
+ { // Fetch a PAC with mime type "text/html"
+ GURL url = server.TestServerPage("files/pac.html");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::OK, result.code);
+ EXPECT_EQ("-pac.html-\n", result.bytes);
+ }
+ { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig"
+ GURL url = server.TestServerPage("files/pac.nsproxy");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::OK, result.code);
+ EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+ }
+}
+
+TEST(ProxyScriptFetcherTest, HttpStatusCode) {
+ TestServer server(kDocRoot);
+ SynchFetcher pac_fetcher;
+
+ { // Fetch a PAC which gives a 500 -- FAIL
+ GURL url = server.TestServerPage("files/500.pac");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code);
+ EXPECT_TRUE(result.bytes.empty());
+ }
+ { // Fetch a PAC which gives a 404 -- FAIL
+ GURL url = server.TestServerPage("files/404.pac");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code);
+ EXPECT_TRUE(result.bytes.empty());
+ }
+}
+
+TEST(ProxyScriptFetcherTest, ContentDisposition) {
+ TestServer server(kDocRoot);
+ SynchFetcher pac_fetcher;
+
+ // Fetch PAC scripts via HTTP with a Content-Disposition header -- should
+ // have no effect.
+ GURL url = server.TestServerPage("files/downloadable.pac");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::OK, result.code);
+ EXPECT_EQ("-downloadable.pac-\n", result.bytes);
+}
+
+TEST(ProxyScriptFetcherTest, TooLarge) {
+ TestServer server(kDocRoot);
+ SynchFetcher pac_fetcher;
+
+ // Set the maximum response size to 50 bytes.
+ int prev_size = net::ProxyScriptFetcher::SetSizeConstraintForUnittest(50);
+
+ // These two URLs are the same file, but are http:// vs file://
+ GURL urls[] = {
+ server.TestServerPage("files/large-pac.nsproxy"),
+ GetTestFileUrl("large-pac.nsproxy")
+ };
+
+ // Try fetching URLs that are 101 bytes large. We should abort the request
+ // after 50 bytes have been read, and fail with a too large error.
+ for (size_t i = 0; i < arraysize(urls); ++i) {
+ const GURL& url = urls[i];
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::ERR_FILE_TOO_BIG, result.code);
+ EXPECT_TRUE(result.bytes.empty());
+ }
+
+ // Restore the original size bound.
+ net::ProxyScriptFetcher::SetSizeConstraintForUnittest(prev_size);
+
+ { // Make sure we can still fetch regular URLs.
+ GURL url = server.TestServerPage("files/pac.nsproxy");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::OK, result.code);
+ EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+ }
+}
+
+TEST(ProxyScriptFetcherTest, Hang) {
+ TestServer server(kDocRoot);
+ SynchFetcher pac_fetcher;
+
+ // Set the timeout period to 0.5 seconds.
+ int prev_timeout =
+ net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(500);
+
+ // Try fetching a URL which takes 1.2 seconds. We should abort the request
+ // after 500 ms, and fail with a timeout error.
+ { GURL url = server.TestServerPage("slow/proxy.pac?1.2");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::ERR_TIMED_OUT, result.code);
+ EXPECT_TRUE(result.bytes.empty());
+ }
+
+ // Restore the original timeout period.
+ net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(prev_timeout);
+
+ { // Make sure we can still fetch regular URLs.
+ GURL url = server.TestServerPage("files/pac.nsproxy");
+ FetchResult result = pac_fetcher.Fetch(url);
+ EXPECT_EQ(net::OK, result.code);
+ EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+ }
+}
+
+} // namespace net