24 files changed, 717 insertions, 3 deletions
diff --git a/net/base/load_flags.h b/net/base/load_flags.h
index bf5e02f..7e3642a 100644
--- a/net/base/load_flags.h
+++ b/net/base/load_flags.h
@@ -64,6 +64,10 @@ enum {
   // An SDCH dictionary was advertised, and an SDCH encoded response is
   // possible.
   LOAD_SDCH_DICTIONARY_ADVERTISED = 1 << 14,
+
+  // Do not resolve proxies. This override is used when downloading PAC files
+  // to avoid having a circular dependency.
+  LOAD_BYPASS_PROXY = 1 << 15,
 };
 
 }  // namespace net
diff --git a/net/base/net_error_list.h b/net/base/net_error_list.h
index d632fff..c7c66c0 100644
--- a/net/base/net_error_list.h
+++ b/net/base/net_error_list.h
@@ -220,6 +220,9 @@ NET_ERROR(EMPTY_RESPONSE, -324)
 
 // The headers section of the response is too large.
 NET_ERROR(RESPONSE_HEADERS_TOO_BIG, -325)
+
+// The PAC requested by HTTP did not have a valid status code (non-200).
+NET_ERROR(PAC_STATUS_NOT_OK, -326)
 
 // The cache does not have the requested entry.
 NET_ERROR(CACHE_MISS, -400)
diff --git a/net/build/net.vcproj b/net/build/net.vcproj
index 4530a27..0371f78 100644
--- a/net/build/net.vcproj
+++ b/net/build/net.vcproj
@@ -1029,6 +1029,14 @@
 				>
 			</File>
 			<File
+				RelativePath="..\proxy\proxy_script_fetcher.cc"
+				>
+			</File>
+			<File
+				RelativePath="..\proxy\proxy_script_fetcher.h"
+				>
+			</File>
+			<File
 				RelativePath="..\proxy\proxy_service.cc"
 				>
 			</File>
diff --git a/net/build/net_unittests.vcproj b/net/build/net_unittests.vcproj
index db597ce..faa6f83 100644
--- a/net/build/net_unittests.vcproj
+++ b/net/build/net_unittests.vcproj
@@ -228,9 +228,9 @@
 				>
 			<File
 				RelativePath="..\http\http_auth_cache_unittest.cc"
-				>
-			</File>
-			<File
+				>
+			</File>
+			<File
 				RelativePath="..\http\http_auth_handler_basic_unittest.cc"
 				>
 			</File>
@@ -415,6 +415,10 @@ Name="proxy"
 				>
 			<File
+				RelativePath="..\proxy\proxy_script_fetcher_unittest.cc"
+				>
+			</File>
+			<File
 				RelativePath="..\proxy\proxy_service_unittest.cc"
 				>
 			</File>
diff --git a/net/data/proxy_script_fetcher_unittest/404.pac b/net/data/proxy_script_fetcher_unittest/404.pac
new file mode 100644
index 0000000..15e6da3
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/404.pac
@@ -0,0 +1 @@
+-404.pac-
diff --git a/net/data/proxy_script_fetcher_unittest/404.pac.mock-http-headers b/net/data/proxy_script_fetcher_unittest/404.pac.mock-http-headers
new file mode 100644
index 0000000..d8378a5
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/404.pac.mock-http-headers
@@ -0,0 +1,2 @@
+HTTP/1.1 404 OK
+Content-Type: application/x-javascript-config
diff --git a/net/data/proxy_script_fetcher_unittest/500.pac b/net/data/proxy_script_fetcher_unittest/500.pac
new file mode 100644
index 0000000..fa4fa3e
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/500.pac
@@ -0,0 +1 @@
+-500.pac-
diff --git a/net/data/proxy_script_fetcher_unittest/500.pac.mock-http-headers b/net/data/proxy_script_fetcher_unittest/500.pac.mock-http-headers
new file mode 100644
index 0000000..48114a5
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/500.pac.mock-http-headers
@@ -0,0 +1,2 @@
+HTTP/1.1 500 OK
+Content-Type: application/x-javascript-config
diff --git a/net/data/proxy_script_fetcher_unittest/downloadable.pac b/net/data/proxy_script_fetcher_unittest/downloadable.pac
new file mode 100644
index 0000000..5966436
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/downloadable.pac
@@ -0,0 +1 @@
+-downloadable.pac-
diff --git a/net/data/proxy_script_fetcher_unittest/downloadable.pac.mock-http-headers b/net/data/proxy_script_fetcher_unittest/downloadable.pac.mock-http-headers
new file mode 100644
index 0000000..7efc4ad
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/downloadable.pac.mock-http-headers
@@ -0,0 +1,3 @@
+HTTP/1.1 200 OK
+Content-Type: application/x-javascript-config
+Content-Disposition: attachment; filename="download-pac.pac"
diff --git a/net/data/proxy_script_fetcher_unittest/large-pac.nsproxy b/net/data/proxy_script_fetcher_unittest/large-pac.nsproxy
new file mode 100644
index 0000000..762ca2a
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/large-pac.nsproxy
@@ -0,0 +1 @@
+xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
diff --git a/net/data/proxy_script_fetcher_unittest/large-pac.nsproxy.mock-http-headers b/net/data/proxy_script_fetcher_unittest/large-pac.nsproxy.mock-http-headers
new file mode 100644
index 0000000..56019e4
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/large-pac.nsproxy.mock-http-headers
@@ -0,0 +1,2 @@
+HTTP/1.1 200 OK
+Content-Type: application/x-ns-proxy-autoconfig
diff --git a/net/data/proxy_script_fetcher_unittest/pac.html b/net/data/proxy_script_fetcher_unittest/pac.html
new file mode 100644
index 0000000..7f5a993
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/pac.html
@@ -0,0 +1 @@
+-pac.html-
diff --git a/net/data/proxy_script_fetcher_unittest/pac.html.mock-http-headers b/net/data/proxy_script_fetcher_unittest/pac.html.mock-http-headers
new file mode 100644
index 0000000..524e3d8
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/pac.html.mock-http-headers
@@ -0,0 +1,2 @@
+HTTP/1.1 200 OK
+Content-Type: text/html
diff --git a/net/data/proxy_script_fetcher_unittest/pac.nsproxy b/net/data/proxy_script_fetcher_unittest/pac.nsproxy
new file mode 100644
index 0000000..7fe7da4
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/pac.nsproxy
@@ -0,0 +1 @@
+-pac.nsproxy-
diff --git a/net/data/proxy_script_fetcher_unittest/pac.nsproxy.mock-http-headers b/net/data/proxy_script_fetcher_unittest/pac.nsproxy.mock-http-headers
new file mode 100644
index 0000000..56019e4
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/pac.nsproxy.mock-http-headers
@@ -0,0 +1,2 @@
+HTTP/1.1 200 OK
+Content-Type: application/x-ns-proxy-autoconfig
diff --git a/net/data/proxy_script_fetcher_unittest/pac.txt b/net/data/proxy_script_fetcher_unittest/pac.txt
new file mode 100644
index 0000000..9950aa5
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/pac.txt
@@ -0,0 +1 @@
+-pac.txt-
diff --git a/net/data/proxy_script_fetcher_unittest/pac.txt.mock-http-headers b/net/data/proxy_script_fetcher_unittest/pac.txt.mock-http-headers
new file mode 100644
index 0000000..5c695b9
--- /dev/null
+++ b/net/data/proxy_script_fetcher_unittest/pac.txt.mock-http-headers
@@ -0,0 +1,2 @@
+HTTP/1.1 200 OK
+Content-Type: text/plain
diff --git a/net/http/http_network_transaction.cc b/net/http/http_network_transaction.cc
index 6076fab..0ba3d1e 100644
--- a/net/http/http_network_transaction.cc
+++ b/net/http/http_network_transaction.cc
@@ -390,6 +390,11 @@ int HttpNetworkTransaction::DoResolveProxy() {
   next_state_ = STATE_RESOLVE_PROXY_COMPLETE;
 
+  if (request_->load_flags & LOAD_BYPASS_PROXY) {
+    proxy_info_.UseDirect();
+    return OK;
+  }
+
   return session_->proxy_service()->ResolveProxy(
       request_->url, &proxy_info_, &io_callback_, &pac_request_);
 }
@@ -1028,6 +1033,10 @@ int HttpNetworkTransaction::ReconsiderProxyAfterError(int error) {
     return error;
   }
 
+  if (request_->load_flags & LOAD_BYPASS_PROXY) {
+    return error;
+  }
+
   int rv = session_->proxy_service()->ReconsiderProxyAfterError(
       request_->url, &proxy_info_, &io_callback_, &pac_request_);
   if (rv == OK || rv == ERR_IO_PENDING) {
diff --git a/net/net_lib.scons b/net/net_lib.scons
index 9f080586..972dd31 100644
--- a/net/net_lib.scons
+++ b/net/net_lib.scons
@@ -79,6 +79,7 @@ input_files = [
     'http/http_vary_data.cc',
     'http/winhttp_request_throttle.cc',
     'proxy/proxy_resolver_fixed.cc',
+    'proxy/proxy_script_fetcher.cc',
     'proxy/proxy_service.cc',
     'url_request/mime_sniffer_proxy.cc',
     'url_request/url_request.cc',
diff --git a/net/net_unittests.scons b/net/net_unittests.scons
index 3535850..713505c 100644
--- a/net/net_unittests.scons
+++ b/net/net_unittests.scons
@@ -80,6 +80,7 @@ input_files = [
     'http/http_transaction_unittest.cc',
     'http/http_util_unittest.cc',
     'http/http_vary_data_unittest.cc',
+    'proxy/proxy_script_fetcher_unittest.cc',
     'proxy/proxy_service_unittest.cc',
     'url_request/url_request_unittest.cc',
 ]
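The two http_network_transaction.cc hunks above are the consumer side of the new LOAD_BYPASS_PROXY flag: when the flag is set, DoResolveProxy() forces a direct connection and ReconsiderProxyAfterError() gives up immediately, so the transaction never re-enters the ProxyService. The point is to break a cycle: resolving a proxy may require fetching a PAC script, and that fetch must not itself wait on proxy resolution. A standalone illustration of the short-circuit follows; this is not Chromium code, only the enum value mirrors load_flags.h above, and ResolveProxy() is an invented stand-in for HttpNetworkTransaction::DoResolveProxy().

#include <iostream>

enum LoadFlags { LOAD_NORMAL = 0, LOAD_BYPASS_PROXY = 1 << 15 };

// Stand-in for HttpNetworkTransaction::DoResolveProxy().
int ResolveProxy(int load_flags) {
  if (load_flags & LOAD_BYPASS_PROXY) {
    std::cout << "direct connection, no PAC lookup\n";
    return 0;  // OK: proxy resolution skipped entirely.
  }
  std::cout << "consult ProxyService (may itself fetch a PAC script)\n";
  return 0;
}

int main() {
  ResolveProxy(LOAD_NORMAL);        // An ordinary request.
  ResolveProxy(LOAD_BYPASS_PROXY);  // The PAC fetch itself: must not recurse.
}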
diff --git a/net/proxy/proxy_script_fetcher.cc b/net/proxy/proxy_script_fetcher.cc
new file mode 100644
index 0000000..4f9f603
--- /dev/null
+++ b/net/proxy/proxy_script_fetcher.cc
@@ -0,0 +1,301 @@
+// Copyright (c) 2008 The Chromium Authors. All rights reserved. Use of this
+// source code is governed by a BSD-style license that can be found in the
+// LICENSE file.
+
+#include "net/proxy/proxy_script_fetcher.h"
+
+#include "base/compiler_specific.h"
+#include "base/message_loop.h"
+#include "base/string_util.h"
+#include "net/base/load_flags.h"
+#include "net/url_request/url_request.h"
+
+// TODO(eroman):
+//   - Support auth-prompts.
+
+namespace net {
+
+namespace {
+
+// The maximum size (in bytes) allowed for a PAC script. Responses exceeding
+// this will fail with ERR_FILE_TOO_BIG.
+int max_response_bytes = 1048576;  // 1 megabyte
+
+// The maximum duration (in milliseconds) allowed for fetching the PAC script.
+// Responses exceeding this will fail with ERR_TIMED_OUT.
+int max_duration_ms = 300000;  // 5 minutes
+
+}  // namespace
+
+class ProxyScriptFetcherImpl : public ProxyScriptFetcher,
+                               public URLRequest::Delegate {
+ public:
+  // Creates a ProxyScriptFetcher that issues requests through
+  // |url_request_context|. |url_request_context| must remain valid for the
+  // lifetime of ProxyScriptFetcherImpl.
+  explicit ProxyScriptFetcherImpl(URLRequestContext* url_request_context);
+
+  virtual ~ProxyScriptFetcherImpl();
+
+  // ProxyScriptFetcher methods:
+  virtual void Fetch(const GURL& url, std::string* bytes,
+                     CompletionCallback* callback);
+  virtual void Cancel();
+
+  // URLRequest::Delegate methods:
+  virtual void OnAuthRequired(URLRequest* request,
+                              AuthChallengeInfo* auth_info);
+  virtual void OnSSLCertificateError(URLRequest* request, int cert_error,
+                                     X509Certificate* cert);
+  virtual void OnReceivedRedirect(URLRequest* request, const GURL& to_url);
+  virtual void OnResponseStarted(URLRequest* request);
+  virtual void OnReadCompleted(URLRequest* request, int num_bytes);
+  virtual void OnResponseCompleted(URLRequest* request);
+
+ private:
+  // Read more bytes from the response.
+  void ReadBody(URLRequest* request);
+
+  // Called once the request has completed, to notify the caller of
+  // |result_code_| and |result_bytes_|.
+  void FetchCompleted();
+
+  // Clear out the state for the current request.
+  void ResetCurRequestState();
+
+  // Callback for the time-out task of the request with id |id|.
+  void OnTimeout(int id);
+
+  // Factory for creating the time-out task. This takes care of revoking
+  // outstanding tasks when |this| is deleted.
+  ScopedRunnableMethodFactory<ProxyScriptFetcherImpl> task_factory_;
+
+  // The context used for making network requests.
+  URLRequestContext* url_request_context_;
+
+  // Buffer that URLRequest writes into.
+  enum { kBufSize = 4096 };
+  char buf_[kBufSize];
+
+  // The next ID to use for |cur_request_| (monotonically increasing).
+  int next_id_;
+
+  // The current (in progress) request, or NULL.
+  scoped_ptr<URLRequest> cur_request_;
+
+  // State for the current request (only valid when |cur_request_| is not
+  // NULL):
+
+  // Unique ID for the current request.
+  int cur_request_id_;
+
+  // Callback to invoke on completion of the fetch.
+  CompletionCallback* callback_;
+
+  // Holds the error condition that was hit on the current request, or OK.
+  int result_code_;
+
+  // Holds the bytes read so far. Will not exceed |max_response_bytes|. This
+  // buffer is owned by the owner of |callback_|.
+  std::string* result_bytes_;
+};
+
+ProxyScriptFetcherImpl::ProxyScriptFetcherImpl(
+    URLRequestContext* url_request_context)
+    : ALLOW_THIS_IN_INITIALIZER_LIST(task_factory_(this)),
+      url_request_context_(url_request_context),
+      next_id_(0),
+      cur_request_(NULL),
+      cur_request_id_(0),
+      callback_(NULL),
+      result_code_(OK),
+      result_bytes_(NULL) {
+  DCHECK(url_request_context);
+}
+
+ProxyScriptFetcherImpl::~ProxyScriptFetcherImpl() {
+  // The URLRequest's destructor will cancel the outstanding request, and
+  // ensure that the delegate (this) is not called again.
+}
+
+void ProxyScriptFetcherImpl::Fetch(const GURL& url,
+                                   std::string* bytes,
+                                   CompletionCallback* callback) {
+  // It is invalid to call Fetch() while a request is already in progress.
+  DCHECK(!cur_request_.get());
+
+  DCHECK(callback);
+  DCHECK(bytes);
+
+  cur_request_.reset(new URLRequest(url, this));
+  cur_request_->set_context(url_request_context_);
+  cur_request_->set_method("GET");
+
+  // Make sure that the PAC script is downloaded using a direct connection,
+  // to avoid circular dependencies (fetching is a part of proxy resolution).
+  cur_request_->set_load_flags(LOAD_BYPASS_PROXY);
+
+  // Save the caller's info for notification on completion.
+  callback_ = callback;
+  result_bytes_ = bytes;
+  result_bytes_->clear();
+
+  // Post a task to timeout this request if it takes too long.
+  cur_request_id_ = ++next_id_;
+  MessageLoop::current()->PostDelayedTask(FROM_HERE,
+      task_factory_.NewRunnableMethod(&ProxyScriptFetcherImpl::OnTimeout,
+                                      cur_request_id_),
+      static_cast<int>(max_duration_ms));
+
+  // Start the request.
+  cur_request_->Start();
+}
+
+void ProxyScriptFetcherImpl::Cancel() {
+  // ResetCurRequestState will free the URLRequest, which will cause
+  // cancellation.
+  ResetCurRequestState();
+}
+
+void ProxyScriptFetcherImpl::OnAuthRequired(URLRequest* request,
+                                            AuthChallengeInfo* auth_info) {
+  DCHECK(request == cur_request_.get());
+  // TODO(eroman): support auth-prompts.
+  result_code_ = ERR_NOT_IMPLEMENTED;
+  request->CancelAuth();
+}
+
+void ProxyScriptFetcherImpl::OnSSLCertificateError(URLRequest* request,
+                                                   int cert_error,
+                                                   X509Certificate* cert) {
+  DCHECK(request == cur_request_.get());
+  // Certificate errors are in the same space as net errors.
+  result_code_ = cert_error;
+  request->Cancel();
+}
+
+void ProxyScriptFetcherImpl::OnReceivedRedirect(URLRequest* request,
+                                                const GURL& to_url) {
+  DCHECK(request == cur_request_.get());
+  // OK, thanks for telling.
+}
+
+void ProxyScriptFetcherImpl::OnResponseStarted(URLRequest* request) {
+  DCHECK(request == cur_request_.get());
+
+  if (!request->status().is_success()) {
+    OnResponseCompleted(request);
+    return;
+  }
+
+  // Require HTTP responses to have a success status code.
+  if (request->url().SchemeIs("http") || request->url().SchemeIs("https")) {
+    // NOTE about mime types: We do not enforce mime types on PAC files.
+    // This is for compatibility with {IE 7, Firefox 3, Opera 9.5}.
+
+    // NOTE about status codes: We are like Firefox 3 in this respect.
+    // {IE 7, Safari 3, Opera 9.5} do not care about the status code.
+    if (request->GetResponseCode() != 200) {
+      result_code_ = ERR_PAC_STATUS_NOT_OK;
+      request->Cancel();
+      return;
+    }
+  }
+
+  ReadBody(request);
+}
+
+void ProxyScriptFetcherImpl::OnReadCompleted(URLRequest* request,
+                                             int num_bytes) {
+  DCHECK(request == cur_request_.get());
+  if (num_bytes > 0) {
+    // Enforce the maximum size bound.
+    if (num_bytes + result_bytes_->size() >
+        static_cast<size_t>(max_response_bytes)) {
+      result_code_ = ERR_FILE_TOO_BIG;
+      request->Cancel();
+      return;
+    }
+    result_bytes_->append(buf_, num_bytes);
+    ReadBody(request);
+  } else {  // Error while reading, or EOF.
+    OnResponseCompleted(request);
+  }
+}
+
+void ProxyScriptFetcherImpl::OnResponseCompleted(URLRequest* request) {
+  DCHECK(request == cur_request_.get());
+
+  // Use |result_code_| as the request's error only if we have not already
+  // set it to something specific.
+  if (result_code_ == OK && !request->status().is_success())
+    result_code_ = request->status().os_error();
+
+  FetchCompleted();
+}
+
+void ProxyScriptFetcherImpl::ReadBody(URLRequest* request) {
+  int num_bytes;
+  if (request->Read(buf_, kBufSize, &num_bytes)) {
+    OnReadCompleted(request, num_bytes);
+  } else if (!request->status().is_io_pending()) {
+    // Read failed synchronously.
+    OnResponseCompleted(request);
+  }
+}
+
+void ProxyScriptFetcherImpl::FetchCompleted() {
+  // On error, the caller expects an empty string for bytes.
+  if (result_code_ != OK)
+    result_bytes_->clear();
+
+  int result_code = result_code_;
+  CompletionCallback* callback = callback_;
+
+  ResetCurRequestState();
+
+  callback->Run(result_code);
+}
+
+void ProxyScriptFetcherImpl::ResetCurRequestState() {
+  cur_request_.reset();
+  cur_request_id_ = 0;
+  callback_ = NULL;
+  result_code_ = OK;
+  result_bytes_ = NULL;
+}
+
+void ProxyScriptFetcherImpl::OnTimeout(int id) {
+  // Timeout tasks may outlive the URLRequest they reference. Make sure this
+  // one is still applicable.
+  if (cur_request_id_ != id)
+    return;
+
+  DCHECK(cur_request_.get());
+  result_code_ = ERR_TIMED_OUT;
+  cur_request_->Cancel();
+}
+
+// static
+ProxyScriptFetcher* ProxyScriptFetcher::Create(
+    URLRequestContext* url_request_context) {
+  return new ProxyScriptFetcherImpl(url_request_context);
+}
+
+// static
+int ProxyScriptFetcher::SetTimeoutConstraintForUnittest(int timeout_ms) {
+  int prev = max_duration_ms;
+  max_duration_ms = timeout_ms;
+  return prev;
+}
+
+// static
+size_t ProxyScriptFetcher::SetSizeConstraintForUnittest(size_t size_bytes) {
+  size_t prev = max_response_bytes;
+  max_response_bytes = size_bytes;
+  return prev;
+}
+
+}  // namespace net
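A detail worth calling out in proxy_script_fetcher.cc above is the timeout guard: each fetch takes a fresh id from |next_id_|, and OnTimeout(id) only cancels the request if the id still matches |cur_request_id_|, so a delayed task left over from an earlier fetch can never cancel a later one. Below is a minimal standalone sketch of the same pattern, using modern C++ with a detached std::thread standing in for PostDelayedTask(); none of these names come from the patch.

#include <atomic>
#include <chrono>
#include <iostream>
#include <thread>

class Fetcher {
 public:
  void Start() {
    int id = ++next_id_;      // Fresh id for this request.
    cur_id_ = id;
    std::thread([this, id] {  // Stand-in for PostDelayedTask().
      std::this_thread::sleep_for(std::chrono::milliseconds(50));
      OnTimeout(id);
    }).detach();
  }

  // Request finished normally; invalidate the id so a pending timer is inert.
  void Complete() { cur_id_ = 0; }

 private:
  void OnTimeout(int id) {
    if (cur_id_ != id) {      // Stale timer for an earlier request: ignore.
      std::cout << "ignored stale timeout\n";
      return;
    }
    std::cout << "request " << id << " timed out\n";
  }

  std::atomic<int> next_id_{0};
  std::atomic<int> cur_id_{0};
};

int main() {
  Fetcher f;
  f.Start();
  f.Complete();  // Finishes before the timer fires, so the timeout is ignored.
  std::this_thread::sleep_for(std::chrono::milliseconds(100));
}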
diff --git a/net/proxy/proxy_script_fetcher.h b/net/proxy/proxy_script_fetcher.h
new file mode 100644
index 0000000..fddbd7b
--- /dev/null
+++ b/net/proxy/proxy_script_fetcher.h
@@ -0,0 +1,67 @@
+// Copyright (c) 2008 The Chromium Authors. All rights reserved. Use of this
+// source code is governed by a BSD-style license that can be found in the
+// LICENSE file.
+
+// ProxyScriptFetcher is an async interface for fetching a proxy auto config
+// script. It is specific to fetching a PAC script; it enforces the timeout,
+// max-size, and status-code restrictions.
+
+#ifndef NET_PROXY_PROXY_SCRIPT_FETCHER_H_
+#define NET_PROXY_PROXY_SCRIPT_FETCHER_H_
+
+#include "net/base/completion_callback.h"
+#include "testing/gtest/include/gtest/gtest_prod.h"
+
+class GURL;
+class URLRequestContext;
+
+namespace net {
+
+class ProxyScriptFetcher {
+ public:
+  // Destruction should cancel any outstanding requests.
+  virtual ~ProxyScriptFetcher() {}
+
+  // Downloads the given PAC URL, and invokes |callback| on completion.
+  // On success |callback| is executed with a result code of OK, and a
+  // string of the response bytes. On failure, the result bytes are an empty
+  // string, and the result code is a network error. Some special network
+  // errors that may occur are:
+  //
+  //    ERR_TIMED_OUT         -- the fetch took too long to complete.
+  //    ERR_FILE_TOO_BIG      -- the response's body was too large.
+  //    ERR_PAC_STATUS_NOT_OK -- non-200 HTTP status code.
+  //    ERR_NOT_IMPLEMENTED   -- the response required authentication.
+  //
+  // If the request is cancelled (either using the "Cancel()" method or by
+  // deleting |this|), then no callback is invoked.
+  //
+  // Only one fetch is allowed to be outstanding at a time.
+  virtual void Fetch(const GURL& url, std::string* bytes,
+                     CompletionCallback* callback) = 0;
+
+  // Aborts the in-progress fetch (if any).
+  virtual void Cancel() = 0;
+
+  // Create a ProxyScriptFetcher that uses |url_request_context|.
+  static ProxyScriptFetcher* Create(URLRequestContext* url_request_context);
+
+  // --------------------------------------------------------------------------
+  // Testing helpers (only available to unit-tests).
+  // --------------------------------------------------------------------------
+ private:
+  FRIEND_TEST(ProxyScriptFetcherTest, Hang);
+  FRIEND_TEST(ProxyScriptFetcherTest, TooLarge);
+
+  // Sets the maximum duration for a fetch to |timeout_ms|. Returns the
+  // previous bound.
+  static int SetTimeoutConstraintForUnittest(int timeout_ms);
+
+  // Sets the maximum response size for a fetch to |size_bytes|. Returns the
+  // previous bound.
+  static size_t SetSizeConstraintForUnittest(size_t size_bytes);
+};
+
+}  // namespace net
+
+#endif  // NET_PROXY_PROXY_SCRIPT_FETCHER_H_
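The header above fully specifies the caller contract: one outstanding fetch at a time, empty bytes on failure, and no callback after Cancel() or destruction. For orientation, here is a hypothetical caller wired up the same way the unit test below wires its SynchFetcherThreadHelper. PacLoader and OnFetched are invented names; everything else is the 2008-era API used in this patch, so this sketch is illustrative rather than buildable outside the tree.

#include <string>

#include "base/compiler_specific.h"
#include "base/scoped_ptr.h"
#include "net/base/completion_callback.h"
#include "net/proxy/proxy_script_fetcher.h"

class PacLoader {
 public:
  explicit PacLoader(URLRequestContext* context)
      : fetcher_(net::ProxyScriptFetcher::Create(context)),
        ALLOW_THIS_IN_INITIALIZER_LIST(
            callback_(this, &PacLoader::OnFetched)) {}

  void Load(const GURL& pac_url) {
    // |bytes_| must stay alive until |callback_| runs.
    fetcher_->Fetch(pac_url, &bytes_, &callback_);
  }

 private:
  void OnFetched(int result) {
    if (result == net::OK) {
      // |bytes_| now holds the PAC script text.
    }
    // Otherwise |bytes_| is empty and |result| is one of the net errors
    // listed in the header (ERR_TIMED_OUT, ERR_FILE_TOO_BIG, ...).
  }

  scoped_ptr<net::ProxyScriptFetcher> fetcher_;
  std::string bytes_;
  net::CompletionCallbackImpl<PacLoader> callback_;
};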
diff --git a/net/proxy/proxy_script_fetcher_unittest.cc b/net/proxy/proxy_script_fetcher_unittest.cc
new file mode 100644
index 0000000..73476ed
--- /dev/null
+++ b/net/proxy/proxy_script_fetcher_unittest.cc
@@ -0,0 +1,294 @@
+// Copyright (c) 2008 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "net/proxy/proxy_script_fetcher.h"
+
+#include "base/file_path.h"
+#include "base/compiler_specific.h"
+#include "base/path_service.h"
+#include "net/base/net_util.h"
+#include "net/url_request/url_request_unittest.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+// TODO(eroman):
+//   - Test canceling an outstanding request.
+//   - Test deleting ProxyScriptFetcher while a request is in progress.
+
+const wchar_t kDocRoot[] = L"net/data/proxy_script_fetcher_unittest";
+
+struct FetchResult {
+  int code;
+  std::string bytes;
+};
+
+// A non-mock URL request context which can access http:// and file:// urls.
+class RequestContext : public URLRequestContext {
+ public:
+  RequestContext() {
+    net::ProxyInfo no_proxy;
+    proxy_service_ = net::ProxyService::Create(&no_proxy);
+    http_transaction_factory_ =
+        net::HttpNetworkLayer::CreateFactory(proxy_service_);
+  }
+  ~RequestContext() {
+    delete http_transaction_factory_;
+    delete proxy_service_;
+  }
+};
+
+// Helper for doing synch fetches. This object lives in SynchFetcher's
+// |io_thread_| and communicates with SynchFetcher through (|result|, |event|).
+class SynchFetcherThreadHelper {
+ public:
+  SynchFetcherThreadHelper(base::WaitableEvent* event, FetchResult* result)
+      : event_(event),
+        fetch_result_(result),
+        url_request_context_(NULL),
+        fetcher_(NULL),
+        ALLOW_THIS_IN_INITIALIZER_LIST(
+            callback_(this, &SynchFetcherThreadHelper::OnFetchCompletion)) {
+    url_request_context_ = new RequestContext;
+    fetcher_.reset(
+        net::ProxyScriptFetcher::Create(url_request_context_.get()));
+  }
+
+  // Starts fetching the script at |url|. Upon completion |event_| will be
+  // signalled, and the bytes read will have been written to |fetch_result_|.
+  void Start(const GURL& url) {
+    fetcher_->Fetch(url, &fetch_result_->bytes, &callback_);
+  }
+
+  void OnFetchCompletion(int result) {
+    fetch_result_->code = result;
+    event_->Signal();
+  }
+
+ private:
+  base::WaitableEvent* event_;
+  FetchResult* fetch_result_;
+
+  scoped_refptr<URLRequestContext> url_request_context_;
+
+  scoped_ptr<net::ProxyScriptFetcher> fetcher_;
+  net::CompletionCallbackImpl<SynchFetcherThreadHelper> callback_;
+};
+
+// Helper that wraps ProxyScriptFetcher::Fetch() with a synchronous interface.
+// It executes Fetch() on a helper thread (IO_Thread).
+class SynchFetcher {
+ public:
+  SynchFetcher()
+      : event_(false, false),
+        io_thread_("IO_Thread"),
+        thread_helper_(NULL) {
+    // Start an IO thread.
+    base::Thread::Options options;
+    options.message_loop_type = MessageLoop::TYPE_IO;
+    io_thread_.StartWithOptions(options);
+
+    // Initialize the state in |io_thread_|.
+    io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
+        this, &SynchFetcher::Init));
+    Wait();
+  }
+
+  ~SynchFetcher() {
+    // Tear down the state in |io_thread_|.
+    io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
+        this, &SynchFetcher::Cleanup));
+    Wait();
+  }
+
+  // Synchronously fetch the url.
+  FetchResult Fetch(const GURL& url) {
+    io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
+        this, &SynchFetcher::AsynchFetch, url));
+    Wait();
+    return fetch_result_;
+  }
+
+ private:
+  // [Runs on |io_thread_|] Allocates the URLRequestContext and the
+  // ProxyScriptFetcher, which live inside |thread_helper_|.
+  void Init() {
+    thread_helper_ = new SynchFetcherThreadHelper(&event_, &fetch_result_);
+    event_.Signal();
+  }
+
+  // [Runs on |io_thread_|] Starts the fetch; |event_| is signalled by
+  // |thread_helper_| on completion.
+  void AsynchFetch(const GURL& url) {
+    thread_helper_->Start(url);
+  }
+
+  // [Runs on |io_thread_|] Signals |event_| on cleanup completion.
+  void Cleanup() {
+    delete thread_helper_;
+    thread_helper_ = NULL;
+    MessageLoop::current()->RunAllPending();
+    event_.Signal();
+  }
+
+  void Wait() {
+    event_.Wait();
+    event_.Reset();
+  }
+
+  base::WaitableEvent event_;
+  base::Thread io_thread_;
+  FetchResult fetch_result_;
+
+  // Holds all the state that lives on the IO thread, for easy cleanup.
+  SynchFetcherThreadHelper* thread_helper_;
+};
+
+// Template specialization so SynchFetcher does not have to be refcounted.
+template<>
+void RunnableMethodTraits<SynchFetcher>::RetainCallee(SynchFetcher* remover) {
+}
+template<>
+void RunnableMethodTraits<SynchFetcher>::ReleaseCallee(SynchFetcher* remover) {
+}
+
+// Required to be in net namespace by FRIEND_TEST.
+namespace net {
+
+// Get a file:// url relative to net/data/proxy_script_fetcher_unittest.
+GURL GetTestFileUrl(const std::string& relpath) {
+  FilePath path;
+  PathService::Get(base::DIR_SOURCE_ROOT, &path);
+  path = path.Append(FILE_PATH_LITERAL("net"));
+  path = path.Append(FILE_PATH_LITERAL("data"));
+  path = path.Append(FILE_PATH_LITERAL("proxy_script_fetcher_unittest"));
+  GURL base_url = net::FilePathToFileURL(path);
+  return GURL(base_url.spec() + "/" + relpath);
+}
+
+TEST(ProxyScriptFetcherTest, FileUrl) {
+  SynchFetcher pac_fetcher;
+
+  {  // Fetch a non-existent file.
+    FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"));
+    EXPECT_EQ(net::ERR_FILE_NOT_FOUND, result.code);
+    EXPECT_TRUE(result.bytes.empty());
+  }
+  {  // Fetch a file that exists.
+    FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"));
+    EXPECT_EQ(net::OK, result.code);
+    EXPECT_EQ("-pac.txt-\n", result.bytes);
+  }
+}
+
+// Note that all mime types are allowed for PAC files, to be consistent
+// with other browsers.
+TEST(ProxyScriptFetcherTest, HttpMimeType) {
+  TestServer server(kDocRoot);
+  SynchFetcher pac_fetcher;
+
+  {  // Fetch a PAC with mime type "text/plain".
+    GURL url = server.TestServerPage("files/pac.txt");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::OK, result.code);
+    EXPECT_EQ("-pac.txt-\n", result.bytes);
+  }
+  {  // Fetch a PAC with mime type "text/html".
+    GURL url = server.TestServerPage("files/pac.html");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::OK, result.code);
+    EXPECT_EQ("-pac.html-\n", result.bytes);
+  }
+  {  // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig".
+    GURL url = server.TestServerPage("files/pac.nsproxy");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::OK, result.code);
+    EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+  }
+}
+
+TEST(ProxyScriptFetcherTest, HttpStatusCode) {
+  TestServer server(kDocRoot);
+  SynchFetcher pac_fetcher;
+
+  {  // Fetch a PAC which gives a 500 -- FAIL.
+    GURL url = server.TestServerPage("files/500.pac");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code);
+    EXPECT_TRUE(result.bytes.empty());
+  }
+  {  // Fetch a PAC which gives a 404 -- FAIL.
+    GURL url = server.TestServerPage("files/404.pac");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code);
+    EXPECT_TRUE(result.bytes.empty());
+  }
+}
+
+TEST(ProxyScriptFetcherTest, ContentDisposition) {
+  TestServer server(kDocRoot);
+  SynchFetcher pac_fetcher;
+
+  // Fetch a PAC script via HTTP with a Content-Disposition header -- it
+  // should have no effect.
+  GURL url = server.TestServerPage("files/downloadable.pac");
+  FetchResult result = pac_fetcher.Fetch(url);
+  EXPECT_EQ(net::OK, result.code);
+  EXPECT_EQ("-downloadable.pac-\n", result.bytes);
+}
+
+TEST(ProxyScriptFetcherTest, TooLarge) {
+  TestServer server(kDocRoot);
+  SynchFetcher pac_fetcher;
+
+  // Set the maximum response size to 50 bytes.
+  int prev_size = net::ProxyScriptFetcher::SetSizeConstraintForUnittest(50);
+
+  // These two URLs are the same file, but are http:// vs file://.
+  GURL urls[] = {
+    server.TestServerPage("files/large-pac.nsproxy"),
+    GetTestFileUrl("large-pac.nsproxy"),
+  };
+
+  // Try fetching URLs that are 101 bytes large. We should abort the request
+  // after 50 bytes have been read, and fail with a too-large error.
+  for (size_t i = 0; i < arraysize(urls); ++i) {
+    const GURL& url = urls[i];
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::ERR_FILE_TOO_BIG, result.code);
+    EXPECT_TRUE(result.bytes.empty());
+  }
+
+  // Restore the original size bound.
+  net::ProxyScriptFetcher::SetSizeConstraintForUnittest(prev_size);
+
+  {  // Make sure we can still fetch regular URLs.
+    GURL url = server.TestServerPage("files/pac.nsproxy");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::OK, result.code);
+    EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+  }
+}
+
+TEST(ProxyScriptFetcherTest, Hang) {
+  TestServer server(kDocRoot);
+  SynchFetcher pac_fetcher;
+
+  // Set the timeout period to 0.5 seconds.
+  int prev_timeout =
+      net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(500);
+
+  // Try fetching a URL which takes 1.2 seconds. We should abort the request
+  // after 500 ms, and fail with a timeout error.
+  {
+    GURL url = server.TestServerPage("slow/proxy.pac?1.2");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::ERR_TIMED_OUT, result.code);
+    EXPECT_TRUE(result.bytes.empty());
+  }
+
+  // Restore the original timeout period.
+  net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(prev_timeout);
+
+  {  // Make sure we can still fetch regular URLs.
+    GURL url = server.TestServerPage("files/pac.nsproxy");
+    FetchResult result = pac_fetcher.Fetch(url);
+    EXPECT_EQ(net::OK, result.code);
+    EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+  }
+}
+
+}  // namespace net