diff options
author | eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-10-16 23:10:33 +0000 |
---|---|---|
committer | eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-10-16 23:10:33 +0000 |
commit | 869336177906f183c223f304c8455b424d64b54f (patch) | |
tree | 44afa6f07a120acc8c383a857b9e504df74c04ae /net | |
parent | 93bd86a6b4678b9dfc5b263107a90131974ef529 (diff) | |
download | chromium_src-869336177906f183c223f304c8455b424d64b54f.zip chromium_src-869336177906f183c223f304c8455b424d64b54f.tar.gz chromium_src-869336177906f183c223f304c8455b424d64b54f.tar.bz2 |
Cleanup: Move the ProxyScriptFetcher registry from being a global in net, to living in IOThread.
I had to make some other changes to make this fit well: moved ProxyScriptFetcherImpl to its own set of files, and added an IOThread::Get() accessor to avoid plumbing through several layers in connection_tester.
I find the registry living in IOThread is preferable, as globals in net/ limit the ability to run safely on multiple threads, and also lead to confusion about what needs to be called at shutdown.
Review URL: http://codereview.chromium.org/3823001
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@62876 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net')
-rw-r--r-- | net/net.gyp | 5 | ||||
-rw-r--r-- | net/proxy/proxy_script_fetcher.h | 24 | ||||
-rw-r--r-- | net/proxy/proxy_script_fetcher_impl.cc (renamed from net/proxy/proxy_script_fetcher.cc) | 166 | ||||
-rw-r--r-- | net/proxy/proxy_script_fetcher_impl.h | 119 | ||||
-rw-r--r-- | net/proxy/proxy_script_fetcher_impl_unittest.cc (renamed from net/proxy/proxy_script_fetcher_unittest.cc) | 52 | ||||
-rw-r--r-- | net/proxy/proxy_service.cc | 19 | ||||
-rw-r--r-- | net/proxy/proxy_service.h | 17 |
7 files changed, 188 insertions, 214 deletions
diff --git a/net/net.gyp b/net/net.gyp index 768cb6e..699f093 100644 --- a/net/net.gyp +++ b/net/net.gyp @@ -541,8 +541,9 @@ 'proxy/proxy_resolver_winhttp.cc', 'proxy/proxy_resolver_winhttp.h', 'proxy/proxy_retry_info.h', - 'proxy/proxy_script_fetcher.cc', 'proxy/proxy_script_fetcher.h', + 'proxy/proxy_script_fetcher_impl.cc', + 'proxy/proxy_script_fetcher_impl.h', 'proxy/proxy_server.cc', 'proxy/proxy_server_mac.cc', 'proxy/proxy_server.h', @@ -893,7 +894,7 @@ 'proxy/proxy_list_unittest.cc', 'proxy/proxy_resolver_js_bindings_unittest.cc', 'proxy/proxy_resolver_v8_unittest.cc', - 'proxy/proxy_script_fetcher_unittest.cc', + 'proxy/proxy_script_fetcher_impl_unittest.cc', 'proxy/proxy_server_unittest.cc', 'proxy/proxy_service_unittest.cc', 'proxy/sync_host_resolver_bridge_unittest.cc', diff --git a/net/proxy/proxy_script_fetcher.h b/net/proxy/proxy_script_fetcher.h index 3f2602f..0a28065 100644 --- a/net/proxy/proxy_script_fetcher.h +++ b/net/proxy/proxy_script_fetcher.h @@ -10,7 +10,6 @@ #define NET_PROXY_PROXY_SCRIPT_FETCHER_H_ #pragma once -#include "base/gtest_prod_util.h" #include "base/string16.h" #include "net/base/completion_callback.h" @@ -19,6 +18,8 @@ class URLRequestContext; namespace net { +// Interface for downloading a PAC script. Implementations can enforce +// timeouts, maximum size constraints, content encoding, etc.. class ProxyScriptFetcher { public: // Destruction should cancel any outstanding requests. @@ -48,29 +49,8 @@ class ProxyScriptFetcher { // Returns the request context that this fetcher uses to issue downloads, // or NULL. virtual URLRequestContext* GetRequestContext() { return NULL; } - - // Create a ProxyScriptFetcher that uses |url_request_context|. - static ProxyScriptFetcher* Create(URLRequestContext* url_request_context); - - // -------------------------------------------------------------------------- - // Testing helpers (only available to unit-tests). 
- // -------------------------------------------------------------------------- - private: - FRIEND_TEST_ALL_PREFIXES(ProxyScriptFetcherTest, Hang); - FRIEND_TEST_ALL_PREFIXES(ProxyScriptFetcherTest, TooLarge); - - // Sets the maximum duration for a fetch to |timeout_ms|. Returns the previous - // bound. - static int SetTimeoutConstraintForUnittest(int timeout_ms); - - // Sets the maximum response size for a fetch to |size_bytes|. Returns the - // previous bound. - static size_t SetSizeConstraintForUnittest(size_t size_bytes); }; -// Cancels all current proxy script fetches. -void EnsureNoProxyScriptFetches(); - } // namespace net #endif // NET_PROXY_PROXY_SCRIPT_FETCHER_H_ diff --git a/net/proxy/proxy_script_fetcher.cc b/net/proxy/proxy_script_fetcher_impl.cc index c920331..035622f 100644 --- a/net/proxy/proxy_script_fetcher.cc +++ b/net/proxy/proxy_script_fetcher_impl.cc @@ -2,40 +2,35 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "net/proxy/proxy_script_fetcher.h" +#include "net/proxy/proxy_script_fetcher_impl.h" -#include <set> - -#include "base/basictypes.h" #include "base/compiler_specific.h" #include "base/i18n/icu_string_conversions.h" -#include "base/lazy_instance.h" #include "base/logging.h" #include "base/message_loop.h" -#include "base/ref_counted.h" -#include "base/stl_util-inl.h" #include "base/string_util.h" -#include "base/utf_string_conversions.h" #include "net/base/io_buffer.h" #include "net/base/load_flags.h" #include "net/base/net_errors.h" #include "net/http/http_response_headers.h" -#include "net/url_request/url_request.h" +#include "net/url_request/url_request_context.h" // TODO(eroman): // - Support auth-prompts. +// TODO(eroman): Use a state machine rather than recursion. Recursion could +// lead to lots of frames. namespace net { namespace { // The maximum size (in bytes) allowed for a PAC script. Responses exceeding // this will fail with ERR_FILE_TOO_BIG. 
-int max_response_bytes = 1048576; // 1 megabyte +const int kDefaultMaxResponseBytes = 1048576; // 1 megabyte // The maximum duration (in milliseconds) allowed for fetching the PAC script. // Responses exceeding this will fail with ERR_TIMED_OUT. -int max_duration_ms = 300000; // 5 minutes +const int kDefaultMaxDurationMs = 300000; // 5 minutes // Returns true if |mime_type| is one of the known PAC mime type. bool IsPacMimeType(const std::string& mime_type) { @@ -74,124 +69,8 @@ void ConvertResponseToUTF16(const std::string& charset, utf16); } -class ProxyScriptFetcherTracker { - public: - void AddFetcher(ProxyScriptFetcher* fetcher) { - DCHECK(!ContainsKey(fetchers_, fetcher)); - fetchers_.insert(fetcher); - } - - void RemoveFetcher(ProxyScriptFetcher* fetcher) { - DCHECK(ContainsKey(fetchers_, fetcher)); - fetchers_.erase(fetcher); - } - - void CancelAllFetches() { - for (std::set<ProxyScriptFetcher*>::const_iterator it = fetchers_.begin(); - it != fetchers_.end(); ++it) { - (*it)->Cancel(); - } - } - - private: - ProxyScriptFetcherTracker(); - ~ProxyScriptFetcherTracker(); - - friend struct base::DefaultLazyInstanceTraits<ProxyScriptFetcherTracker>; - - std::set<ProxyScriptFetcher*> fetchers_; - DISALLOW_COPY_AND_ASSIGN(ProxyScriptFetcherTracker); -}; - -ProxyScriptFetcherTracker::ProxyScriptFetcherTracker() {} -ProxyScriptFetcherTracker::~ProxyScriptFetcherTracker() {} - -base::LazyInstance<ProxyScriptFetcherTracker> - g_fetcher_tracker(base::LINKER_INITIALIZED); - } // namespace -void EnsureNoProxyScriptFetches() { - g_fetcher_tracker.Get().CancelAllFetches(); -} - -class ProxyScriptFetcherImpl : public ProxyScriptFetcher, - public URLRequest::Delegate { - public: - // Creates a ProxyScriptFetcher that issues requests through - // |url_request_context|. |url_request_context| must remain valid for the - // lifetime of ProxyScriptFetcherImpl. 
- explicit ProxyScriptFetcherImpl(URLRequestContext* url_request_context); - - virtual ~ProxyScriptFetcherImpl(); - - // ProxyScriptFetcher methods: - - virtual int Fetch(const GURL& url, string16* text, - CompletionCallback* callback); - virtual void Cancel(); - virtual URLRequestContext* GetRequestContext(); - - // URLRequest::Delegate methods: - - virtual void OnAuthRequired(URLRequest* request, - AuthChallengeInfo* auth_info); - virtual void OnSSLCertificateError(URLRequest* request, int cert_error, - X509Certificate* cert); - virtual void OnResponseStarted(URLRequest* request); - virtual void OnReadCompleted(URLRequest* request, int num_bytes); - virtual void OnResponseCompleted(URLRequest* request); - - private: - // Read more bytes from the response. - void ReadBody(URLRequest* request); - - // Called once the request has completed to notify the caller of - // |response_code_| and |response_text_|. - void FetchCompleted(); - - // Clear out the state for the current request. - void ResetCurRequestState(); - - // Callback for time-out task of request with id |id|. - void OnTimeout(int id); - - // Factory for creating the time-out task. This takes care of revoking - // outstanding tasks when |this| is deleted. - ScopedRunnableMethodFactory<ProxyScriptFetcherImpl> task_factory_; - - // The context used for making network requests. - URLRequestContext* url_request_context_; - - // Buffer that URLRequest writes into. - enum { kBufSize = 4096 }; - scoped_refptr<net::IOBuffer> buf_; - - // The next ID to use for |cur_request_| (monotonically increasing). - int next_id_; - - // The current (in progress) request, or NULL. - scoped_ptr<URLRequest> cur_request_; - - // State for current request (only valid when |cur_request_| is not NULL): - - // Unique ID for the current request. - int cur_request_id_; - - // Callback to invoke on completion of the fetch. - CompletionCallback* callback_; - - // Holds the error condition that was hit on the current request, or OK. 
- int result_code_; - - // Holds the bytes read so far. Will not exceed |max_response_bytes|. - std::string bytes_read_so_far_; - - // This buffer is owned by the owner of |callback|, and will be filled with - // UTF16 response on completion. - string16* result_text_; -}; - ProxyScriptFetcherImpl::ProxyScriptFetcherImpl( URLRequestContext* url_request_context) : ALLOW_THIS_IN_INITIALIZER_LIST(task_factory_(this)), @@ -202,13 +81,13 @@ ProxyScriptFetcherImpl::ProxyScriptFetcherImpl( cur_request_id_(0), callback_(NULL), result_code_(OK), - result_text_(NULL) { + result_text_(NULL), + max_response_bytes_(kDefaultMaxResponseBytes), + max_duration_(base::TimeDelta::FromMilliseconds(kDefaultMaxDurationMs)) { DCHECK(url_request_context); - g_fetcher_tracker.Get().AddFetcher(this); } ProxyScriptFetcherImpl::~ProxyScriptFetcherImpl() { - g_fetcher_tracker.Get().RemoveFetcher(this); // The URLRequest's destructor will cancel the outstanding request, and // ensure that the delegate (this) is not called again. } @@ -244,7 +123,7 @@ int ProxyScriptFetcherImpl::Fetch(const GURL& url, MessageLoop::current()->PostDelayedTask(FROM_HERE, task_factory_.NewRunnableMethod(&ProxyScriptFetcherImpl::OnTimeout, cur_request_id_), - static_cast<int>(max_duration_ms)); + static_cast<int>(max_duration_.InMilliseconds())); // Start the request. cur_request_->Start(); @@ -320,7 +199,7 @@ void ProxyScriptFetcherImpl::OnReadCompleted(URLRequest* request, if (num_bytes > 0) { // Enforce maximum size bound. 
if (num_bytes + bytes_read_so_far_.size() > - static_cast<size_t>(max_response_bytes)) { + static_cast<size_t>(max_response_bytes_)) { result_code_ = ERR_FILE_TOO_BIG; request->Cancel(); return; @@ -391,26 +270,17 @@ void ProxyScriptFetcherImpl::OnTimeout(int id) { cur_request_->Cancel(); } -// static -ProxyScriptFetcher* ProxyScriptFetcher::Create( - URLRequestContext* url_request_context) { - return new ProxyScriptFetcherImpl(url_request_context); -} - -// static -int ProxyScriptFetcher::SetTimeoutConstraintForUnittest( - int timeout_ms) { - int prev = max_duration_ms; - max_duration_ms = timeout_ms; +base::TimeDelta ProxyScriptFetcherImpl::SetTimeoutConstraint( + base::TimeDelta timeout) { + base::TimeDelta prev = max_duration_; + max_duration_ = timeout; return prev; } -// static -size_t ProxyScriptFetcher::SetSizeConstraintForUnittest(size_t size_bytes) { - size_t prev = max_response_bytes; - max_response_bytes = size_bytes; +size_t ProxyScriptFetcherImpl::SetSizeConstraint(size_t size_bytes) { + size_t prev = max_response_bytes_; + max_response_bytes_ = size_bytes; return prev; } - } // namespace net diff --git a/net/proxy/proxy_script_fetcher_impl.h b/net/proxy/proxy_script_fetcher_impl.h new file mode 100644 index 0000000..7d2e1c6 --- /dev/null +++ b/net/proxy/proxy_script_fetcher_impl.h @@ -0,0 +1,119 @@ +// Copyright (c) 2010 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef NET_PROXY_PROXY_SCRIPT_FETCHER_IMPL_H_ +#define NET_PROXY_PROXY_SCRIPT_FETCHER_IMPL_H_ +#pragma once + +#include "base/basictypes.h" +#include "base/ref_counted.h" +#include "base/string16.h" +#include "base/task.h" +#include "base/time.h" +#include "net/proxy/proxy_script_fetcher.h" +#include "net/url_request/url_request.h" + +class GURL; +class URLRequestContext; +class X509Certificate; + +namespace net { + +// Implementation of ProxyScriptFetcher that downloads scripts using the +// specified request context. +class ProxyScriptFetcherImpl : public ProxyScriptFetcher, + public URLRequest::Delegate { + public: + // Creates a ProxyScriptFetcher that issues requests through + // |url_request_context|. |url_request_context| must remain valid for the + // lifetime of ProxyScriptFetcherImpl. + // Note that while a request is in progress, we will be holding a reference + // to |url_request_context|. Be careful not to create cycles between the + // fetcher and the context; you can break such cycles by calling Cancel(). + explicit ProxyScriptFetcherImpl(URLRequestContext* url_request_context); + + virtual ~ProxyScriptFetcherImpl(); + + // ProxyScriptFetcher methods: + + virtual int Fetch(const GURL& url, string16* text, + CompletionCallback* callback); + virtual void Cancel(); + virtual URLRequestContext* GetRequestContext(); + + // URLRequest::Delegate methods: + + virtual void OnAuthRequired(URLRequest* request, + AuthChallengeInfo* auth_info); + virtual void OnSSLCertificateError(URLRequest* request, int cert_error, + X509Certificate* cert); + virtual void OnResponseStarted(URLRequest* request); + virtual void OnReadCompleted(URLRequest* request, int num_bytes); + virtual void OnResponseCompleted(URLRequest* request); + + // Used by unit-tests to modify the default limits. + base::TimeDelta SetTimeoutConstraint(base::TimeDelta timeout); + size_t SetSizeConstraint(size_t size_bytes); + + private: + // Read more bytes from the response. 
+ void ReadBody(URLRequest* request); + + // Called once the request has completed to notify the caller of + // |response_code_| and |response_text_|. + void FetchCompleted(); + + // Clear out the state for the current request. + void ResetCurRequestState(); + + // Callback for time-out task of request with id |id|. + void OnTimeout(int id); + + // Factory for creating the time-out task. This takes care of revoking + // outstanding tasks when |this| is deleted. + ScopedRunnableMethodFactory<ProxyScriptFetcherImpl> task_factory_; + + // The context used for making network requests. + URLRequestContext* url_request_context_; + + // Buffer that URLRequest writes into. + enum { kBufSize = 4096 }; + scoped_refptr<net::IOBuffer> buf_; + + // The next ID to use for |cur_request_| (monotonically increasing). + int next_id_; + + // The current (in progress) request, or NULL. + scoped_ptr<URLRequest> cur_request_; + + // State for current request (only valid when |cur_request_| is not NULL): + + // Unique ID for the current request. + int cur_request_id_; + + // Callback to invoke on completion of the fetch. + CompletionCallback* callback_; + + // Holds the error condition that was hit on the current request, or OK. + int result_code_; + + // Holds the bytes read so far. Will not exceed |max_response_bytes|. + std::string bytes_read_so_far_; + + // This buffer is owned by the owner of |callback|, and will be filled with + // UTF16 response on completion. + string16* result_text_; + + // The maximum number of bytes to allow in responses. + size_t max_response_bytes_; + + // The maximum amount of time to wait for download to complete. 
+ base::TimeDelta max_duration_; + + DISALLOW_COPY_AND_ASSIGN(ProxyScriptFetcherImpl); +}; + +} // namespace net + +#endif // NET_PROXY_PROXY_SCRIPT_FETCHER_IMPL_H_ diff --git a/net/proxy/proxy_script_fetcher_unittest.cc b/net/proxy/proxy_script_fetcher_impl_unittest.cc index ce54a9b..2634f99 100644 --- a/net/proxy/proxy_script_fetcher_unittest.cc +++ b/net/proxy/proxy_script_fetcher_impl_unittest.cc @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "net/proxy/proxy_script_fetcher.h" +#include "net/proxy/proxy_script_fetcher_impl.h" #include "base/file_path.h" #include "base/compiler_specific.h" @@ -67,9 +67,9 @@ GURL GetTestFileUrl(const std::string& relpath) { return GURL(base_url.spec() + "/" + relpath); } -class ProxyScriptFetcherTest : public PlatformTest { +class ProxyScriptFetcherImplTest : public PlatformTest { public: - ProxyScriptFetcherTest() + ProxyScriptFetcherImplTest() : test_server_(net::TestServer::TYPE_HTTP, FilePath(kDocRoot)) { } @@ -77,10 +77,10 @@ class ProxyScriptFetcherTest : public PlatformTest { net::TestServer test_server_; }; -TEST_F(ProxyScriptFetcherTest, FileUrl) { +TEST_F(ProxyScriptFetcherImplTest, FileUrl) { scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); { // Fetch a non-existent file. string16 text; @@ -104,12 +104,12 @@ TEST_F(ProxyScriptFetcherTest, FileUrl) { // Note that all mime types are allowed for PAC file, to be consistent // with other browsers. 
-TEST_F(ProxyScriptFetcherTest, HttpMimeType) { +TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); { // Fetch a PAC with mime type "text/plain" GURL url(test_server_.GetURL("files/pac.txt")); @@ -140,12 +140,12 @@ TEST_F(ProxyScriptFetcherTest, HttpMimeType) { } } -TEST_F(ProxyScriptFetcherTest, HttpStatusCode) { +TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); { // Fetch a PAC which gives a 500 -- FAIL GURL url(test_server_.GetURL("files/500.pac")); @@ -167,12 +167,12 @@ TEST_F(ProxyScriptFetcherTest, HttpStatusCode) { } } -TEST_F(ProxyScriptFetcherTest, ContentDisposition) { +TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); // Fetch PAC scripts via HTTP with a Content-Disposition header -- should // have no effect. @@ -185,12 +185,12 @@ TEST_F(ProxyScriptFetcherTest, ContentDisposition) { EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text); } -TEST_F(ProxyScriptFetcherTest, NoCache) { +TEST_F(ProxyScriptFetcherImplTest, NoCache) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour. 
GURL url(test_server_.GetURL("files/cacheable_1hr.pac")); @@ -218,15 +218,15 @@ TEST_F(ProxyScriptFetcherTest, NoCache) { } } -TEST_F(ProxyScriptFetcherTest, TooLarge) { +TEST_F(ProxyScriptFetcherImplTest, TooLarge) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; - scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + scoped_ptr<ProxyScriptFetcherImpl> pac_fetcher( + new ProxyScriptFetcherImpl(context)); // Set the maximum response size to 50 bytes. - int prev_size = ProxyScriptFetcher::SetSizeConstraintForUnittest(50); + int prev_size = pac_fetcher->SetSizeConstraint(50); // These two URLs are the same file, but are http:// vs file:// GURL urls[] = { @@ -247,7 +247,7 @@ TEST_F(ProxyScriptFetcherTest, TooLarge) { } // Restore the original size bound. - ProxyScriptFetcher::SetSizeConstraintForUnittest(prev_size); + pac_fetcher->SetSizeConstraint(prev_size); { // Make sure we can still fetch regular URLs. GURL url(test_server_.GetURL("files/pac.nsproxy")); @@ -260,16 +260,16 @@ TEST_F(ProxyScriptFetcherTest, TooLarge) { } } -TEST_F(ProxyScriptFetcherTest, Hang) { +TEST_F(ProxyScriptFetcherImplTest, Hang) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; - scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + scoped_ptr<ProxyScriptFetcherImpl> pac_fetcher( + new ProxyScriptFetcherImpl(context)); // Set the timeout period to 0.5 seconds. - int prev_timeout = - ProxyScriptFetcher::SetTimeoutConstraintForUnittest(500); + base::TimeDelta prev_timeout = pac_fetcher->SetTimeoutConstraint( + base::TimeDelta::FromMilliseconds(500)); // Try fetching a URL which takes 1.2 seconds. We should abort the request // after 500 ms, and fail with a timeout error. @@ -283,7 +283,7 @@ TEST_F(ProxyScriptFetcherTest, Hang) { } // Restore the original timeout period. 
- ProxyScriptFetcher::SetTimeoutConstraintForUnittest(prev_timeout); + pac_fetcher->SetTimeoutConstraint(prev_timeout); { // Make sure we can still fetch regular URLs. GURL url(test_server_.GetURL("files/pac.nsproxy")); @@ -299,12 +299,12 @@ TEST_F(ProxyScriptFetcherTest, Hang) { // The ProxyScriptFetcher should decode any content-codings // (like gzip, bzip, etc.), and apply any charset conversions to yield // UTF8. -TEST_F(ProxyScriptFetcherTest, Encodings) { +TEST_F(ProxyScriptFetcherImplTest, Encodings) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); // Test a response that is gzip-encoded -- should get inflated. { diff --git a/net/proxy/proxy_service.cc b/net/proxy/proxy_service.cc index 4c27170..db22cc1 100644 --- a/net/proxy/proxy_service.cc +++ b/net/proxy/proxy_service.cc @@ -391,20 +391,20 @@ ProxyService::ProxyService(ProxyConfigService* config_service, ProxyService* ProxyService::CreateUsingV8ProxyResolver( ProxyConfigService* proxy_config_service, size_t num_pac_threads, - URLRequestContext* url_request_context, - NetLog* net_log, - MessageLoop* io_loop) { + ProxyScriptFetcher* proxy_script_fetcher, + HostResolver* host_resolver, + NetLog* net_log) { DCHECK(proxy_config_service); - DCHECK(url_request_context); - DCHECK(io_loop); + DCHECK(proxy_script_fetcher); + DCHECK(host_resolver); if (num_pac_threads == 0) num_pac_threads = kDefaultNumPacThreads; ProxyResolverFactory* sync_resolver_factory = new ProxyResolverFactoryForV8( - url_request_context->host_resolver(), - io_loop, + host_resolver, + MessageLoop::current(), net_log); ProxyResolver* proxy_resolver = @@ -413,9 +413,8 @@ ProxyService* ProxyService::CreateUsingV8ProxyResolver( ProxyService* proxy_service = new ProxyService(proxy_config_service, proxy_resolver, net_log); - // Configure PAC script downloads to be issued 
using |url_request_context|. - proxy_service->SetProxyScriptFetcher( - ProxyScriptFetcher::Create(url_request_context)); + // Configure PAC script downloads to be issued using |proxy_script_fetcher|. + proxy_service->SetProxyScriptFetcher(proxy_script_fetcher); return proxy_service; } diff --git a/net/proxy/proxy_service.h b/net/proxy/proxy_service.h index 951cd2a..8a161ea 100644 --- a/net/proxy/proxy_service.h +++ b/net/proxy/proxy_service.h @@ -25,6 +25,7 @@ class URLRequestContext; namespace net { +class HostResolver; class InitProxyResolver; class ProxyResolver; class ProxyScriptFetcher; @@ -153,9 +154,13 @@ class ProxyService : public base::RefCountedThreadSafe<ProxyService>, // (b) increases the memory used by proxy resolving, as each thread will // duplicate its own script context. - // |url_request_context| specifies the URL request context that will - // be used if a PAC script needs to be fetched. - // |io_loop| points to the IO thread's message loop. + // |proxy_script_fetcher| specifies the dependency to use for downloading + // any PAC scripts. The resulting ProxyService will take ownership of it. + // + // |host_resolver| points to the host resolving dependency the PAC script + // should use for any DNS queries. It must remain valid throughout the + // lifetime of the ProxyService. + // // ########################################################################## // # See the warnings in net/proxy/proxy_resolver_v8.h describing the // # multi-threading model. 
In order for this to be safe to use, *ALL* the @@ -164,9 +169,9 @@ class ProxyService : public base::RefCountedThreadSafe<ProxyService>, static ProxyService* CreateUsingV8ProxyResolver( ProxyConfigService* proxy_config_service, size_t num_pac_threads, - URLRequestContext* url_request_context, - NetLog* net_log, - MessageLoop* io_loop); + ProxyScriptFetcher* proxy_script_fetcher, + HostResolver* host_resolver, + NetLog* net_log); // Same as CreateUsingV8ProxyResolver, except it uses system libraries // for evaluating the PAC script if available, otherwise skips |