author | eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-10-16 23:10:33 +0000
---|---|---
committer | eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-10-16 23:10:33 +0000
commit | 869336177906f183c223f304c8455b424d64b54f (patch) |
tree | 44afa6f07a120acc8c383a857b9e504df74c04ae |
parent | 93bd86a6b4678b9dfc5b263107a90131974ef529 (diff) |
download | chromium_src-869336177906f183c223f304c8455b424d64b54f.zip chromium_src-869336177906f183c223f304c8455b424d64b54f.tar.gz chromium_src-869336177906f183c223f304c8455b424d64b54f.tar.bz2 |
Cleanup: Move the ProxyScriptFetcher registry from being a global in net/ to living in IOThread.
I had to make some other changes to make this fit well: I moved ProxyScriptFetcherImpl to its own set of files, and added an IOThread::Get() accessor to avoid plumbing through several layers in connection_tester.
I find having the registry live in IOThread preferable: globals in net/ limit the ability to run safely on multiple threads, and also lead to confusion about what needs to be called at shutdown.
Review URL: http://codereview.chromium.org/3823001
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@62876 0039d316-1c4b-4281-b951-d872f2087c98
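In concrete terms, the new registry is RAII-style registration: each fetcher adds itself to IOThread's set on construction, removes itself on destruction, and IOThread::CleanUp() cancels whatever is still alive. A minimal sketch of that pattern follows; the class and member names are taken from the patch below, but the surrounding scaffolding is illustrative, not the actual Chromium code.

```cpp
// Sketch (not the real io_thread.cc) of the registration pattern this CL adds.
#include <set>

class ProxyScriptFetcherImpl {
 public:
  virtual ~ProxyScriptFetcherImpl() {}
  virtual void Cancel() {}  // Aborts any in-progress PAC fetch.
};

class IOThread {
 public:
  // The returned fetcher is owned by the caller (ultimately the ProxyService),
  // but is tracked here so it can be aborted at shutdown.
  ProxyScriptFetcherImpl* CreateAndRegisterProxyScriptFetcher() {
    return new ManagedProxyScriptFetcher(this);
  }

  void CleanUp() {
    // Cancel -- do not delete -- every live fetcher; deletion remains the
    // owning ProxyService's job.
    for (ProxyScriptFetchers::const_iterator it = fetchers_.begin();
         it != fetchers_.end(); ++it) {
      (*it)->Cancel();
    }
  }

 private:
  class ManagedProxyScriptFetcher : public ProxyScriptFetcherImpl {
   public:
    explicit ManagedProxyScriptFetcher(IOThread* io_thread)
        : io_thread_(io_thread) {
      io_thread_->fetchers_.insert(this);  // Register on construction.
    }
    virtual ~ManagedProxyScriptFetcher() {
      io_thread_->fetchers_.erase(this);   // Deregister on destruction.
    }
   private:
    IOThread* io_thread_;
  };

  typedef std::set<ManagedProxyScriptFetcher*> ProxyScriptFetchers;
  ProxyScriptFetchers fetchers_;
};
```

Cancelling rather than deleting matters because the ProxyService still owns the fetcher; cancellation merely drops the fetcher's in-progress request and its hold on the URLRequestContext, which is what breaks the cycle before shutdown.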
-rw-r--r-- | chrome/browser/dom_ui/net_internals_ui.cc | 2
-rw-r--r-- | chrome/browser/io_thread.cc | 46
-rw-r--r-- | chrome/browser/io_thread.h | 17
-rw-r--r-- | chrome/browser/net/chrome_url_request_context.cc | 10
-rw-r--r-- | chrome/browser/net/connection_tester.cc | 18
-rw-r--r-- | chrome/browser/net/connection_tester.h | 6
-rw-r--r-- | chrome/browser/net/connection_tester_unittest.cc | 7
-rw-r--r-- | net/net.gyp | 5
-rw-r--r-- | net/proxy/proxy_script_fetcher.h | 24
-rw-r--r-- | net/proxy/proxy_script_fetcher_impl.cc (renamed from net/proxy/proxy_script_fetcher.cc) | 166
-rw-r--r-- | net/proxy/proxy_script_fetcher_impl.h | 119
-rw-r--r-- | net/proxy/proxy_script_fetcher_impl_unittest.cc (renamed from net/proxy/proxy_script_fetcher_unittest.cc) | 52
-rw-r--r-- | net/proxy/proxy_service.cc | 19
-rw-r--r-- | net/proxy/proxy_service.h | 17
14 files changed, 277 insertions, 231 deletions
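For context on the proxy_service.cc and chrome_url_request_context.cc hunks below: ProxyService::CreateUsingV8ProxyResolver() now takes a ProxyScriptFetcher* and HostResolver* directly instead of a URLRequestContext* plus the IO message loop. The following self-contained sketch contrasts the old and new call shapes; every type in it is a hypothetical stand-in so the snippet compiles on its own, and the real signatures are the ones shown in the diff.

```cpp
// Hypothetical stand-in types; only the call shape is the point here.
struct HostResolver {};
struct NetLog {};
struct ProxyConfigService {};
struct ProxyScriptFetcher {};

struct URLRequestContext {
  HostResolver* host_resolver() { return &resolver_; }
  HostResolver resolver_;
};

struct IOThread {
  // Mirrors IOThread::CreateAndRegisterProxyScriptFetcher() from the patch:
  // the fetcher is handed to the caller but tracked for shutdown-time Cancel().
  ProxyScriptFetcher* CreateAndRegisterProxyScriptFetcher(URLRequestContext*) {
    return new ProxyScriptFetcher();  // Ownership passes to the ProxyService.
  }
};

struct ProxyService {
  // Mirrors the new signature in net/proxy/proxy_service.h.
  static ProxyService* CreateUsingV8ProxyResolver(ProxyConfigService* config,
                                                  unsigned num_pac_threads,
                                                  ProxyScriptFetcher* fetcher,
                                                  HostResolver* resolver,
                                                  NetLog* net_log) {
    (void)config; (void)num_pac_threads; (void)fetcher;
    (void)resolver; (void)net_log;
    return new ProxyService();
  }
};

ProxyService* CreateProxyServiceSketch(ProxyConfigService* config,
                                       URLRequestContext* context,
                                       IOThread* io_thread,
                                       NetLog* net_log) {
  // Old call shape (for contrast):
  //   CreateUsingV8ProxyResolver(config, num_pac_threads,
  //                              context, net_log, MessageLoop::current());
  return ProxyService::CreateUsingV8ProxyResolver(
      config, /*num_pac_threads=*/0,
      io_thread->CreateAndRegisterProxyScriptFetcher(context),
      context->host_resolver(),
      net_log);
}
```

Because the fetcher is created through IOThread, shutdown can still cancel it even though the ProxyService owns it, which is exactly the plumbing the commit message describes.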
diff --git a/chrome/browser/dom_ui/net_internals_ui.cc b/chrome/browser/dom_ui/net_internals_ui.cc index 6502b70..908c8dad 100644 --- a/chrome/browser/dom_ui/net_internals_ui.cc +++ b/chrome/browser/dom_ui/net_internals_ui.cc @@ -814,7 +814,7 @@ void NetInternalsMessageHandler::IOThreadImpl::OnStartConnectionTests( // For example, turn "www.google.com" into "http://www.google.com". GURL url(URLFixerUpper::FixupURL(UTF16ToUTF8(url_str), std::string())); - connection_tester_.reset(new ConnectionTester(this)); + connection_tester_.reset(new ConnectionTester(this, io_thread_)); connection_tester_->RunAllTests(url); } diff --git a/chrome/browser/io_thread.cc b/chrome/browser/io_thread.cc index 8a18541..ae64b80 100644 --- a/chrome/browser/io_thread.cc +++ b/chrome/browser/io_thread.cc @@ -7,6 +7,7 @@ #include "base/command_line.h" #include "base/leak_tracker.h" #include "base/logging.h" +#include "base/stl_util-inl.h" #include "base/string_number_conversions.h" #include "base/string_split.h" #include "base/string_util.h" @@ -15,22 +16,22 @@ #include "chrome/browser/gpu_process_host.h" #include "chrome/browser/net/chrome_net_log.h" #include "chrome/browser/net/connect_interceptor.h" -#include "chrome/browser/net/predictor_api.h" #include "chrome/browser/net/passive_log_collector.h" +#include "chrome/browser/net/predictor_api.h" #include "chrome/common/chrome_switches.h" #include "chrome/common/net/url_fetcher.h" #include "net/base/dnsrr_resolver.h" -#include "net/base/mapped_host_resolver.h" #include "net/base/host_cache.h" #include "net/base/host_resolver.h" #include "net/base/host_resolver_impl.h" +#include "net/base/mapped_host_resolver.h" #include "net/base/net_util.h" #include "net/http/http_auth_filter.h" #include "net/http/http_auth_handler_factory.h" #if defined(USE_NSS) #include "net/ocsp/nss_ocsp.h" #endif // defined(USE_NSS) -#include "net/proxy/proxy_script_fetcher.h" +#include "net/proxy/proxy_script_fetcher_impl.h" namespace { @@ -113,6 +114,34 @@ class LoggingNetworkChangeObserver } // namespace +// This is a wrapper class around ProxyScriptFetcherImpl that will +// keep track of live instances. +class IOThread::ManagedProxyScriptFetcher + : public net::ProxyScriptFetcherImpl { + public: + ManagedProxyScriptFetcher(URLRequestContext* context, + IOThread* io_thread) + : net::ProxyScriptFetcherImpl(context), + io_thread_(io_thread) { + DCHECK(!ContainsKey(*fetchers(), this)); + fetchers()->insert(this); + } + + virtual ~ManagedProxyScriptFetcher() { + DCHECK(ContainsKey(*fetchers(), this)); + fetchers()->erase(this); + } + + private: + ProxyScriptFetchers* fetchers() { + return &io_thread_->fetchers_; + } + + IOThread* io_thread_; + + DISALLOW_COPY_AND_ASSIGN(ManagedProxyScriptFetcher); +}; + // The IOThread object must outlive any tasks posted to the IO thread before the // Quit task. DISABLE_RUNNABLE_METHOD_REFCOUNT(IOThread); @@ -165,6 +194,11 @@ void IOThread::ChangedToOnTheRecord() { &IOThread::ChangedToOnTheRecordOnIOThread)); } +net::ProxyScriptFetcher* IOThread::CreateAndRegisterProxyScriptFetcher( + URLRequestContext* url_request_context) { + return new ManagedProxyScriptFetcher(url_request_context, this); +} + void IOThread::Init() { BrowserProcessSubThread::Init(); @@ -221,7 +255,11 @@ void IOThread::CleanUp() { globals_->host_resolver.get()->GetAsHostResolverImpl()->Shutdown(); } - net::EnsureNoProxyScriptFetches(); + // Break any cycles between the ProxyScriptFetcher and URLRequestContext. 
+ for (ProxyScriptFetchers::const_iterator it = fetchers_.begin(); + it != fetchers_.end(); ++it) { + (*it)->Cancel(); + } // We will delete the NetLog as part of CleanUpAfterMessageLoopDestruction() // in case any of the message loop destruction observers try to access it. diff --git a/chrome/browser/io_thread.h b/chrome/browser/io_thread.h index 79891c9..2baec87 100644 --- a/chrome/browser/io_thread.h +++ b/chrome/browser/io_thread.h @@ -6,6 +6,8 @@ #define CHROME_BROWSER_IO_THREAD_H_ #pragma once +#include <set> + #include "base/basictypes.h" #include "base/ref_counted.h" #include "base/scoped_ptr.h" @@ -16,6 +18,7 @@ class ChromeNetLog; class ListValue; +class URLRequestContext; namespace chrome_browser_net { class ConnectInterceptor; @@ -26,6 +29,7 @@ namespace net { class DnsRRResolver; class HostResolver; class HttpAuthHandlerFactory; +class ProxyScriptFetcher; class URLSecurityManager; } // namespace net @@ -66,12 +70,22 @@ class IOThread : public BrowserProcessSubThread { // IOThread's message loop. void ChangedToOnTheRecord(); + // Creates a ProxyScriptFetcherImpl which will be automatically aborted + // during shutdown. + // This is used to avoid cycles between the ProxyScriptFetcher and the + // URLRequestContext that owns it (indirectly via the ProxyService). + net::ProxyScriptFetcher* CreateAndRegisterProxyScriptFetcher( + URLRequestContext* url_request_context); + protected: virtual void Init(); virtual void CleanUp(); virtual void CleanUpAfterMessageLoopDestruction(); private: + class ManagedProxyScriptFetcher; + typedef std::set<ManagedProxyScriptFetcher*> ProxyScriptFetchers; + net::HttpAuthHandlerFactory* CreateDefaultAuthHandlerFactory( net::HostResolver* resolver); @@ -115,6 +129,9 @@ class IOThread : public BrowserProcessSubThread { chrome_browser_net::ConnectInterceptor* speculative_interceptor_; chrome_browser_net::Predictor* predictor_; + // List of live ProxyScriptFetchers. 
+ ProxyScriptFetchers fetchers_; + DISALLOW_COPY_AND_ASSIGN(IOThread); }; diff --git a/chrome/browser/net/chrome_url_request_context.cc b/chrome/browser/net/chrome_url_request_context.cc index 0134717..cc1fe93 100644 --- a/chrome/browser/net/chrome_url_request_context.cc +++ b/chrome/browser/net/chrome_url_request_context.cc @@ -95,7 +95,7 @@ net::ProxyService* CreateProxyService( URLRequestContext* context, net::ProxyConfigService* proxy_config_service, const CommandLine& command_line, - MessageLoop* io_loop) { + IOThread* io_thread) { CheckCurrentlyOnIOThread(); bool use_v8 = !command_line.HasSwitch(switches::kWinHttpProxyResolver); @@ -126,9 +126,9 @@ net::ProxyService* CreateProxyService( return net::ProxyService::CreateUsingV8ProxyResolver( proxy_config_service, num_pac_threads, - context, - net_log, - io_loop); + io_thread->CreateAndRegisterProxyScriptFetcher(context), + context->host_resolver(), + net_log); } return net::ProxyService::CreateUsingSystemProxyResolver( @@ -271,7 +271,7 @@ ChromeURLRequestContext* FactoryForOriginal::Create() { context, proxy_config_service_.release(), command_line, - MessageLoop::current() /*io_loop*/)); + io_thread())); net::HttpCache::DefaultBackend* backend = new net::HttpCache::DefaultBackend( net::DISK_CACHE, disk_cache_path_, cache_size_, diff --git a/chrome/browser/net/connection_tester.cc b/chrome/browser/net/connection_tester.cc index 8658240..f8c7911 100644 --- a/chrome/browser/net/connection_tester.cc +++ b/chrome/browser/net/connection_tester.cc @@ -10,6 +10,7 @@ #include "base/message_loop.h" #include "base/utf_string_conversions.h" #include "chrome/browser/importer/firefox_proxy_settings.h" +#include "chrome/browser/io_thread.h" #include "chrome/common/chrome_switches.h" #include "net/base/cookie_monster.h" #include "net/base/dnsrr_resolver.h" @@ -37,6 +38,9 @@ namespace { // to the specified "experiment". class ExperimentURLRequestContext : public URLRequestContext { public: + explicit ExperimentURLRequestContext(IOThread* io_thread) + : io_thread_(io_thread) {} + int Init(const ConnectionTester::Experiment& experiment) { int rv; @@ -161,7 +165,10 @@ class ExperimentURLRequestContext : public URLRequestContext { *proxy_service = net::ProxyService::CreateUsingV8ProxyResolver( config_service.release(), - 0u, this, NULL, MessageLoop::current()); + 0u, + io_thread_->CreateAndRegisterProxyScriptFetcher(this), + host_resolver(), + NULL); return net::OK; } @@ -205,6 +212,8 @@ class ExperimentURLRequestContext : public URLRequestContext { return net::ERR_FAILED; } + + IOThread* io_thread_; }; } // namespace @@ -291,7 +300,7 @@ void ConnectionTester::TestRunner::OnResponseCompleted(URLRequest* request) { void ConnectionTester::TestRunner::Run(const Experiment& experiment) { // Try to create a URLRequestContext for this experiment. scoped_refptr<ExperimentURLRequestContext> context = - new ExperimentURLRequestContext(); + new ExperimentURLRequestContext(tester_->io_thread_); int rv = context->Init(experiment); if (rv != net::OK) { // Complete the experiment with a failure. 
@@ -307,9 +316,10 @@ void ConnectionTester::TestRunner::Run(const Experiment& experiment) { // ConnectionTester ---------------------------------------------------------- -ConnectionTester::ConnectionTester(Delegate* delegate) - : delegate_(delegate) { +ConnectionTester::ConnectionTester(Delegate* delegate, IOThread* io_thread) + : delegate_(delegate), io_thread_(io_thread) { DCHECK(delegate); + DCHECK(io_thread); } ConnectionTester::~ConnectionTester() { diff --git a/chrome/browser/net/connection_tester.h b/chrome/browser/net/connection_tester.h index 4118fbc..e0a7446 100644 --- a/chrome/browser/net/connection_tester.h +++ b/chrome/browser/net/connection_tester.h @@ -13,6 +13,8 @@ #include "googleurl/src/gurl.h" #include "net/base/completion_callback.h" +class IOThread; + // ConnectionTester runs a suite of tests (also called "experiments"), // to try and discover why loading a particular URL is failing with an error // code. @@ -123,7 +125,7 @@ class ConnectionTester { // Constructs a ConnectionTester that notifies test progress to |delegate|. // |delegate| is owned by the caller, and must remain valid for the lifetime // of ConnectionTester. - explicit ConnectionTester(Delegate* delegate); + ConnectionTester(Delegate* delegate, IOThread* io_thread); // Note that destruction cancels any in-progress tests. ~ConnectionTester(); @@ -169,6 +171,8 @@ class ConnectionTester { // of the list is the one currently in progress. ExperimentList remaining_experiments_; + IOThread* io_thread_; + DISALLOW_COPY_AND_ASSIGN(ConnectionTester); }; diff --git a/chrome/browser/net/connection_tester_unittest.cc b/chrome/browser/net/connection_tester_unittest.cc index 686c601..fe6e8cc 100644 --- a/chrome/browser/net/connection_tester_unittest.cc +++ b/chrome/browser/net/connection_tester_unittest.cc @@ -4,6 +4,7 @@ #include "chrome/browser/net/connection_tester.h" +#include "chrome/browser/io_thread.h" #include "net/base/mock_host_resolver.h" #include "net/test/test_server.h" #include "testing/gtest/include/gtest/gtest.h" @@ -88,12 +89,13 @@ class ConnectionTesterTest : public PlatformTest { net::TestServer test_server_; ConnectionTesterDelegate test_delegate_; MessageLoop message_loop_; + IOThread io_thread_; // Needed for creating ProxyScriptFetchers. }; TEST_F(ConnectionTesterTest, RunAllTests) { ASSERT_TRUE(test_server_.Start()); - ConnectionTester tester(&test_delegate_); + ConnectionTester tester(&test_delegate_, &io_thread_); // Start the test suite on URL "echoall". // TODO(eroman): Is this URL right? @@ -117,7 +119,8 @@ TEST_F(ConnectionTesterTest, RunAllTests) { TEST_F(ConnectionTesterTest, DeleteWhileInProgress) { ASSERT_TRUE(test_server_.Start()); - scoped_ptr<ConnectionTester> tester(new ConnectionTester(&test_delegate_)); + scoped_ptr<ConnectionTester> tester( + new ConnectionTester(&test_delegate_, &io_thread_)); // Start the test suite on URL "echoall". // TODO(eroman): Is this URL right? 
diff --git a/net/net.gyp b/net/net.gyp index 768cb6e..699f093 100644 --- a/net/net.gyp +++ b/net/net.gyp @@ -541,8 +541,9 @@ 'proxy/proxy_resolver_winhttp.cc', 'proxy/proxy_resolver_winhttp.h', 'proxy/proxy_retry_info.h', - 'proxy/proxy_script_fetcher.cc', 'proxy/proxy_script_fetcher.h', + 'proxy/proxy_script_fetcher_impl.cc', + 'proxy/proxy_script_fetcher_impl.h', 'proxy/proxy_server.cc', 'proxy/proxy_server_mac.cc', 'proxy/proxy_server.h', @@ -893,7 +894,7 @@ 'proxy/proxy_list_unittest.cc', 'proxy/proxy_resolver_js_bindings_unittest.cc', 'proxy/proxy_resolver_v8_unittest.cc', - 'proxy/proxy_script_fetcher_unittest.cc', + 'proxy/proxy_script_fetcher_impl_unittest.cc', 'proxy/proxy_server_unittest.cc', 'proxy/proxy_service_unittest.cc', 'proxy/sync_host_resolver_bridge_unittest.cc', diff --git a/net/proxy/proxy_script_fetcher.h b/net/proxy/proxy_script_fetcher.h index 3f2602f..0a28065 100644 --- a/net/proxy/proxy_script_fetcher.h +++ b/net/proxy/proxy_script_fetcher.h @@ -10,7 +10,6 @@ #define NET_PROXY_PROXY_SCRIPT_FETCHER_H_ #pragma once -#include "base/gtest_prod_util.h" #include "base/string16.h" #include "net/base/completion_callback.h" @@ -19,6 +18,8 @@ class URLRequestContext; namespace net { +// Interface for downloading a PAC script. Implementations can enforce +// timeouts, maximum size constraints, content encoding, etc.. class ProxyScriptFetcher { public: // Destruction should cancel any outstanding requests. @@ -48,29 +49,8 @@ class ProxyScriptFetcher { // Returns the request context that this fetcher uses to issue downloads, // or NULL. virtual URLRequestContext* GetRequestContext() { return NULL; } - - // Create a ProxyScriptFetcher that uses |url_request_context|. - static ProxyScriptFetcher* Create(URLRequestContext* url_request_context); - - // -------------------------------------------------------------------------- - // Testing helpers (only available to unit-tests). - // -------------------------------------------------------------------------- - private: - FRIEND_TEST_ALL_PREFIXES(ProxyScriptFetcherTest, Hang); - FRIEND_TEST_ALL_PREFIXES(ProxyScriptFetcherTest, TooLarge); - - // Sets the maximum duration for a fetch to |timeout_ms|. Returns the previous - // bound. - static int SetTimeoutConstraintForUnittest(int timeout_ms); - - // Sets the maximum response size for a fetch to |size_bytes|. Returns the - // previous bound. - static size_t SetSizeConstraintForUnittest(size_t size_bytes); }; -// Cancels all current proxy script fetches. -void EnsureNoProxyScriptFetches(); - } // namespace net #endif // NET_PROXY_PROXY_SCRIPT_FETCHER_H_ diff --git a/net/proxy/proxy_script_fetcher.cc b/net/proxy/proxy_script_fetcher_impl.cc index c920331..035622f 100644 --- a/net/proxy/proxy_script_fetcher.cc +++ b/net/proxy/proxy_script_fetcher_impl.cc @@ -2,40 +2,35 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
-#include "net/proxy/proxy_script_fetcher.h" +#include "net/proxy/proxy_script_fetcher_impl.h" -#include <set> - -#include "base/basictypes.h" #include "base/compiler_specific.h" #include "base/i18n/icu_string_conversions.h" -#include "base/lazy_instance.h" #include "base/logging.h" #include "base/message_loop.h" -#include "base/ref_counted.h" -#include "base/stl_util-inl.h" #include "base/string_util.h" -#include "base/utf_string_conversions.h" #include "net/base/io_buffer.h" #include "net/base/load_flags.h" #include "net/base/net_errors.h" #include "net/http/http_response_headers.h" -#include "net/url_request/url_request.h" +#include "net/url_request/url_request_context.h" // TODO(eroman): // - Support auth-prompts. +// TODO(eroman): Use a state machine rather than recursion. Recursion could +// lead to lots of frames. namespace net { namespace { // The maximum size (in bytes) allowed for a PAC script. Responses exceeding // this will fail with ERR_FILE_TOO_BIG. -int max_response_bytes = 1048576; // 1 megabyte +const int kDefaultMaxResponseBytes = 1048576; // 1 megabyte // The maximum duration (in milliseconds) allowed for fetching the PAC script. // Responses exceeding this will fail with ERR_TIMED_OUT. -int max_duration_ms = 300000; // 5 minutes +const int kDefaultMaxDurationMs = 300000; // 5 minutes // Returns true if |mime_type| is one of the known PAC mime type. bool IsPacMimeType(const std::string& mime_type) { @@ -74,124 +69,8 @@ void ConvertResponseToUTF16(const std::string& charset, utf16); } -class ProxyScriptFetcherTracker { - public: - void AddFetcher(ProxyScriptFetcher* fetcher) { - DCHECK(!ContainsKey(fetchers_, fetcher)); - fetchers_.insert(fetcher); - } - - void RemoveFetcher(ProxyScriptFetcher* fetcher) { - DCHECK(ContainsKey(fetchers_, fetcher)); - fetchers_.erase(fetcher); - } - - void CancelAllFetches() { - for (std::set<ProxyScriptFetcher*>::const_iterator it = fetchers_.begin(); - it != fetchers_.end(); ++it) { - (*it)->Cancel(); - } - } - - private: - ProxyScriptFetcherTracker(); - ~ProxyScriptFetcherTracker(); - - friend struct base::DefaultLazyInstanceTraits<ProxyScriptFetcherTracker>; - - std::set<ProxyScriptFetcher*> fetchers_; - DISALLOW_COPY_AND_ASSIGN(ProxyScriptFetcherTracker); -}; - -ProxyScriptFetcherTracker::ProxyScriptFetcherTracker() {} -ProxyScriptFetcherTracker::~ProxyScriptFetcherTracker() {} - -base::LazyInstance<ProxyScriptFetcherTracker> - g_fetcher_tracker(base::LINKER_INITIALIZED); - } // namespace -void EnsureNoProxyScriptFetches() { - g_fetcher_tracker.Get().CancelAllFetches(); -} - -class ProxyScriptFetcherImpl : public ProxyScriptFetcher, - public URLRequest::Delegate { - public: - // Creates a ProxyScriptFetcher that issues requests through - // |url_request_context|. |url_request_context| must remain valid for the - // lifetime of ProxyScriptFetcherImpl. 
- explicit ProxyScriptFetcherImpl(URLRequestContext* url_request_context); - - virtual ~ProxyScriptFetcherImpl(); - - // ProxyScriptFetcher methods: - - virtual int Fetch(const GURL& url, string16* text, - CompletionCallback* callback); - virtual void Cancel(); - virtual URLRequestContext* GetRequestContext(); - - // URLRequest::Delegate methods: - - virtual void OnAuthRequired(URLRequest* request, - AuthChallengeInfo* auth_info); - virtual void OnSSLCertificateError(URLRequest* request, int cert_error, - X509Certificate* cert); - virtual void OnResponseStarted(URLRequest* request); - virtual void OnReadCompleted(URLRequest* request, int num_bytes); - virtual void OnResponseCompleted(URLRequest* request); - - private: - // Read more bytes from the response. - void ReadBody(URLRequest* request); - - // Called once the request has completed to notify the caller of - // |response_code_| and |response_text_|. - void FetchCompleted(); - - // Clear out the state for the current request. - void ResetCurRequestState(); - - // Callback for time-out task of request with id |id|. - void OnTimeout(int id); - - // Factory for creating the time-out task. This takes care of revoking - // outstanding tasks when |this| is deleted. - ScopedRunnableMethodFactory<ProxyScriptFetcherImpl> task_factory_; - - // The context used for making network requests. - URLRequestContext* url_request_context_; - - // Buffer that URLRequest writes into. - enum { kBufSize = 4096 }; - scoped_refptr<net::IOBuffer> buf_; - - // The next ID to use for |cur_request_| (monotonically increasing). - int next_id_; - - // The current (in progress) request, or NULL. - scoped_ptr<URLRequest> cur_request_; - - // State for current request (only valid when |cur_request_| is not NULL): - - // Unique ID for the current request. - int cur_request_id_; - - // Callback to invoke on completion of the fetch. - CompletionCallback* callback_; - - // Holds the error condition that was hit on the current request, or OK. - int result_code_; - - // Holds the bytes read so far. Will not exceed |max_response_bytes|. - std::string bytes_read_so_far_; - - // This buffer is owned by the owner of |callback|, and will be filled with - // UTF16 response on completion. - string16* result_text_; -}; - ProxyScriptFetcherImpl::ProxyScriptFetcherImpl( URLRequestContext* url_request_context) : ALLOW_THIS_IN_INITIALIZER_LIST(task_factory_(this)), @@ -202,13 +81,13 @@ ProxyScriptFetcherImpl::ProxyScriptFetcherImpl( cur_request_id_(0), callback_(NULL), result_code_(OK), - result_text_(NULL) { + result_text_(NULL), + max_response_bytes_(kDefaultMaxResponseBytes), + max_duration_(base::TimeDelta::FromMilliseconds(kDefaultMaxDurationMs)) { DCHECK(url_request_context); - g_fetcher_tracker.Get().AddFetcher(this); } ProxyScriptFetcherImpl::~ProxyScriptFetcherImpl() { - g_fetcher_tracker.Get().RemoveFetcher(this); // The URLRequest's destructor will cancel the outstanding request, and // ensure that the delegate (this) is not called again. } @@ -244,7 +123,7 @@ int ProxyScriptFetcherImpl::Fetch(const GURL& url, MessageLoop::current()->PostDelayedTask(FROM_HERE, task_factory_.NewRunnableMethod(&ProxyScriptFetcherImpl::OnTimeout, cur_request_id_), - static_cast<int>(max_duration_ms)); + static_cast<int>(max_duration_.InMilliseconds())); // Start the request. cur_request_->Start(); @@ -320,7 +199,7 @@ void ProxyScriptFetcherImpl::OnReadCompleted(URLRequest* request, if (num_bytes > 0) { // Enforce maximum size bound. 
if (num_bytes + bytes_read_so_far_.size() > - static_cast<size_t>(max_response_bytes)) { + static_cast<size_t>(max_response_bytes_)) { result_code_ = ERR_FILE_TOO_BIG; request->Cancel(); return; @@ -391,26 +270,17 @@ void ProxyScriptFetcherImpl::OnTimeout(int id) { cur_request_->Cancel(); } -// static -ProxyScriptFetcher* ProxyScriptFetcher::Create( - URLRequestContext* url_request_context) { - return new ProxyScriptFetcherImpl(url_request_context); -} - -// static -int ProxyScriptFetcher::SetTimeoutConstraintForUnittest( - int timeout_ms) { - int prev = max_duration_ms; - max_duration_ms = timeout_ms; +base::TimeDelta ProxyScriptFetcherImpl::SetTimeoutConstraint( + base::TimeDelta timeout) { + base::TimeDelta prev = max_duration_; + max_duration_ = timeout; return prev; } -// static -size_t ProxyScriptFetcher::SetSizeConstraintForUnittest(size_t size_bytes) { - size_t prev = max_response_bytes; - max_response_bytes = size_bytes; +size_t ProxyScriptFetcherImpl::SetSizeConstraint(size_t size_bytes) { + size_t prev = max_response_bytes_; + max_response_bytes_ = size_bytes; return prev; } - } // namespace net diff --git a/net/proxy/proxy_script_fetcher_impl.h b/net/proxy/proxy_script_fetcher_impl.h new file mode 100644 index 0000000..7d2e1c6 --- /dev/null +++ b/net/proxy/proxy_script_fetcher_impl.h @@ -0,0 +1,119 @@ +// Copyright (c) 2010 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef NET_PROXY_PROXY_SCRIPT_FETCHER_IMPL_H_ +#define NET_PROXY_PROXY_SCRIPT_FETCHER_IMPL_H_ +#pragma once + +#include "base/basictypes.h" +#include "base/ref_counted.h" +#include "base/string16.h" +#include "base/task.h" +#include "base/time.h" +#include "net/proxy/proxy_script_fetcher.h" +#include "net/url_request/url_request.h" + +class GURL; +class URLRequestContext; +class X509Certificate; + +namespace net { + +// Implementation of ProxyScriptFetcher that downloads scripts using the +// specified request context. +class ProxyScriptFetcherImpl : public ProxyScriptFetcher, + public URLRequest::Delegate { + public: + // Creates a ProxyScriptFetcher that issues requests through + // |url_request_context|. |url_request_context| must remain valid for the + // lifetime of ProxyScriptFetcherImpl. + // Note that while a request is in progress, we will be holding a reference + // to |url_request_context|. Be careful not to create cycles between the + // fetcher and the context; you can break such cycles by calling Cancel(). + explicit ProxyScriptFetcherImpl(URLRequestContext* url_request_context); + + virtual ~ProxyScriptFetcherImpl(); + + // ProxyScriptFetcher methods: + + virtual int Fetch(const GURL& url, string16* text, + CompletionCallback* callback); + virtual void Cancel(); + virtual URLRequestContext* GetRequestContext(); + + // URLRequest::Delegate methods: + + virtual void OnAuthRequired(URLRequest* request, + AuthChallengeInfo* auth_info); + virtual void OnSSLCertificateError(URLRequest* request, int cert_error, + X509Certificate* cert); + virtual void OnResponseStarted(URLRequest* request); + virtual void OnReadCompleted(URLRequest* request, int num_bytes); + virtual void OnResponseCompleted(URLRequest* request); + + // Used by unit-tests to modify the default limits. + base::TimeDelta SetTimeoutConstraint(base::TimeDelta timeout); + size_t SetSizeConstraint(size_t size_bytes); + + private: + // Read more bytes from the response. 
+ void ReadBody(URLRequest* request); + + // Called once the request has completed to notify the caller of + // |response_code_| and |response_text_|. + void FetchCompleted(); + + // Clear out the state for the current request. + void ResetCurRequestState(); + + // Callback for time-out task of request with id |id|. + void OnTimeout(int id); + + // Factory for creating the time-out task. This takes care of revoking + // outstanding tasks when |this| is deleted. + ScopedRunnableMethodFactory<ProxyScriptFetcherImpl> task_factory_; + + // The context used for making network requests. + URLRequestContext* url_request_context_; + + // Buffer that URLRequest writes into. + enum { kBufSize = 4096 }; + scoped_refptr<net::IOBuffer> buf_; + + // The next ID to use for |cur_request_| (monotonically increasing). + int next_id_; + + // The current (in progress) request, or NULL. + scoped_ptr<URLRequest> cur_request_; + + // State for current request (only valid when |cur_request_| is not NULL): + + // Unique ID for the current request. + int cur_request_id_; + + // Callback to invoke on completion of the fetch. + CompletionCallback* callback_; + + // Holds the error condition that was hit on the current request, or OK. + int result_code_; + + // Holds the bytes read so far. Will not exceed |max_response_bytes|. + std::string bytes_read_so_far_; + + // This buffer is owned by the owner of |callback|, and will be filled with + // UTF16 response on completion. + string16* result_text_; + + // The maximum number of bytes to allow in responses. + size_t max_response_bytes_; + + // The maximum amount of time to wait for download to complete. + base::TimeDelta max_duration_; + + DISALLOW_COPY_AND_ASSIGN(ProxyScriptFetcherImpl); +}; + +} // namespace net + +#endif // NET_PROXY_PROXY_SCRIPT_FETCHER_IMPL_H_ diff --git a/net/proxy/proxy_script_fetcher_unittest.cc b/net/proxy/proxy_script_fetcher_impl_unittest.cc index ce54a9b..2634f99 100644 --- a/net/proxy/proxy_script_fetcher_unittest.cc +++ b/net/proxy/proxy_script_fetcher_impl_unittest.cc @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "net/proxy/proxy_script_fetcher.h" +#include "net/proxy/proxy_script_fetcher_impl.h" #include "base/file_path.h" #include "base/compiler_specific.h" @@ -67,9 +67,9 @@ GURL GetTestFileUrl(const std::string& relpath) { return GURL(base_url.spec() + "/" + relpath); } -class ProxyScriptFetcherTest : public PlatformTest { +class ProxyScriptFetcherImplTest : public PlatformTest { public: - ProxyScriptFetcherTest() + ProxyScriptFetcherImplTest() : test_server_(net::TestServer::TYPE_HTTP, FilePath(kDocRoot)) { } @@ -77,10 +77,10 @@ class ProxyScriptFetcherTest : public PlatformTest { net::TestServer test_server_; }; -TEST_F(ProxyScriptFetcherTest, FileUrl) { +TEST_F(ProxyScriptFetcherImplTest, FileUrl) { scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); { // Fetch a non-existent file. string16 text; @@ -104,12 +104,12 @@ TEST_F(ProxyScriptFetcherTest, FileUrl) { // Note that all mime types are allowed for PAC file, to be consistent // with other browsers. 
-TEST_F(ProxyScriptFetcherTest, HttpMimeType) { +TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); { // Fetch a PAC with mime type "text/plain" GURL url(test_server_.GetURL("files/pac.txt")); @@ -140,12 +140,12 @@ TEST_F(ProxyScriptFetcherTest, HttpMimeType) { } } -TEST_F(ProxyScriptFetcherTest, HttpStatusCode) { +TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); { // Fetch a PAC which gives a 500 -- FAIL GURL url(test_server_.GetURL("files/500.pac")); @@ -167,12 +167,12 @@ TEST_F(ProxyScriptFetcherTest, HttpStatusCode) { } } -TEST_F(ProxyScriptFetcherTest, ContentDisposition) { +TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); // Fetch PAC scripts via HTTP with a Content-Disposition header -- should // have no effect. @@ -185,12 +185,12 @@ TEST_F(ProxyScriptFetcherTest, ContentDisposition) { EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text); } -TEST_F(ProxyScriptFetcherTest, NoCache) { +TEST_F(ProxyScriptFetcherImplTest, NoCache) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour. GURL url(test_server_.GetURL("files/cacheable_1hr.pac")); @@ -218,15 +218,15 @@ TEST_F(ProxyScriptFetcherTest, NoCache) { } } -TEST_F(ProxyScriptFetcherTest, TooLarge) { +TEST_F(ProxyScriptFetcherImplTest, TooLarge) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; - scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + scoped_ptr<ProxyScriptFetcherImpl> pac_fetcher( + new ProxyScriptFetcherImpl(context)); // Set the maximum response size to 50 bytes. - int prev_size = ProxyScriptFetcher::SetSizeConstraintForUnittest(50); + int prev_size = pac_fetcher->SetSizeConstraint(50); // These two URLs are the same file, but are http:// vs file:// GURL urls[] = { @@ -247,7 +247,7 @@ TEST_F(ProxyScriptFetcherTest, TooLarge) { } // Restore the original size bound. - ProxyScriptFetcher::SetSizeConstraintForUnittest(prev_size); + pac_fetcher->SetSizeConstraint(prev_size); { // Make sure we can still fetch regular URLs. GURL url(test_server_.GetURL("files/pac.nsproxy")); @@ -260,16 +260,16 @@ TEST_F(ProxyScriptFetcherTest, TooLarge) { } } -TEST_F(ProxyScriptFetcherTest, Hang) { +TEST_F(ProxyScriptFetcherImplTest, Hang) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; - scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + scoped_ptr<ProxyScriptFetcherImpl> pac_fetcher( + new ProxyScriptFetcherImpl(context)); // Set the timeout period to 0.5 seconds. 
- int prev_timeout = - ProxyScriptFetcher::SetTimeoutConstraintForUnittest(500); + base::TimeDelta prev_timeout = pac_fetcher->SetTimeoutConstraint( + base::TimeDelta::FromMilliseconds(500)); // Try fetching a URL which takes 1.2 seconds. We should abort the request // after 500 ms, and fail with a timeout error. @@ -283,7 +283,7 @@ TEST_F(ProxyScriptFetcherTest, Hang) { } // Restore the original timeout period. - ProxyScriptFetcher::SetTimeoutConstraintForUnittest(prev_timeout); + pac_fetcher->SetTimeoutConstraint(prev_timeout); { // Make sure we can still fetch regular URLs. GURL url(test_server_.GetURL("files/pac.nsproxy")); @@ -299,12 +299,12 @@ TEST_F(ProxyScriptFetcherTest, Hang) { // The ProxyScriptFetcher should decode any content-codings // (like gzip, bzip, etc.), and apply any charset conversions to yield // UTF8. -TEST_F(ProxyScriptFetcherTest, Encodings) { +TEST_F(ProxyScriptFetcherImplTest, Encodings) { ASSERT_TRUE(test_server_.Start()); scoped_refptr<URLRequestContext> context = new RequestContext; scoped_ptr<ProxyScriptFetcher> pac_fetcher( - ProxyScriptFetcher::Create(context)); + new ProxyScriptFetcherImpl(context)); // Test a response that is gzip-encoded -- should get inflated. { diff --git a/net/proxy/proxy_service.cc b/net/proxy/proxy_service.cc index 4c27170..db22cc1 100644 --- a/net/proxy/proxy_service.cc +++ b/net/proxy/proxy_service.cc @@ -391,20 +391,20 @@ ProxyService::ProxyService(ProxyConfigService* config_service, ProxyService* ProxyService::CreateUsingV8ProxyResolver( ProxyConfigService* proxy_config_service, size_t num_pac_threads, - URLRequestContext* url_request_context, - NetLog* net_log, - MessageLoop* io_loop) { + ProxyScriptFetcher* proxy_script_fetcher, + HostResolver* host_resolver, + NetLog* net_log) { DCHECK(proxy_config_service); - DCHECK(url_request_context); - DCHECK(io_loop); + DCHECK(proxy_script_fetcher); + DCHECK(host_resolver); if (num_pac_threads == 0) num_pac_threads = kDefaultNumPacThreads; ProxyResolverFactory* sync_resolver_factory = new ProxyResolverFactoryForV8( - url_request_context->host_resolver(), - io_loop, + host_resolver, + MessageLoop::current(), net_log); ProxyResolver* proxy_resolver = @@ -413,9 +413,8 @@ ProxyService* ProxyService::CreateUsingV8ProxyResolver( ProxyService* proxy_service = new ProxyService(proxy_config_service, proxy_resolver, net_log); - // Configure PAC script downloads to be issued using |url_request_context|. - proxy_service->SetProxyScriptFetcher( - ProxyScriptFetcher::Create(url_request_context)); + // Configure PAC script downloads to be issued using |proxy_script_fetcher|. + proxy_service->SetProxyScriptFetcher(proxy_script_fetcher); return proxy_service; } diff --git a/net/proxy/proxy_service.h b/net/proxy/proxy_service.h index 951cd2a..8a161ea 100644 --- a/net/proxy/proxy_service.h +++ b/net/proxy/proxy_service.h @@ -25,6 +25,7 @@ class URLRequestContext; namespace net { +class HostResolver; class InitProxyResolver; class ProxyResolver; class ProxyScriptFetcher; @@ -153,9 +154,13 @@ class ProxyService : public base::RefCountedThreadSafe<ProxyService>, // (b) increases the memory used by proxy resolving, as each thread will // duplicate its own script context. - // |url_request_context| specifies the URL request context that will - // be used if a PAC script needs to be fetched. - // |io_loop| points to the IO thread's message loop. + // |proxy_script_fetcher| specifies the dependency to use for downloading + // any PAC scripts. The resulting ProxyService will take ownership of it. 
+ // + // |host_resolver| points to the host resolving dependency the PAC script + // should use for any DNS queries. It must remain valid throughout the + // lifetime of the ProxyService. + // // ########################################################################## // # See the warnings in net/proxy/proxy_resolver_v8.h describing the // # multi-threading model. In order for this to be safe to use, *ALL* the @@ -164,9 +169,9 @@ class ProxyService : public base::RefCountedThreadSafe<ProxyService>, static ProxyService* CreateUsingV8ProxyResolver( ProxyConfigService* proxy_config_service, size_t num_pac_threads, - URLRequestContext* url_request_context, - NetLog* net_log, - MessageLoop* io_loop); + ProxyScriptFetcher* proxy_script_fetcher, + HostResolver* host_resolver, + NetLog* net_log); // Same as CreateUsingV8ProxyResolver, except it uses system libraries // for evaluating the PAC script if available, otherwise skips |