diff options
author | rdsmith@chromium.org <rdsmith@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-06-14 02:49:13 +0000 |
---|---|---|
committer | rdsmith@chromium.org <rdsmith@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-06-14 02:49:13 +0000 |
commit | 0c2207220bb2b4d9c927182c0aa520e2837bae5a (patch) | |
tree | 3096573479b3a07e3dcfbab50a0e5f234914a91c /net/url_request | |
parent | 3e0685de6c31971993b80c2d24777d5d368bcf4b (diff) | |
download | chromium_src-0c2207220bb2b4d9c927182c0aa520e2837bae5a.zip chromium_src-0c2207220bb2b4d9c927182c0aa520e2837bae5a.tar.gz chromium_src-0c2207220bb2b4d9c927182c0aa520e2837bae5a.tar.bz2 |
Make SdchManager per-profile.
This will both allow SDCH dictionaries to be cached (as they can use the
cache associated with the profile) and will provide privacy protection
between different profiles (the existence of a dictionary in one profile
will not be leaked to another profile).
BUG=374914
R=jar@chromium.org
Review URL: https://codereview.chromium.org/298063006
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@277160 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request')
-rw-r--r-- | net/url_request/url_request_context.cc | 2 | ||||
-rw-r--r-- | net/url_request/url_request_context.h | 10 | ||||
-rw-r--r-- | net/url_request/url_request_http_job.cc | 44 | ||||
-rw-r--r-- | net/url_request/url_request_http_job.h | 3 |
4 files changed, 31 insertions, 28 deletions
diff --git a/net/url_request/url_request_context.cc b/net/url_request/url_request_context.cc index d6a4d2f..3a9f69d 100644 --- a/net/url_request/url_request_context.cc +++ b/net/url_request/url_request_context.cc @@ -31,6 +31,7 @@ URLRequestContext::URLRequestContext() http_transaction_factory_(NULL), job_factory_(NULL), throttler_manager_(NULL), + sdch_manager_(NULL), url_requests_(new std::set<const URLRequest*>) { } @@ -56,6 +57,7 @@ void URLRequestContext::CopyFrom(const URLRequestContext* other) { set_http_transaction_factory(other->http_transaction_factory_); set_job_factory(other->job_factory_); set_throttler_manager(other->throttler_manager_); + set_sdch_manager(other->sdch_manager_); set_http_user_agent_settings(other->http_user_agent_settings_); } diff --git a/net/url_request/url_request_context.h b/net/url_request/url_request_context.h index a05c9ae..4242d72 100644 --- a/net/url_request/url_request_context.h +++ b/net/url_request/url_request_context.h @@ -36,6 +36,7 @@ class HttpAuthHandlerFactory; class HttpTransactionFactory; class HttpUserAgentSettings; class NetworkDelegate; +class SdchManager; class ServerBoundCertService; class ProxyService; class URLRequest; @@ -184,6 +185,14 @@ class NET_EXPORT URLRequestContext throttler_manager_ = throttler_manager; } + // May be NULL. + SdchManager* sdch_manager() const { + return sdch_manager_; + } + void set_sdch_manager(SdchManager* sdch_manager) { + sdch_manager_ = sdch_manager; + } + // Gets the URLRequest objects that hold a reference to this // URLRequestContext. 
std::set<const URLRequest*>* url_requests() const { @@ -227,6 +236,7 @@ class NET_EXPORT URLRequestContext HttpTransactionFactory* http_transaction_factory_; const URLRequestJobFactory* job_factory_; URLRequestThrottlerManager* throttler_manager_; + SdchManager* sdch_manager_; // --------------------------------------------------------------------------- // Important: When adding any new members below, consider whether they need to diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc index 776c5b1..4b7d4e0 100644 --- a/net/url_request/url_request_http_job.cc +++ b/net/url_request/url_request_http_job.cc @@ -66,6 +66,7 @@ class URLRequestHttpJob::HttpFilterContext : public FilterContext { virtual bool IsSdchResponse() const OVERRIDE; virtual int64 GetByteReadCount() const OVERRIDE; virtual int GetResponseCode() const OVERRIDE; + virtual const URLRequestContext* GetURLRequestContext() const OVERRIDE; virtual void RecordPacketStats(StatisticSelector statistic) const OVERRIDE; // Method to allow us to reset filter context for a response that should have @@ -134,6 +135,11 @@ int URLRequestHttpJob::HttpFilterContext::GetResponseCode() const { return job_->GetResponseCode(); } +const URLRequestContext* +URLRequestHttpJob::HttpFilterContext::GetURLRequestContext() const { + return job_->request() ? job_->request()->context() : NULL; +} + void URLRequestHttpJob::HttpFilterContext::RecordPacketStats( StatisticSelector statistic) const { job_->RecordPacketStats(statistic); @@ -220,20 +226,6 @@ URLRequestHttpJob::~URLRequestHttpJob() { // filter_context_ is still alive. DestroyFilters(); - if (sdch_dictionary_url_.is_valid()) { - // Prior to reaching the destructor, request_ has been set to a NULL - // pointer, so request_->url() is no longer valid in the destructor, and we - // use an alternate copy |request_info_.url|. 
- SdchManager* manager = SdchManager::Global(); - // To be extra safe, since this is a "different time" from when we decided - // to get the dictionary, we'll validate that an SdchManager is available. - // At shutdown time, care is taken to be sure that we don't delete this - // globally useful instance "too soon," so this check is just defensive - // coding to assure that IF the system is shutting down, we don't have any - // problem if the manager was deleted ahead of time. - if (manager) // Defensive programming. - manager->FetchDictionary(request_info_.url, sdch_dictionary_url_); - } DoneWithRequest(ABORTED); } @@ -313,8 +305,8 @@ void URLRequestHttpJob::NotifyHeadersComplete() { ProcessStrictTransportSecurityHeader(); ProcessPublicKeyPinsHeader(); - if (SdchManager::Global() && - SdchManager::Global()->IsInSupportedDomain(request_->url())) { + SdchManager* sdch_manager(request()->context()->sdch_manager()); + if (sdch_manager && sdch_manager->IsInSupportedDomain(request_->url())) { const std::string name = "Get-Dictionary"; std::string url_text; void* iter = NULL; @@ -325,11 +317,11 @@ void URLRequestHttpJob::NotifyHeadersComplete() { // Eventually we should wait until a dictionary is requested several times // before we even download it (so that we don't waste memory or bandwidth). if (GetResponseHeaders()->EnumerateHeader(&iter, name, &url_text)) { - // request_->url() won't be valid in the destructor, so we use an - // alternate copy. - DCHECK_EQ(request_->url(), request_info_.url); // Resolve suggested URL relative to request url. 
- sdch_dictionary_url_ = request_info_.url.Resolve(url_text); + GURL sdch_dictionary_url = request_->url().Resolve(url_text); + if (sdch_dictionary_url.is_valid()) { + sdch_manager->FetchDictionary(request_->url(), sdch_dictionary_url); + } } } @@ -463,6 +455,8 @@ void URLRequestHttpJob::StartTransactionInternal() { } void URLRequestHttpJob::AddExtraHeaders() { + SdchManager* sdch_manager = request()->context()->sdch_manager(); + // Supply Accept-Encoding field only if it is not already provided. // It should be provided IF the content is known to have restrictions on // potential encoding, such as streaming multi-media. @@ -472,24 +466,24 @@ void URLRequestHttpJob::AddExtraHeaders() { // simple_data_source. if (!request_info_.extra_headers.HasHeader( HttpRequestHeaders::kAcceptEncoding)) { - bool advertise_sdch = SdchManager::Global() && + bool advertise_sdch = sdch_manager && // We don't support SDCH responses to POST as there is a possibility // of having SDCH encoded responses returned (e.g. by the cache) // which we cannot decode, and in those situations, we will need // to retransmit the request without SDCH, which is illegal for a POST. request()->method() != "POST" && - SdchManager::Global()->IsInSupportedDomain(request_->url()); + sdch_manager->IsInSupportedDomain(request_->url()); std::string avail_dictionaries; if (advertise_sdch) { - SdchManager::Global()->GetAvailDictionaryList(request_->url(), - &avail_dictionaries); + sdch_manager->GetAvailDictionaryList(request_->url(), + &avail_dictionaries); // The AllowLatencyExperiment() is only true if we've successfully done a // full SDCH compression recently in this browser session for this host. // Note that for this path, there might be no applicable dictionaries, // and hence we can't participate in the experiment. 
if (!avail_dictionaries.empty() && - SdchManager::Global()->AllowLatencyExperiment(request_->url())) { + sdch_manager->AllowLatencyExperiment(request_->url())) { // We are participating in the test (or control), and hence we'll // eventually record statistics via either SDCH_EXPERIMENT_DECODE or // SDCH_EXPERIMENT_HOLDBACK, and we'll need some packet timing data. diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h index b7ac50c..d38b462 100644 --- a/net/url_request/url_request_http_job.h +++ b/net/url_request/url_request_http_job.h @@ -190,9 +190,6 @@ class NET_EXPORT_PRIVATE URLRequestHttpJob : public URLRequestJob { bool read_in_progress_; - // An URL for an SDCH dictionary as suggested in a Get-Dictionary HTTP header. - GURL sdch_dictionary_url_; - scoped_ptr<HttpTransaction> transaction_; // This is used to supervise traffic and enforce exponential |