author | jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-05-19 23:39:03 +0000
---|---|---
committer | jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-05-19 23:39:03 +0000
commit | 284c373d43107741d1bad8c118b8e88774d533be | (patch)
tree | a7d8ba14302b155129cbc9dd8426f7d9bffc6214 |
parent | c2b67266ba9a8ac0bc22a3c67966fe008dd2a20c | (diff)
Clean up code so the "was cached" flag is kept only in URLRequestHttpJob
r=wtc
Review URL: http://codereview.chromium.org/113535
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@16435 0039d316-1c4b-4281-b951-d872f2087c98
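At its core, the change replaces a cached copy of state with an on-demand query: instead of each SdchFilter snapshotting `was_cached_` in its constructor, the filter asks its FilterContext (implemented by the job) whenever it needs the answer, so the flag lives in exactly one place. Below is a minimal sketch of the before/after pattern, using simplified, hypothetical class shapes rather than the real Chromium interfaces:

```cpp
#include <iostream>

// Hypothetical, simplified stand-in for Chromium's FilterContext interface.
class FilterContext {
 public:
  virtual ~FilterContext() = default;
  // The single source of truth: was this response served from the cache?
  virtual bool IsCachedContent() const = 0;
};

// Before: the filter copied the flag into its own member at construction,
// duplicating state the context already owns.
class OldStyleFilter {
 public:
  explicit OldStyleFilter(const FilterContext& context)
      : was_cached_(context.IsCachedContent()) {}
  bool WasCached() const { return was_cached_; }

 private:
  const bool was_cached_;
};

// After: the filter keeps a reference and queries on demand, so the flag
// lives in exactly one place and can never drift out of sync.
class NewStyleFilter {
 public:
  explicit NewStyleFilter(const FilterContext& context) : context_(context) {}
  bool WasCached() const { return context_.IsCachedContent(); }

 private:
  const FilterContext& context_;
};

// A toy context standing in for the URLRequestHttpJob side of the interface.
class ToyContext : public FilterContext {
 public:
  explicit ToyContext(bool cached) : cached_(cached) {}
  bool IsCachedContent() const override { return cached_; }

 private:
  const bool cached_;
};

int main() {
  ToyContext context(true);
  NewStyleFilter filter(context);
  std::cout << "cached: " << std::boolalpha << filter.WasCached() << "\n";
  return 0;
}
```

The cost of the on-demand query is a lifetime requirement: the context must outlive the filter. The URLRequestHttpJob change in the diff below enforces exactly that by destroying the filters before the job's context-serving machinery goes away.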
-rw-r--r-- | net/base/filter.cc | 4
-rw-r--r-- | net/base/sdch_filter.cc | 9
-rw-r--r-- | net/base/sdch_filter.h | 5
-rw-r--r-- | net/url_request/url_request_http_job.cc | 3
-rw-r--r-- | net/url_request/url_request_job.cc | 4
-rw-r--r-- | net/url_request/url_request_job.h | 14
6 files changed, 19 insertions(+), 20 deletions(-)
diff --git a/net/base/filter.cc b/net/base/filter.cc
index 73cafd4..d12124c 100644
--- a/net/base/filter.cc
+++ b/net/base/filter.cc
@@ -39,7 +39,7 @@ const char kTextHtml[] = "text/html";
 
 Filter* Filter::Factory(const std::vector<FilterType>& filter_types,
                         const FilterContext& filter_context) {
-  DCHECK(filter_context.GetInputStreamBufferSize() > 0);
+  DCHECK_GT(filter_context.GetInputStreamBufferSize(), 0);
   if (filter_types.empty() || filter_context.GetInputStreamBufferSize() <= 0)
     return NULL;
 
@@ -263,7 +263,7 @@ Filter::~Filter() {}
 
 bool Filter::InitBuffer() {
   int buffer_size = filter_context_.GetInputStreamBufferSize();
-  DCHECK(buffer_size > 0);
+  DCHECK_GT(buffer_size, 0);
   if (buffer_size <= 0 || stream_buffer())
     return false;
diff --git a/net/base/sdch_filter.cc b/net/base/sdch_filter.cc
index 2c747bc..9ddac28 100644
--- a/net/base/sdch_filter.cc
+++ b/net/base/sdch_filter.cc
@@ -25,8 +25,7 @@ SdchFilter::SdchFilter(const FilterContext& filter_context)
       dest_buffer_excess_index_(0),
       source_bytes_(0),
       output_bytes_(0),
-      possible_pass_through_(false),
-      was_cached_(filter_context.IsCachedContent()) {
+      possible_pass_through_(false) {
   bool success = filter_context.GetMimeType(&mime_type_);
   DCHECK(success);
   success = filter_context.GetURL(&url_);
@@ -69,7 +68,7 @@ SdchFilter::~SdchFilter() {
     UMA_HISTOGRAM_COUNTS("Sdch3.UnflushedVcdiffOut", output_bytes_);
   }
 
-  if (was_cached_) {
+  if (filter_context().IsCachedContent()) {
     // Not a real error, but it is useful to have this tally.
     // TODO(jar): Remove this stat after SDCH stability is validated.
     SdchManager::SdchErrorRecovery(SdchManager::CACHE_DECODED);
@@ -196,7 +195,7 @@ Filter::FilterStatus SdchFilter::ReadFilteredData(char* dest_buffer,
           // Since we can't do a meta-refresh (along with an exponential
           // backoff), we'll just make sure this NEVER happens again.
           SdchManager::BlacklistDomainForever(url_);
-          if (was_cached_)
+          if (filter_context().IsCachedContent())
            SdchManager::SdchErrorRecovery(
                SdchManager::CACHED_META_REFRESH_UNSUPPORTED);
          else
@@ -206,7 +205,7 @@ Filter::FilterStatus SdchFilter::ReadFilteredData(char* dest_buffer,
        }
        // HTML content means we can issue a meta-refresh, and get the content
        // again, perhaps without SDCH (to be safe).
-       if (was_cached_) {
+       if (filter_context().IsCachedContent()) {
          // Cached content is probably a startup tab, so we'll just get fresh
          // content and try again, without disabling sdch.
          SdchManager::SdchErrorRecovery(
diff --git a/net/base/sdch_filter.h b/net/base/sdch_filter.h
index 7255839..012de89 100644
--- a/net/base/sdch_filter.h
+++ b/net/base/sdch_filter.h
@@ -111,11 +111,6 @@ class SdchFilter : public Filter {
   // This is used to restrict use of a dictionary to a specific URL or path.
   GURL url_;
 
-  // To facilitate error recovery, we store whether this content came from a
-  // cache, as we then probably don't have the requsite dictionary, and will
-  // need to induce a meta-refresh.
-  const bool was_cached_;
-
   // To facilitate error recovery, allow filter to know if content is text/html
   // by checking within this mime type (we may do a meta-refresh via html).
   std::string mime_type_;
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 143b0a8..a86edbd 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -79,6 +79,9 @@ URLRequestHttpJob::~URLRequestHttpJob() {
     if (sdch_test_activated_)
       RecordPacketStats(SDCH_EXPERIMENT_DECODE);
   }
+  // Make sure SDCH filters are told to emit histogram data while this class
+  // can still service the IsCachedContent() call.
+  DestroyFilters();
 
   if (sdch_dictionary_url_.is_valid()) {
     // Prior to reaching the destructor, request_ has been set to a NULL
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 088bf50..3d7df3e 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -45,10 +45,6 @@ URLRequestJob::URLRequestJob(URLRequest* request)
 }
 
 URLRequestJob::~URLRequestJob() {
-  // Cause filter chain to be destroyed now, so that any histogram requests can
-  // be made before we are torn down.
-  filter_.reset(NULL);
-
   g_url_request_job_tracker.RemoveJob(this);
 }
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index e76bfaa..301bb3b 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -265,6 +265,16 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
   // the hood.
   bool ReadFilteredData(int *bytes_read);
 
+  // Facilitate histogramming by turning on packet counting.
+  // If called more than once, the largest value will be used.
+  void EnablePacketCounting(size_t max_packets_timed);
+
+  // At or near destruction time, a derived class may request that the filters
+  // be destroyed so that statistics can be gathered while the derived class is
+  // still present to assist in calculations. This is used by URLRequestHttpJob
+  // to get SDCH to emit stats.
+  void DestroyFilters() { filter_.reset(); }
+
   // The request that initiated this job. This value MAY BE NULL if the
   // request was released by DetachRequest().
   URLRequest* request_;
@@ -281,10 +291,6 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
   // Contains IO performance measurement when profiling is enabled.
   scoped_ptr<URLRequestJobMetrics> metrics_;
 
-  // Facilitate histogramming by turning on packet counting.
-  // If called more than once, the largest value will be used.
-  void EnablePacketCounting(size_t max_packets_timed);
-
  private:
   // Size of filter input buffers used by this class.
   static const int kFilterBufSize;
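The comment added to URLRequestHttpJob's destructor explains the subtle part: in C++, the derived destructor runs before the base destructor, so once ~URLRequestJob starts, the URLRequestHttpJob pieces that answer IsCachedContent() are already gone. Moving the filter teardown out of the base class and calling DestroyFilters() from the derived destructor lets ~SdchFilter record its histograms while the full object is still alive. A compilable sketch of the hazard and the fix, with hypothetical simplified classes (Job, HttpJob, Filter) standing in for the real ones:

```cpp
#include <iostream>
#include <memory>

class Job;

// The filter queries its owning job for final statistics as it dies.
class Filter {
 public:
  explicit Filter(const Job& job) : job_(job) {}
  ~Filter();  // Defined below, after Job, because it calls into Job.

 private:
  const Job& job_;
};

class Job {
 public:
  virtual ~Job() = default;  // Runs AFTER the derived destructor.
  virtual bool IsCachedContent() const = 0;

 protected:
  // A derived class calls this while it is still fully constructed, so the
  // filter can safely query IsCachedContent() while recording final stats.
  void DestroyFilters() { filter_.reset(); }

  std::unique_ptr<Filter> filter_;
};

Filter::~Filter() {
  // Emits a final tally; this must run before the derived job is torn down.
  std::cout << "filter destroyed, cached=" << job_.IsCachedContent() << "\n";
}

class HttpJob : public Job {
 public:
  HttpJob() { filter_ = std::make_unique<Filter>(*this); }
  ~HttpJob() override {
    // Tear down the filter here, while HttpJob can still answer the query.
    DestroyFilters();
  }
  bool IsCachedContent() const override { return true; }
};

int main() {
  HttpJob job;
  return 0;
}  // ~HttpJob destroys the filter first; only then does ~Job run.
```

If the filter were instead destroyed by the base destructor, as the removed filter_.reset(NULL) in ~URLRequestJob did, the virtual call in ~Filter would execute after the object's dynamic type had reverted to Job, risking a pure-virtual call; destroying it from the derived destructor dispatches to HttpJob::IsCachedContent() as intended.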