summary | refs | log | tree | commit | diff | stats
path: root/net/url_request
diff options
context:
space:
mode:
authorjar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2009-05-19 23:39:03 +0000
committerjar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2009-05-19 23:39:03 +0000
commit284c373d43107741d1bad8c118b8e88774d533be (patch)
treea7d8ba14302b155129cbc9dd8426f7d9bffc6214 /net/url_request
parentc2b67266ba9a8ac0bc22a3c67966fe008dd2a20c (diff)
downloadchromium_src-284c373d43107741d1bad8c118b8e88774d533be.zip
chromium_src-284c373d43107741d1bad8c118b8e88774d533be.tar.gz
chromium_src-284c373d43107741d1bad8c118b8e88774d533be.tar.bz2
Cleanup code to keep flag showing "was cached" only in URLRequestHttpJob
r=wtc Review URL: http://codereview.chromium.org/113535 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@16435 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request')
-rw-r--r--net/url_request/url_request_http_job.cc3
-rw-r--r--net/url_request/url_request_job.cc4
-rw-r--r--net/url_request/url_request_job.h14
3 files changed, 13 insertions(+), 8 deletions(-)
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 143b0a8..a86edbd 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -79,6 +79,9 @@ URLRequestHttpJob::~URLRequestHttpJob() {
if (sdch_test_activated_)
RecordPacketStats(SDCH_EXPERIMENT_DECODE);
}
+ // Make sure SDCH filters are told to emit histogram data while this class
+ // can still service the IsCachedContent() call.
+ DestroyFilters();
if (sdch_dictionary_url_.is_valid()) {
// Prior to reaching the destructor, request_ has been set to a NULL
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 088bf50..3d7df3e 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -45,10 +45,6 @@ URLRequestJob::URLRequestJob(URLRequest* request)
}
URLRequestJob::~URLRequestJob() {
- // Cause filter chain to be destroyed now, so that any histogram requests can
- // be made before we are torn down.
- filter_.reset(NULL);
-
g_url_request_job_tracker.RemoveJob(this);
}
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index e76bfaa..301bb3b 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -265,6 +265,16 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
// the hood.
bool ReadFilteredData(int *bytes_read);
+ // Facilitate histogramming by turning on packet counting.
+ // If called more than once, the largest value will be used.
+ void EnablePacketCounting(size_t max_packets_timed);
+
+ // At or near destruction time, a derived class may request that the filters
+ // be destroyed so that statistics can be gathered while the derived class is
+ // still present to assist in calculations. This is used by URLRequestHttpJob
+ // to get SDCH to emit stats.
+ void DestroyFilters() { filter_.reset(); }
+
// The request that initiated this job. This value MAY BE NULL if the
// request was released by DetachRequest().
URLRequest* request_;
@@ -281,10 +291,6 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
// Contains IO performance measurement when profiling is enabled.
scoped_ptr<URLRequestJobMetrics> metrics_;
- // Facilitate histogramming by turning on packet counting.
- // If called more than once, the largest value will be used.
- void EnablePacketCounting(size_t max_packets_timed);
-
private:
// Size of filter input buffers used by this class.
static const int kFilterBufSize;