diff options
author | jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-05-15 01:06:53 +0000 |
---|---|---|
committer | jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-05-15 01:06:53 +0000 |
commit | d8fd513cd610568904212fa0d2a0de523d85a2ff (patch) | |
tree | 2025a97ba0925f3b3494baaa30e3dfe186b8f199 /net/url_request | |
parent | 96c77a6d2bf1e573e48748b54c8bf3c7aa4c9d96 (diff) | |
download | chromium_src-d8fd513cd610568904212fa0d2a0de523d85a2ff.zip chromium_src-d8fd513cd610568904212fa0d2a0de523d85a2ff.tar.gz chromium_src-d8fd513cd610568904212fa0d2a0de523d85a2ff.tar.bz2 |
Adjust histograms to only collect non-cache stats for SDCH.
I also changed the name of all the SDCH histograms so that
there would be no confusion about the data.
I also added one new problem code; recording it has no
semantic impact. I added it to make it easier to diagnose
dictionary specification errors in SDCH.
BUG=12012
R=wtc
Review URL: http://codereview.chromium.org/115377
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@16138 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request')
-rw-r--r-- | net/url_request/url_request_http_job.cc | 17 | ||||
-rw-r--r-- | net/url_request/url_request_http_job.h | 4 | ||||
-rw-r--r-- | net/url_request/url_request_job.cc | 60 | ||||
-rw-r--r-- | net/url_request/url_request_job.h | 2 |
4 files changed, 50 insertions, 33 deletions
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc index 9c81e00..143b0a8 100644 --- a/net/url_request/url_request_http_job.cc +++ b/net/url_request/url_request_http_job.cc @@ -67,15 +67,18 @@ URLRequestHttpJob::URLRequestHttpJob(URLRequest* request) context_(request->context()), sdch_dictionary_advertised_(false), sdch_test_activated_(false), - sdch_test_control_(false) { + sdch_test_control_(false), + is_cached_content_(false) { } URLRequestHttpJob::~URLRequestHttpJob() { DCHECK(!sdch_test_control_ || !sdch_test_activated_); - if (sdch_test_control_) - RecordPacketStats(SDCH_EXPERIMENT_HOLDBACK); - if (sdch_test_activated_) - RecordPacketStats(SDCH_EXPERIMENT_DECODE); + if (!IsCachedContent()) { + if (sdch_test_control_) + RecordPacketStats(SDCH_EXPERIMENT_HOLDBACK); + if (sdch_test_activated_) + RecordPacketStats(SDCH_EXPERIMENT_DECODE); + } if (sdch_dictionary_url_.is_valid()) { // Prior to reaching the destructor, request_ has been set to a NULL @@ -469,6 +472,10 @@ void URLRequestHttpJob::NotifyHeadersComplete() { response_info_ = transaction_->GetResponseInfo(); + // Save boolean, as we'll need this info at destruction time, and filters may + // also need this info. + is_cached_content_ = response_info_->was_cached; + // Get the Set-Cookie values, and send them to our cookie database. 
if (!(request_info_.load_flags & net::LOAD_DO_NOT_SAVE_COOKIES)) { URLRequestContext* ctx = request_->context(); diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h index 57764ef..a946f62 100644 --- a/net/url_request/url_request_http_job.h +++ b/net/url_request/url_request_http_job.h @@ -45,6 +45,7 @@ class URLRequestHttpJob : public URLRequestJob { virtual int GetResponseCode() const; virtual bool GetContentEncodings( std::vector<Filter::FilterType>* encoding_type); + virtual bool IsCachedContent() const { return is_cached_content_; } virtual bool IsSdchResponse() const; virtual bool IsRedirectResponse(GURL* location, int* http_status_code); virtual bool IsSafeRedirect(const GURL& location); @@ -105,6 +106,9 @@ class URLRequestHttpJob : public URLRequestJob { bool sdch_test_activated_; // Advertising a dictionary for sdch. bool sdch_test_control_; // Not even accepting-content sdch. + // For recording of stats, we need to remember if this is cached content. + bool is_cached_content_; + DISALLOW_COPY_AND_ASSIGN(URLRequestHttpJob); }; diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc index 6175a0d..088bf50 100644 --- a/net/url_request/url_request_job.cc +++ b/net/url_request/url_request_job.cc @@ -118,12 +118,6 @@ base::Time URLRequestJob::GetRequestTime() const { return request_->request_time(); }; -bool URLRequestJob::IsCachedContent() const { - if (!request_) - return false; - return request_->was_cached(); -}; - // This function calls ReadData to get stream data. If a filter exists, passes // the data to the attached filter. Then returns the output from filter back to // the caller. 
@@ -584,19 +578,26 @@ void URLRequestJob::EnablePacketCounting(size_t max_packets_timed) { void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const { if (!packet_timing_enabled_ || (final_packet_time_ == base::Time())) return; + + // Caller should verify that we're not cached content, but we can't always + // really check for it here because we may (at destruction time) call our own + // class method and get a bogus const answer of false. This DCHECK only helps + // when this method has a valid overridden definition. + DCHECK(!IsCachedContent()); + base::TimeDelta duration = final_packet_time_ - request_time_snapshot_; switch (statistic) { case SDCH_DECODE: { - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_Latency_F_a", duration, + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_Latency_F_a", duration, base::TimeDelta::FromMilliseconds(20), base::TimeDelta::FromMinutes(10), 100); - UMA_HISTOGRAM_COUNTS_100("Sdch2.Network_Decode_Packets_b", + UMA_HISTOGRAM_COUNTS_100("Sdch3.Network_Decode_Packets_b", static_cast<int>(observed_packet_count_)); - UMA_HISTOGRAM_COUNTS("Sdch2.Network_Decode_Bytes_Processed_a", + UMA_HISTOGRAM_COUNTS("Sdch3.Network_Decode_Bytes_Processed_a", static_cast<int>(bytes_observed_in_packets_)); if (packet_times_.empty()) return; - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_1st_To_Last_a", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_1st_To_Last_a", final_packet_time_ - packet_times_[0], base::TimeDelta::FromMilliseconds(20), base::TimeDelta::FromMinutes(10), 100); @@ -605,19 +606,19 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const { DCHECK(kSdchPacketHistogramCount > 4); if (packet_times_.size() <= 4) return; - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_1st_To_2nd_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_1st_To_2nd_c", packet_times_[1] - packet_times_[0], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - 
UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_2nd_To_3rd_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_2nd_To_3rd_c", packet_times_[2] - packet_times_[1], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_3rd_To_4th_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_3rd_To_4th_c", packet_times_[3] - packet_times_[2], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_4th_To_5th_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_4th_To_5th_c", packet_times_[4] - packet_times_[3], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); @@ -626,15 +627,15 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const { case SDCH_PASSTHROUGH: { // Despite advertising a dictionary, we handled non-sdch compressed // content. - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_Latency_F_a", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_Latency_F_a", duration, base::TimeDelta::FromMilliseconds(20), base::TimeDelta::FromMinutes(10), 100); - UMA_HISTOGRAM_COUNTS_100("Sdch2.Network_Pass-through_Packets_b", + UMA_HISTOGRAM_COUNTS_100("Sdch3.Network_Pass-through_Packets_b", observed_packet_count_); if (packet_times_.empty()) return; - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_1st_To_Last_a", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_1st_To_Last_a", final_packet_time_ - packet_times_[0], base::TimeDelta::FromMilliseconds(20), base::TimeDelta::FromMinutes(10), 100); @@ -642,19 +643,19 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const { DCHECK(kSdchPacketHistogramCount > 4); if (packet_times_.size() <= 4) return; - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_1st_To_2nd_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_1st_To_2nd_c", packet_times_[1] - packet_times_[0], 
base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_2nd_To_3rd_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_2nd_To_3rd_c", packet_times_[2] - packet_times_[1], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_3rd_To_4th_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_3rd_To_4th_c", packet_times_[3] - packet_times_[2], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_4th_To_5th_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_4th_To_5th_c", packet_times_[4] - packet_times_[3], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); @@ -662,7 +663,7 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const { } case SDCH_EXPERIMENT_DECODE: { - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Decode", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Decode", duration, base::TimeDelta::FromMilliseconds(20), base::TimeDelta::FromMinutes(10), 100); @@ -671,27 +672,32 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const { return; } case SDCH_EXPERIMENT_HOLDBACK: { - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback", duration, base::TimeDelta::FromMilliseconds(20), base::TimeDelta::FromMinutes(10), 100); + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_1st_To_Last_a", + final_packet_time_ - packet_times_[0], + base::TimeDelta::FromMilliseconds(20), + base::TimeDelta::FromMinutes(10), 100); + DCHECK(max_packets_timed_ >= kSdchPacketHistogramCount); DCHECK(kSdchPacketHistogramCount > 4); if (packet_times_.size() <= 4) return; - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_1st_To_2nd_c", + 
UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_1st_To_2nd_c", packet_times_[1] - packet_times_[0], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_2nd_To_3rd_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_2nd_To_3rd_c", packet_times_[2] - packet_times_[1], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_3rd_To_4th_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_3rd_To_4th_c", packet_times_[3] - packet_times_[2], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); - UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_4th_To_5th_c", + UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_4th_To_5th_c", packet_times_[4] - packet_times_[3], base::TimeDelta::FromMilliseconds(1), base::TimeDelta::FromSeconds(10), 100); diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h index 5ecb6d0..e76bfaa 100644 --- a/net/url_request/url_request_job.h +++ b/net/url_request/url_request_job.h @@ -205,7 +205,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>, virtual bool GetMimeType(std::string* mime_type) const { return false; } virtual bool GetURL(GURL* gurl) const; virtual base::Time GetRequestTime() const; - virtual bool IsCachedContent() const; + virtual bool IsCachedContent() const { return false; } virtual int64 GetByteReadCount() const; virtual int GetResponseCode() const { return -1; } virtual int GetInputStreamBufferSize() const { return kFilterBufSize; } |