summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--net/base/sdch_filter.cc20
-rw-r--r--net/base/sdch_manager.cc10
-rw-r--r--net/base/sdch_manager.h1
-rw-r--r--net/url_request/url_request_http_job.cc17
-rw-r--r--net/url_request/url_request_http_job.h4
-rw-r--r--net/url_request/url_request_job.cc60
-rw-r--r--net/url_request/url_request_job.h2
7 files changed, 67 insertions(+), 47 deletions(-)
diff --git a/net/base/sdch_filter.cc b/net/base/sdch_filter.cc
index ce0acb8..2c747bc 100644
--- a/net/base/sdch_filter.cc
+++ b/net/base/sdch_filter.cc
@@ -40,7 +40,7 @@ SdchFilter::~SdchFilter() {
static int filter_use_count = 0;
++filter_use_count;
if (META_REFRESH_RECOVERY == decoding_status_) {
- UMA_HISTOGRAM_COUNTS("Sdch2.FilterUseBeforeDisabling", filter_use_count);
+ UMA_HISTOGRAM_COUNTS("Sdch3.FilterUseBeforeDisabling", filter_use_count);
}
if (vcdiff_streaming_decoder_.get()) {
@@ -51,22 +51,22 @@ SdchFilter::~SdchFilter() {
// Note this will "wear off" quickly enough, and is just meant to assure
// in some rare case that the user is not stuck.
SdchManager::BlacklistDomain(url_);
- UMA_HISTOGRAM_COUNTS("Sdch2.PartialBytesIn",
+ UMA_HISTOGRAM_COUNTS("Sdch3.PartialBytesIn",
static_cast<int>(filter_context().GetByteReadCount()));
- UMA_HISTOGRAM_COUNTS("Sdch2.PartialVcdiffIn", source_bytes_);
- UMA_HISTOGRAM_COUNTS("Sdch2.PartialVcdiffOut", output_bytes_);
+ UMA_HISTOGRAM_COUNTS("Sdch3.PartialVcdiffIn", source_bytes_);
+ UMA_HISTOGRAM_COUNTS("Sdch3.PartialVcdiffOut", output_bytes_);
}
}
if (!dest_buffer_excess_.empty()) {
// Filter chaining error, or premature teardown.
SdchManager::SdchErrorRecovery(SdchManager::UNFLUSHED_CONTENT);
- UMA_HISTOGRAM_COUNTS("Sdch2.UnflushedBytesIn",
+ UMA_HISTOGRAM_COUNTS("Sdch3.UnflushedBytesIn",
static_cast<int>(filter_context().GetByteReadCount()));
- UMA_HISTOGRAM_COUNTS("Sdch2.UnflushedBufferSize",
+ UMA_HISTOGRAM_COUNTS("Sdch3.UnflushedBufferSize",
dest_buffer_excess_.size());
- UMA_HISTOGRAM_COUNTS("Sdch2.UnflushedVcdiffIn", source_bytes_);
- UMA_HISTOGRAM_COUNTS("Sdch2.UnflushedVcdiffOut", output_bytes_);
+ UMA_HISTOGRAM_COUNTS("Sdch3.UnflushedVcdiffIn", source_bytes_);
+ UMA_HISTOGRAM_COUNTS("Sdch3.UnflushedVcdiffOut", output_bytes_);
}
if (was_cached_) {
@@ -79,10 +79,10 @@ SdchFilter::~SdchFilter() {
switch (decoding_status_) {
case DECODING_IN_PROGRESS: {
if (output_bytes_)
- UMA_HISTOGRAM_PERCENTAGE("Sdch2.Network_Decode_Ratio_a",
+ UMA_HISTOGRAM_PERCENTAGE("Sdch3.Network_Decode_Ratio_a",
static_cast<int>(
(filter_context().GetByteReadCount() * 100) / output_bytes_));
- UMA_HISTOGRAM_COUNTS("Sdch2.Network_Decode_Bytes_VcdiffOut_a",
+ UMA_HISTOGRAM_COUNTS("Sdch3.Network_Decode_Bytes_VcdiffOut_a",
output_bytes_);
filter_context().RecordPacketStats(FilterContext::SDCH_DECODE);
diff --git a/net/base/sdch_manager.cc b/net/base/sdch_manager.cc
index 05b6475..7de9f0b 100644
--- a/net/base/sdch_manager.cc
+++ b/net/base/sdch_manager.cc
@@ -32,7 +32,7 @@ SdchManager* SdchManager::Global() {
// static
void SdchManager::SdchErrorRecovery(ProblemCodes problem) {
- static LinearHistogram histogram("Sdch2.ProblemCodes_3", MIN_PROBLEM_CODE,
+ static LinearHistogram histogram("Sdch3.ProblemCodes_3", MIN_PROBLEM_CODE,
MAX_PROBLEM_CODE - 1, MAX_PROBLEM_CODE);
histogram.SetFlags(kUmaTargetedHistogramFlag);
histogram.Add(problem);
@@ -272,7 +272,7 @@ bool SdchManager::AddSdchDictionary(const std::string& dictionary_text,
return false;
}
- UMA_HISTOGRAM_COUNTS("Sdch2.Dictionary size loaded", dictionary_text.size());
+ UMA_HISTOGRAM_COUNTS("Sdch3.Dictionary size loaded", dictionary_text.size());
DLOG(INFO) << "Loaded dictionary with client hash " << client_hash <<
" and server hash " << server_hash;
Dictionary* dictionary =
@@ -313,7 +313,7 @@ void SdchManager::GetAvailDictionaryList(const GURL& target_url,
}
// Watch to see if we have corrupt or numerous dictionaries.
if (count > 0)
- UMA_HISTOGRAM_COUNTS("Sdch2.Advertisement_Count", count);
+ UMA_HISTOGRAM_COUNTS("Sdch3.Advertisement_Count", count);
}
SdchManager::Dictionary::Dictionary(const std::string& dictionary_text,
@@ -413,8 +413,10 @@ bool SdchManager::Dictionary::CanSet(const std::string& domain,
// It is a postfix... so check to see if there's a dot in the prefix.
size_t end_of_host_index = referrer_url_host.find_first_of('.');
if (referrer_url_host.npos != end_of_host_index &&
- end_of_host_index < postfix_domain_index)
+ end_of_host_index < postfix_domain_index) {
+ SdchErrorRecovery(DICTIONARY_REFERER_URL_HAS_DOT_IN_PREFIX);
return false;
+ }
}
if (!ports.empty()
diff --git a/net/base/sdch_manager.h b/net/base/sdch_manager.h
index 43fa484..679d771 100644
--- a/net/base/sdch_manager.h
+++ b/net/base/sdch_manager.h
@@ -90,6 +90,7 @@ class SdchManager {
DICTIONARY_DOMAIN_NOT_MATCHING_SOURCE_URL = 24,
DICTIONARY_PORT_NOT_MATCHING_SOURCE_URL = 25,
DICTIONARY_HAS_NO_TEXT = 26,
+ DICTIONARY_REFERER_URL_HAS_DOT_IN_PREFIX = 27,
// Dictionary loading problems.
DICTIONARY_LOAD_ATTEMPT_FROM_DIFFERENT_HOST = 30,
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 9c81e00..143b0a8 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -67,15 +67,18 @@ URLRequestHttpJob::URLRequestHttpJob(URLRequest* request)
context_(request->context()),
sdch_dictionary_advertised_(false),
sdch_test_activated_(false),
- sdch_test_control_(false) {
+ sdch_test_control_(false),
+ is_cached_content_(false) {
}
URLRequestHttpJob::~URLRequestHttpJob() {
DCHECK(!sdch_test_control_ || !sdch_test_activated_);
- if (sdch_test_control_)
- RecordPacketStats(SDCH_EXPERIMENT_HOLDBACK);
- if (sdch_test_activated_)
- RecordPacketStats(SDCH_EXPERIMENT_DECODE);
+ if (!IsCachedContent()) {
+ if (sdch_test_control_)
+ RecordPacketStats(SDCH_EXPERIMENT_HOLDBACK);
+ if (sdch_test_activated_)
+ RecordPacketStats(SDCH_EXPERIMENT_DECODE);
+ }
if (sdch_dictionary_url_.is_valid()) {
// Prior to reaching the destructor, request_ has been set to a NULL
@@ -469,6 +472,10 @@ void URLRequestHttpJob::NotifyHeadersComplete() {
response_info_ = transaction_->GetResponseInfo();
+ // Save boolean, as we'll need this info at destruction time, and filters may
+ // also need this info.
+ is_cached_content_ = response_info_->was_cached;
+
// Get the Set-Cookie values, and send them to our cookie database.
if (!(request_info_.load_flags & net::LOAD_DO_NOT_SAVE_COOKIES)) {
URLRequestContext* ctx = request_->context();
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index 57764ef..a946f62 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -45,6 +45,7 @@ class URLRequestHttpJob : public URLRequestJob {
virtual int GetResponseCode() const;
virtual bool GetContentEncodings(
std::vector<Filter::FilterType>* encoding_type);
+ virtual bool IsCachedContent() const { return is_cached_content_; }
virtual bool IsSdchResponse() const;
virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
virtual bool IsSafeRedirect(const GURL& location);
@@ -105,6 +106,9 @@ class URLRequestHttpJob : public URLRequestJob {
bool sdch_test_activated_; // Advertising a dictionary for sdch.
bool sdch_test_control_; // Not even accepting-content sdch.
+ // For recording of stats, we need to remember if this is cached content.
+ bool is_cached_content_;
+
DISALLOW_COPY_AND_ASSIGN(URLRequestHttpJob);
};
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 6175a0d..088bf50 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -118,12 +118,6 @@ base::Time URLRequestJob::GetRequestTime() const {
return request_->request_time();
};
-bool URLRequestJob::IsCachedContent() const {
- if (!request_)
- return false;
- return request_->was_cached();
-};
-
// This function calls ReadData to get stream data. If a filter exists, passes
// the data to the attached filter. Then returns the output from filter back to
// the caller.
@@ -584,19 +578,26 @@ void URLRequestJob::EnablePacketCounting(size_t max_packets_timed) {
void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
if (!packet_timing_enabled_ || (final_packet_time_ == base::Time()))
return;
+
+ // Caller should verify that we're not cached content, but we can't always
+ // really check for it here because we may (at destruction time) call our own
+ // class method and get a bogus const answer of false. This DCHECK only helps
+ // when this method has a valid overridden definition.
+ DCHECK(!IsCachedContent());
+
base::TimeDelta duration = final_packet_time_ - request_time_snapshot_;
switch (statistic) {
case SDCH_DECODE: {
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_Latency_F_a", duration,
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_Latency_F_a", duration,
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
- UMA_HISTOGRAM_COUNTS_100("Sdch2.Network_Decode_Packets_b",
+ UMA_HISTOGRAM_COUNTS_100("Sdch3.Network_Decode_Packets_b",
static_cast<int>(observed_packet_count_));
- UMA_HISTOGRAM_COUNTS("Sdch2.Network_Decode_Bytes_Processed_a",
+ UMA_HISTOGRAM_COUNTS("Sdch3.Network_Decode_Bytes_Processed_a",
static_cast<int>(bytes_observed_in_packets_));
if (packet_times_.empty())
return;
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_1st_To_Last_a",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_1st_To_Last_a",
final_packet_time_ - packet_times_[0],
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
@@ -605,19 +606,19 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
DCHECK(kSdchPacketHistogramCount > 4);
if (packet_times_.size() <= 4)
return;
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_1st_To_2nd_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_1st_To_2nd_c",
packet_times_[1] - packet_times_[0],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_2nd_To_3rd_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_2nd_To_3rd_c",
packet_times_[2] - packet_times_[1],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_3rd_To_4th_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_3rd_To_4th_c",
packet_times_[3] - packet_times_[2],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Decode_4th_To_5th_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_4th_To_5th_c",
packet_times_[4] - packet_times_[3],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
@@ -626,15 +627,15 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
case SDCH_PASSTHROUGH: {
// Despite advertising a dictionary, we handled non-sdch compressed
// content.
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_Latency_F_a",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_Latency_F_a",
duration,
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
- UMA_HISTOGRAM_COUNTS_100("Sdch2.Network_Pass-through_Packets_b",
+ UMA_HISTOGRAM_COUNTS_100("Sdch3.Network_Pass-through_Packets_b",
observed_packet_count_);
if (packet_times_.empty())
return;
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_1st_To_Last_a",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_1st_To_Last_a",
final_packet_time_ - packet_times_[0],
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
@@ -642,19 +643,19 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
DCHECK(kSdchPacketHistogramCount > 4);
if (packet_times_.size() <= 4)
return;
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_1st_To_2nd_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_1st_To_2nd_c",
packet_times_[1] - packet_times_[0],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_2nd_To_3rd_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_2nd_To_3rd_c",
packet_times_[2] - packet_times_[1],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_3rd_To_4th_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_3rd_To_4th_c",
packet_times_[3] - packet_times_[2],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Network_Pass-through_4th_To_5th_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_4th_To_5th_c",
packet_times_[4] - packet_times_[3],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
@@ -662,7 +663,7 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
}
case SDCH_EXPERIMENT_DECODE: {
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Decode",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Decode",
duration,
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
@@ -671,27 +672,32 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
return;
}
case SDCH_EXPERIMENT_HOLDBACK: {
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback",
duration,
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_1st_To_Last_a",
+ final_packet_time_ - packet_times_[0],
+ base::TimeDelta::FromMilliseconds(20),
+ base::TimeDelta::FromMinutes(10), 100);
+
DCHECK(max_packets_timed_ >= kSdchPacketHistogramCount);
DCHECK(kSdchPacketHistogramCount > 4);
if (packet_times_.size() <= 4)
return;
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_1st_To_2nd_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_1st_To_2nd_c",
packet_times_[1] - packet_times_[0],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_2nd_To_3rd_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_2nd_To_3rd_c",
packet_times_[2] - packet_times_[1],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_3rd_To_4th_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_3rd_To_4th_c",
packet_times_[3] - packet_times_[2],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
- UMA_HISTOGRAM_CLIPPED_TIMES("Sdch2.Experiment_Holdback_4th_To_5th_c",
+ UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback_4th_To_5th_c",
packet_times_[4] - packet_times_[3],
base::TimeDelta::FromMilliseconds(1),
base::TimeDelta::FromSeconds(10), 100);
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index 5ecb6d0..e76bfaa 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -205,7 +205,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
virtual bool GetMimeType(std::string* mime_type) const { return false; }
virtual bool GetURL(GURL* gurl) const;
virtual base::Time GetRequestTime() const;
- virtual bool IsCachedContent() const;
+ virtual bool IsCachedContent() const { return false; }
virtual int64 GetByteReadCount() const;
virtual int GetResponseCode() const { return -1; }
virtual int GetInputStreamBufferSize() const { return kFilterBufSize; }