author     jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-01-20 18:15:02 +0000
committer  jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-01-20 18:15:02 +0000
commit     6a1f7dd457ff97005218fd26dbc0285cf522ab1e (patch)
tree       db745db947b4baa1d3763d408bed192b0168b1bb /net
parent     b081172e58ecc67b055b955b1e45ee7ed8e1393b (diff)
Improve SDCH stability stats and error recovery
This CL provides an exponential back-off (within
a run of Chromium) for recovery from a decoding
error, rather than a full disable of SDCH when
a (minor) error appears. This will help recovery
from the common (implementation) problem where a
user has an SDCH-encoded page as a startup tab,
but no longer has the requisite dictionary from
the previous run. The exponential backoff is not
"time based," but instead counts down the number
of times a tentatively blacklisted domain is
accessed.
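As an illustration (not part of the patch), the countdown scheme can be sketched in isolation. Names here are simplified stand-ins for the DomainCounter maps that SdchManager gains in the diff below; the real code also logs UMA stats and lowercases domains:

#include <limits.h>

#include <map>
#include <string>

// Minimal sketch of access-counted exponential backoff, mirroring the
// SdchManager changes in this CL (simplified; illustration only).
class BlacklistSketch {
 public:
  void BlacklistDomain(const std::string& domain) {
    if (blacklisted_[domain] > 0)
      return;  // Already blacklisted; let the current countdown finish.

    // Each new blacklisting lasts 1, 3, 7, 15, ... accesses.
    int count = 1 + 2 * exponential_[domain];
    if (count > 0)
      exponential_[domain] = count;
    else
      count = INT_MAX;  // Overflowed; saturate rather than wrap.
    blacklisted_[domain] = count;
  }

  // Each lookup of a blacklisted domain consumes one unit of the countdown;
  // when the countdown reaches zero, the domain is supported again.
  bool IsInSupportedDomain(const std::string& domain) {
    std::map<std::string, int>::iterator it = blacklisted_.find(domain);
    if (it == blacklisted_.end())
      return true;
    if (--(it->second) <= 0)
      blacklisted_.erase(it);
    return false;
  }

 private:
  std::map<std::string, int> blacklisted_;  // Accesses left in countdown.
  std::map<std::string, int> exponential_;  // Length of last blacklisting.
};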
I now detect the use of the browser cache in an
SDCH decode, and use that to better distinguish
whether external proxies have corrupted the
process, or whether internal use of the cache
(such as at startup) has caused a "dictionary
not found" error to emerge.
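For orientation, the recovery choices that the cache bit feeds into can be condensed as follows. This is a hypothetical summary of the branches SdchFilter::ReadFilteredData takes in the diff below (names abbreviated), not the literal code:

// Hypothetical condensation of the SDCH decode-error recovery choices
// made in SdchFilter::ReadFilteredData (see sdch_filter.cc below).
enum Recovery {
  PASS_THROUGH,                // Emit the bytes undecoded.
  META_REFRESH,                // Refetch via meta-refresh, no blacklist.
  META_REFRESH_AND_BLACKLIST,  // Refetch and temporarily blacklist.
  HARD_FAILURE                 // Blacklist forever and fail the request.
};

Recovery ChooseRecovery(bool possible_pass_through,
                        bool hash_is_plausible,
                        bool is_html,
                        bool was_cached) {
  if (possible_pass_through)
    return PASS_THROUGH;  // We guessed SDCH; the server didn't use it.
  if (!hash_is_plausible)
    return PASS_THROUGH;  // Tagged as SDCH, but clearly isn't.
  if (!is_html)
    return HARD_FAILURE;  // No meta-refresh possible for non-HTML.
  if (was_cached)
    return META_REFRESH;  // Probably a startup tab: just refetch.
  return META_REFRESH_AND_BLACKLIST;  // Network copy was bad: back off.
}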
r=openvcdiff,huanr
Review URL: http://codereview.chromium.org/18355
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@8290 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net')

-rw-r--r--  net/base/filter.cc                   10
-rw-r--r--  net/base/filter.h                    11
-rw-r--r--  net/base/sdch_filter.cc              65
-rw-r--r--  net/base/sdch_filter.h                6
-rw-r--r--  net/base/sdch_filter_unittest.cc     66
-rw-r--r--  net/base/sdch_manager.cc             73
-rw-r--r--  net/base/sdch_manager.h              70
-rw-r--r--  net/url_request/url_request_job.cc    4

8 files changed, 246 insertions, 59 deletions
diff --git a/net/base/filter.cc b/net/base/filter.cc
index 6bf48d1..658c9a4 100644
--- a/net/base/filter.cc
+++ b/net/base/filter.cc
@@ -184,10 +184,11 @@ Filter* Filter::PrependNewFilter(FilterType type_id, int buffer_size,
       }
       break;
     }
-    case FILTER_TYPE_SDCH: {
+    case FILTER_TYPE_SDCH:
+    case FILTER_TYPE_SDCH_POSSIBLE: {
       scoped_ptr<SdchFilter> sdch_filter(new SdchFilter());
       if (sdch_filter->InitBuffer(buffer_size)) {
-        if (sdch_filter->InitDecoding()) {
+        if (sdch_filter->InitDecoding(type_id)) {
           first_filter = sdch_filter.release();
         }
       }
@@ -321,8 +322,9 @@ void Filter::SetMimeType(const std::string& mime_type) {
     next_filter_->SetMimeType(mime_type);
 }
 
-void Filter::SetConnectTime(const base::Time& time) {
+void Filter::SetConnectTime(const base::Time& time, bool was_cached) {
   connect_time_ = time;
+  was_cached_ = was_cached;
   if (next_filter_.get())
-    next_filter_->SetConnectTime(time);
+    next_filter_->SetConnectTime(time, was_cached_);
 }
diff --git a/net/base/filter.h b/net/base/filter.h
index 36a631e..71dc438 100644
--- a/net/base/filter.h
+++ b/net/base/filter.h
@@ -61,8 +61,9 @@ class Filter {
     FILTER_TYPE_DEFLATE,
     FILTER_TYPE_GZIP,
     FILTER_TYPE_BZIP2,
-    FILTER_TYPE_GZIP_HELPING_SDCH,
+    FILTER_TYPE_GZIP_HELPING_SDCH,  // Gzip possible, but pass through allowed.
     FILTER_TYPE_SDCH,
+    FILTER_TYPE_SDCH_POSSIBLE,  // Sdch possible, but pass through allowed.
     FILTER_TYPE_UNSUPPORTED,
   };
@@ -121,7 +122,7 @@ class Filter {
   void SetMimeType(const std::string& mime_type);
   const std::string& mime_type() const { return mime_type_; }
 
-  void SetConnectTime(const base::Time& time);
+  void SetConnectTime(const base::Time& time, bool was_cached);
 
   // Translate the text of a filter name (from Content-Encoding header) into a
   // FilterType.
@@ -173,6 +174,8 @@ class Filter {
   base::Time connect_time() const { return connect_time_; }
 
+  bool was_cached() const { return was_cached_; }
+
   // Buffer to hold the data to be filtered.
   scoped_array<char> stream_buffer_;
@@ -191,8 +194,10 @@ class Filter {
   GURL url_;
 
   // To facilitate histogramming by individual filters, we store the connect
-  // time for the corresponding HTTP transaction.
+  // time for the corresponding HTTP transaction, as well as whether this time
+  // was recalled from a cached entry.
   base::Time connect_time_;
+  bool was_cached_;
 
   // To facilitate error recovery in SDCH filters, allow filter to know if
   // content is text/html by checking within this mime type (SDCH filter may
diff --git a/net/base/sdch_filter.cc b/net/base/sdch_filter.cc
index 2113ebd..9f50d31 100644
--- a/net/base/sdch_filter.cc
+++ b/net/base/sdch_filter.cc
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+#include <limits.h>
 #include <ctype.h>
 
 #include <algorithm>
@@ -24,7 +25,8 @@ SdchFilter::SdchFilter()
       source_bytes_(0),
       output_bytes_(0),
       time_of_last_read_(),
-      size_of_last_read_(0) {
+      size_of_last_read_(0),
+      possible_pass_through_(false) {
 }
 
 SdchFilter::~SdchFilter() {
@@ -39,14 +41,15 @@ SdchFilter::~SdchFilter() {
     decoding_status_ = DECODING_ERROR;
   }
 
-  if (base::Time() != connect_time() && base::Time() != time_of_last_read_) {
+  if (!was_cached()
+      && base::Time() != connect_time()
+      && base::Time() != time_of_last_read_) {
     base::TimeDelta duration = time_of_last_read_ - connect_time();
-    // Note: connect_time is *only* set if this was NOT cached data, so the
-    // latency duration should only apply to the network read of the content.
     // We clip our logging at 10 minutes to prevent anamolous data from being
     // considered (per suggestion from Jake Brutlag).
-    // The relatively precise histogram only properly covers the range 1ms to 10
-    // seconds, so the discarded data would not be that readable anyway.
+    // The relatively precise histogram only properly covers the range 1ms to 3
+    // minutes, so the additional range is just gathered to calculate means and
+    // variance as is done in other settings.
     if (10 >= duration.InMinutes()) {
       if (DECODING_IN_PROGRESS == decoding_status_)
         UMA_HISTOGRAM_MEDIUM_TIMES(L"Sdch.Network_Decode_Latency_M", duration);
@@ -56,17 +59,20 @@ SdchFilter::~SdchFilter() {
     }
   }
 
-  UMA_HISTOGRAM_COUNTS(L"Sdch.Bytes read", source_bytes_);
   UMA_HISTOGRAM_COUNTS(L"Sdch.Bytes output", output_bytes_);
 
   if (dictionary_)
     dictionary_->Release();
 }
 
-bool SdchFilter::InitDecoding() {
+bool SdchFilter::InitDecoding(Filter::FilterType filter_type) {
   if (decoding_status_ != DECODING_UNINITIALIZED)
     return false;
 
+  // Handle case where sdch filter is guessed, but not required.
+  if (FILTER_TYPE_SDCH_POSSIBLE == filter_type)
+    possible_pass_through_ = true;
+
   // Initialize decoder only after we have a dictionary in hand.
   decoding_status_ = WAITING_FOR_DICTIONARY_SELECTION;
   return true;
@@ -104,19 +110,52 @@ Filter::FilterStatus SdchFilter::ReadFilteredData(char* dest_buffer,
       DCHECK(DECODING_ERROR == decoding_status_);
       DCHECK(0 == dest_buffer_excess_index_);
       DCHECK(dest_buffer_excess_.empty());
-      if (!dictionary_hash_is_plausible_) {
+      if (possible_pass_through_) {
+        // We added the sdch coding tag, and it should not have been added.
+        // This can happen in server experiments, where the server decides
+        // not to use sdch, even though there is a dictionary.  To be
+        // conservative, we locally added the tentative sdch (fearing that a
+        // proxy stripped it!) and we must now recant (pass through).
+        SdchManager::SdchErrorRecovery(SdchManager::DISCARD_TENTATIVE_SDCH);
+        decoding_status_ = PASS_THROUGH;
+        dest_buffer_excess_ = dictionary_hash_;  // Send what we scanned.
+      } else if (!dictionary_hash_is_plausible_) {
         // One of the first 9 bytes precluded consideration as a hash.
-        // This can't be an SDCH payload.
+        // This can't be an SDCH payload, even though the server said it was.
+        // This is a major error, as the server or proxy tagged this SDCH even
+        // though it is not!
+        // The good news is that error recovery is clear...
         SdchManager::SdchErrorRecovery(SdchManager::PASSING_THROUGH_NON_SDCH);
         decoding_status_ = PASS_THROUGH;
         dest_buffer_excess_ = dictionary_hash_;  // Send what we scanned.
       } else {
-        SdchManager::BlacklistDomain(url());
+        // We don't have the dictionary that was demanded.
+        // With very low probability, random garbage data looked like a
+        // dictionary specifier (8 ASCII characters followed by a null), but
+        // that is sufficiently unlikely that we ignore it.
         if (std::string::npos == mime_type().find_first_of("text/html")) {
-          SdchManager::SdchErrorRecovery(SdchManager::UNRECOVERABLE_ERROR);
+          SdchManager::BlacklistDomainForever(url());
+          if (was_cached_)
+            SdchManager::SdchErrorRecovery(
+                SdchManager::CACHED_META_REFRESH_UNSUPPORTED);
+          else
+            SdchManager::SdchErrorRecovery(
+                SdchManager::META_REFRESH_UNSUPPORTED);
           return FILTER_ERROR;
         }
-        SdchManager::SdchErrorRecovery(SdchManager::META_REFRESH_RECOVERY);
+        // HTML content means we can issue a meta-refresh, and get the content
+        // again, perhaps without SDCH (to be safe).
+        if (was_cached_) {
+          // Cached content is probably a startup tab, so we'll just get fresh
+          // content and try again, without disabling sdch.
+          SdchManager::SdchErrorRecovery(
+              SdchManager::META_REFRESH_CACHED_RECOVERY);
+        } else {
+          // Since it wasn't in the cache, we definately need at lest some
+          // period of blacklisting to get the correct content.
+          SdchManager::BlacklistDomain(url());
+          SdchManager::SdchErrorRecovery(SdchManager::META_REFRESH_RECOVERY);
+        }
         decoding_status_ = META_REFRESH_RECOVERY;
         // Issue a meta redirect with SDCH disabled.
         dest_buffer_excess_ = kDecompressionErrorHtml;
diff --git a/net/base/sdch_filter.h b/net/base/sdch_filter.h
index 1c62202..bfc9681 100644
--- a/net/base/sdch_filter.h
+++ b/net/base/sdch_filter.h
@@ -33,7 +33,7 @@ class SdchFilter : public Filter {
   virtual ~SdchFilter();
 
   // Initializes filter decoding mode and internal control blocks.
-  bool InitDecoding();
+  bool InitDecoding(Filter::FilterType filter_type);
 
   // Decode the pre-filter data and writes the output into |dest_buffer|
   // The function returns FilterStatus. See filter.h for its description.
@@ -104,6 +104,10 @@ class SdchFilter : public Filter {
   // How large was the most recent non-zero size data chunk?
   int size_of_last_read_;
 
+  // Error recovery in content type may add an sdch filter type, in which case
+  // we should gracefully perform pass through if the format is incorrect, or
+  // an applicable dictionary can't be found.
+  bool possible_pass_through_;
 
   DISALLOW_COPY_AND_ASSIGN(SdchFilter);
 };
diff --git a/net/base/sdch_filter_unittest.cc b/net/base/sdch_filter_unittest.cc
index 9ed9710..21d9aee 100644
--- a/net/base/sdch_filter_unittest.cc
+++ b/net/base/sdch_filter_unittest.cc
@@ -2,6 +2,8 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+#include <limits.h>
+
 #include <algorithm>
 #include <string>
 #include <vector>
@@ -739,10 +741,71 @@ TEST_F(SdchFilterTest, DomainBlacklisting) {
   EXPECT_TRUE(SdchManager::Global()->IsInSupportedDomain(google_url));
 
   SdchManager::BlacklistDomain(google_url);
-  EXPECT_FALSE(SdchManager::Global()->IsInSupportedDomain(test_url));
   EXPECT_FALSE(SdchManager::Global()->IsInSupportedDomain(google_url));
 }
 
+TEST_F(SdchFilterTest, DomainBlacklistingCaseSensitivity) {
+  GURL test_url("http://www.TesT.com");
+  GURL test2_url("http://www.tEst.com");
+
+  EXPECT_TRUE(SdchManager::Global()->IsInSupportedDomain(test_url));
+  EXPECT_TRUE(SdchManager::Global()->IsInSupportedDomain(test2_url));
+  SdchManager::BlacklistDomain(test_url);
+  EXPECT_FALSE(SdchManager::Global()->IsInSupportedDomain(test2_url));
+}
+
+TEST_F(SdchFilterTest, BlacklistingReset) {
+  GURL gurl("http://mytest.DoMain.com");
+  std::string domain(gurl.host());
+
+  SdchManager::ClearBlacklistings();
+  EXPECT_EQ(SdchManager::BlackListDomainCount(domain), 0);
+  EXPECT_EQ(SdchManager::BlacklistDomainExponential(domain), 0);
+  EXPECT_TRUE(SdchManager::Global()->IsInSupportedDomain(gurl));
+}
+
+TEST_F(SdchFilterTest, BlacklistingSingleBlacklist) {
+  GURL gurl("http://mytest.DoMain.com");
+  std::string domain(gurl.host());
+  SdchManager::ClearBlacklistings();
+
+  SdchManager::Global()->BlacklistDomain(gurl);
+  EXPECT_EQ(SdchManager::BlackListDomainCount(domain), 1);
+  EXPECT_EQ(SdchManager::BlacklistDomainExponential(domain), 1);
+
+  // Check that any domain lookup reduces the blacklist counter.
+  EXPECT_FALSE(SdchManager::Global()->IsInSupportedDomain(gurl));
+  EXPECT_EQ(SdchManager::BlackListDomainCount(domain), 0);
+  EXPECT_TRUE(SdchManager::Global()->IsInSupportedDomain(gurl));
+}
+
+TEST_F(SdchFilterTest, BlacklistingExponential) {
+  GURL gurl("http://mytest.DoMain.com");
+  std::string domain(gurl.host());
+  SdchManager::ClearBlacklistings();
+
+  int exponential = 1;
+  for (int i = 1; i < 100; ++i) {
+    SdchManager::Global()->BlacklistDomain(gurl);
+    EXPECT_EQ(SdchManager::BlacklistDomainExponential(domain), exponential);
+
+    EXPECT_EQ(SdchManager::BlackListDomainCount(domain), exponential);
+    EXPECT_FALSE(SdchManager::Global()->IsInSupportedDomain(gurl));
+    EXPECT_EQ(SdchManager::BlackListDomainCount(domain), exponential - 1);
+
+    // Simulate a large number of domain checks (which eventually remove the
+    // blacklisting).
+    SdchManager::ClearDomainBlacklisting(domain);
+    EXPECT_EQ(SdchManager::BlackListDomainCount(domain), 0);
+    EXPECT_TRUE(SdchManager::Global()->IsInSupportedDomain(gurl));
+
+    // Predict what exponential backoff will be.
+    exponential = 1 + 2 * exponential;
+    if (exponential < 0)
+      exponential = INT_MAX;  // We don't wrap.
+  }
+}
+
 TEST_F(SdchFilterTest, CanSetExactMatchDictionary) {
   std::string dictionary_domain("x.y.z.google.com");
   std::string dictionary_text(NewSdchDictionary(dictionary_domain));
@@ -840,3 +903,4 @@ TEST_F(SdchFilterTest, DictionaryTooLarge) {
   EXPECT_FALSE(sdch_manager_->AddSdchDictionary(dictionary_text,
                                                 GURL("http://" + dictionary_domain)));
 }
+
diff --git a/net/base/sdch_manager.cc b/net/base/sdch_manager.cc
index d11eea0..be8ae13 100644
--- a/net/base/sdch_manager.cc
+++ b/net/base/sdch_manager.cc
@@ -32,7 +32,7 @@ SdchManager* SdchManager::Global() {
 // static
 void SdchManager::SdchErrorRecovery(ProblemCodes problem) {
-  static LinearHistogram histogram(L"Sdch.ProblemCodes", MIN_PROBLEM_CODE,
+  static LinearHistogram histogram(L"Sdch.ProblemCodes_2", MIN_PROBLEM_CODE,
                                    MAX_PROBLEM_CODE - 1, MAX_PROBLEM_CODE);
   histogram.SetFlags(kUmaTargetedHistogramFlag);
   histogram.Add(problem);
@@ -41,8 +41,29 @@ void SdchManager::SdchErrorRecovery(ProblemCodes problem) {
 // static
 void SdchManager::ClearBlacklistings() {
   Global()->blacklisted_domains_.clear();
+  Global()->exponential_blacklist_count.clear();
 }
 
+// static
+void SdchManager::ClearDomainBlacklisting(std::string domain) {
+  Global()->blacklisted_domains_.erase(StringToLowerASCII(domain));
+}
+
+// static
+int SdchManager::BlackListDomainCount(std::string domain) {
+  if (Global()->blacklisted_domains_.end() ==
+      Global()->blacklisted_domains_.find(domain))
+    return 0;
+  return Global()->blacklisted_domains_[StringToLowerASCII(domain)];
+}
+
+// static
+int SdchManager::BlacklistDomainExponential(std::string domain) {
+  if (Global()->exponential_blacklist_count.end() ==
+      Global()->exponential_blacklist_count.find(domain))
+    return 0;
+  return Global()->exponential_blacklist_count[StringToLowerASCII(domain)];
+}
 
 //------------------------------------------------------------------------------
 
 SdchManager::SdchManager() : sdch_enabled_(false) {
@@ -61,14 +82,32 @@ SdchManager::~SdchManager() {
 }
 
 // static
-bool SdchManager::BlacklistDomain(const GURL& url) {
+void SdchManager::BlacklistDomain(const GURL& url) {
   if (!global_ )
-    return false;
-  UMA_HISTOGRAM_MEDIUM_TIMES(L"Sdch.UptimeBeforeBlacklisting_M",
-      Time::Now() - FieldTrialList::application_start_time());
+    return;
+
   std::string domain(StringToLowerASCII(url.host()));
-  global_->blacklisted_domains_.insert(domain);
-  return true;
+
+  int count = global_->blacklisted_domains_[domain];
+  if (count > 0)
+    return;  // Domain is already blacklisted.
+
+  count = 1 + 2 * global_->exponential_blacklist_count[domain];
+  if (count > 0)
+    global_->exponential_blacklist_count[domain] = count;
+  else
+    count = INT_MAX;
+
+  global_->blacklisted_domains_[domain] = count;
+}
+
+// static
+void SdchManager::BlacklistDomainForever(const GURL& url) {
+  if (!global_ )
+    return;
+
+  std::string domain(StringToLowerASCII(url.host()));
+  global_->exponential_blacklist_count[domain] = INT_MAX;
+  global_->blacklisted_domains_[domain] = INT_MAX;
 }
 
 void SdchManager::EnableSdchSupport(const std::string& domain) {
@@ -77,7 +116,7 @@ void SdchManager::EnableSdchSupport(const std::string& domain) {
   global_->sdch_enabled_ = true;
 }
 
-const bool SdchManager::IsInSupportedDomain(const GURL& url) const {
+const bool SdchManager::IsInSupportedDomain(const GURL& url) {
   if (!sdch_enabled_ )
     return false;
   if (!supported_domain_.empty() &&
@@ -87,12 +126,18 @@ const bool SdchManager::IsInSupportedDomain(const GURL& url) {
   if (blacklisted_domains_.empty())
     return true;
 
-  std::string domain = StringToLowerASCII(url.host());
-  bool was_blacklisted(blacklisted_domains_.end() !=
-                       blacklisted_domains_.find(domain));
-  if (was_blacklisted)
-    SdchErrorRecovery(DOMAIN_BLACKLIST_INCLUDES_TARGET);
-  return !was_blacklisted;
+  std::string domain(StringToLowerASCII(url.host()));
+  DomainCounter::iterator it = blacklisted_domains_.find(domain);
+  if (blacklisted_domains_.end() == it)
+    return true;
+
+  int count = it->second - 1;
+  if (count > 0)
+    blacklisted_domains_[domain] = count;
+  else
+    blacklisted_domains_.erase(domain);
+  SdchErrorRecovery(DOMAIN_BLACKLIST_INCLUDES_TARGET);
+  return false;
 }
 
 bool SdchManager::CanFetchDictionary(const GURL& referring_url,
diff --git a/net/base/sdch_manager.h b/net/base/sdch_manager.h
index 1694318..5870c17 100644
--- a/net/base/sdch_manager.h
+++ b/net/base/sdch_manager.h
@@ -57,16 +57,16 @@ class SdchManager {
     MIN_PROBLEM_CODE,
 
     // Content-encoding correction problems.
-    ADDED_CONTENT_ENCODING,
-    FIXED_CONTENT_ENCODING,
-    FIXED_CONTENT_ENCODINGS,
+    ADDED_CONTENT_ENCODING = 1,
+    FIXED_CONTENT_ENCODING = 2,
+    FIXED_CONTENT_ENCODINGS = 3,
 
     // Content decoding errors.
-    DECODE_HEADER_ERROR,
-    DECODE_BODY_ERROR,
+    DECODE_HEADER_ERROR = 4,
+    DECODE_BODY_ERROR = 5,
 
     // More content-encoding correction problems.
-    OPTIONAL_GUNZIP_ENCODING_ADDED,
+    OPTIONAL_GUNZIP_ENCODING_ADDED = 6,
 
     // Dictionary selection for use problems.
     DICTIONARY_FOUND_HAS_WRONG_DOMAIN = 10,
@@ -76,11 +76,6 @@ class SdchManager {
     DICTIONARY_HASH_NOT_FOUND = 14,
     DICTIONARY_HASH_MALFORMED = 15,
 
-    // Decode recovery methods.
-    META_REFRESH_RECOVERY = 16,
-    PASSING_THROUGH_NON_SDCH = 17,
-    UNRECOVERABLE_ERROR = 18,
-
     // Dictionary saving problems.
     DICTIONARY_HAS_NO_HEADER = 20,
     DICTIONARY_HEADER_LINE_MISSING_COLON = 21,
@@ -108,9 +103,17 @@ class SdchManager {
     SDCH_CONTENT_ENCODE_FOR_NON_SDCH_REQUEST = 51,
 
     // Dictionary manager issues.
-    PLEASE_IGNORE_THIS_ENUM = 60,  // Erroneous use in Version 1.0 of Chrome.
     DOMAIN_BLACKLIST_INCLUDES_TARGET = 61,
 
+    // Problematic decode recovery methods.
+    META_REFRESH_RECOVERY = 70,            // Dictionary not found.
+    META_REFRESH_UNSUPPORTED = 71,         // Unrecoverable error.
+    CACHED_META_REFRESH_UNSUPPORTED = 72,  // As above, but pulled from cache.
+    PASSING_THROUGH_NON_SDCH = 73,  // Non-html tagged as sdch but malformed.
+
+    // Common decoded recovery methods.
+    META_REFRESH_CACHED_RECOVERY = 80,  // Probably startup tab loading.
+    DISCARD_TENTATIVE_SDCH = 81,        // Server decided not to use sdch.
+
     MAX_PROBLEM_CODE  // Used to bound histogram.
   };
@@ -203,19 +206,37 @@ class SdchManager {
   static bool sdch_enabled() { return global_ && global_->sdch_enabled_; }
 
-  // Prevent further advertising of SDCH on this domain (if SDCH is enabled).
-  // Used when filter errors are found from a given domain, to prevent further
-  // use of SDCH on that domain.
-  static bool BlacklistDomain(const GURL& url);
+  // Briefly prevent further advertising of SDCH on this domain (if SDCH is
+  // enabled).  After enough calls to IsInSupportedDomain() the blacklisting
+  // will be removed.  Additional blacklists take exponentially more calls
+  // to IsInSupportedDomain() before the blacklisting is undone.
+  // Used when filter errors are found from a given domain, but it is plausible
+  // that the cause is temporary (such as application startup, where cached
+  // entries are used, but a dictionary is not yet loaded).
+  static void BlacklistDomain(const GURL& url);
 
-  // For testing only, tihs function resets enabling of sdch, and clears the
+  // Used when SEVERE filter errors are found from a given domain, to prevent
+  // further use of SDCH on that domain.
+  static void BlacklistDomainForever(const GURL& url);
+
+  // Unit test only, this function resets enabling of sdch, and clears the
   // blacklist.
   static void ClearBlacklistings();
 
+  // Unit test only, this function resets the blacklisting count for a domain.
+  static void ClearDomainBlacklisting(std::string domain);
+
+  // Unit test only: indicate how many more times a domain will be blacklisted.
+  static int BlackListDomainCount(std::string domain);
+
+  // Unit test only: Indicate what current blacklist increment is for a domain.
+  static int BlacklistDomainExponential(std::string domain);
+
   // Check to see if SDCH is enabled (globally), and the given URL is in a
   // supported domain (i.e., not blacklisted, and either the specific supported
-  // domain, or all domains were assumed supported).
-  const bool IsInSupportedDomain(const GURL& url) const;
+  // domain, or all domains were assumed supported).  If it is blacklist, reduce
+  // by 1 the number of times it will be reported as blacklisted.
+  const bool IsInSupportedDomain(const GURL& url);
 
   // Schedule the URL fetching to load a dictionary. This will generally return
   // long before the dictionary is actually loaded and added.
@@ -258,6 +279,8 @@ class SdchManager {
                          std::string* client_hash, std::string* server_hash);
 
  private:
+  typedef std::map<const std::string, int> DomainCounter;
+
   // A map of dictionaries info indexed by the hash that the server provides.
   typedef std::map<std::string, Dictionary*> DictionaryMap;
@@ -279,8 +302,13 @@ class SdchManager {
   // domain is supported.
   std::string supported_domain_;
 
-  // List domains where decode failures have required disabling sdch.
-  std::set<std::string> blacklisted_domains_;
+  // List domains where decode failures have required disabling sdch, along with
+  // count of how many additonal uses should be blacklisted.
+  DomainCounter blacklisted_domains_;
+
+  // Support exponential backoff in number of domain accesses before
+  // blacklisting expires.
+  DomainCounter exponential_blacklist_count;
 
   DISALLOW_COPY_AND_ASSIGN(SdchManager);
 };
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index c30388f..6e6a1df 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -61,8 +61,8 @@ void URLRequestJob::SetupFilter() {
       // Approximate connect time with request_time. If it is not cached, then
       // this is a good approximation for when the first bytes went on the
      // wire.
-      if (!request_->response_info_.was_cached)
-        filter_->SetConnectTime(request_->response_info_.request_time);
+      filter_->SetConnectTime(request_->response_info_.request_time,
+                              request_->response_info_.was_cached);
     }
   }
 }
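A note on the backoff arithmetic: the BlacklistingExponential test encodes the recurrence count_n = 1 + 2 * count_(n-1) with count_1 = 1, so successive blacklistings last 1, 3, 7, 15, ... accesses. In closed form that is count_n = 2^n - 1, saturating at INT_MAX rather than wrapping.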