author     brettw <brettw@chromium.org>  2015-06-09 15:39:08 -0700
committer  Commit bot <commit-bot@chromium.org>  2015-06-09 22:39:37 +0000
commit     bc17d2c8d864a118f48a84de0709f5a6c463cffd
tree       1b22817d8ab1d8797b14d734f0f2924c4db1736b /net
parent     952985e3821fea40f284004cae13795f8a3db489
Move LowerCaseEqualsASCII to base namespace
Remove url:: variants. Add the 4-element version from url:: to base::
Review URL: https://codereview.chromium.org/1172753003
Cr-Commit-Position: refs/heads/master@{#333597}
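
For callers, this is a pure namespace move: the overloads keep their shapes and simply gain the `base::` qualifier. Below is a minimal sketch of the two call forms that appear throughout this patch; the function names are hypothetical, and the include path assumes the declarations live in base/strings/string_util.h at this revision.

```cpp
// Hypothetical call sites illustrating the relocated helper; only the
// base:: qualification is new relative to the pre-CL spelling.
#include <string>

#include "base/strings/string_util.h"  // assumed location of the declarations

// String overload: compares a string against a lowercase ASCII literal.
bool IsGzipEncoding(const std::string& encoding) {
  // Before this CL this was spelled LowerCaseEqualsASCII(encoding, "gzip").
  return base::LowerCaseEqualsASCII(encoding, "gzip");
}

// Iterator-range overload, used with the header tokenizers in this patch.
bool RangeSaysBytes(std::string::const_iterator begin,
                    std::string::const_iterator end) {
  return base::LowerCaseEqualsASCII(begin, end, "bytes");
}
```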
Diffstat (limited to 'net')
-rw-r--r--  net/base/host_mapping_rules.cc                          |  4
-rw-r--r--  net/base/net_util.cc                                    |  2
-rw-r--r--  net/filter/filter.cc                                    |  8
-rw-r--r--  net/http/http_auth.cc                                   |  3
-rw-r--r--  net/http/http_auth_gssapi_posix.cc                      |  4
-rw-r--r--  net/http/http_auth_handler_basic.cc                     |  4
-rw-r--r--  net/http/http_auth_handler_digest.cc                    | 32
-rw-r--r--  net/http/http_auth_handler_mock.cc                      |  2
-rw-r--r--  net/http/http_auth_handler_ntlm.cc                      |  2
-rw-r--r--  net/http/http_auth_sspi_win.cc                          |  4
-rw-r--r--  net/http/http_cache_transaction.cc                      |  3
-rw-r--r--  net/http/http_content_disposition.cc                    | 16
-rw-r--r--  net/http/http_response_headers.cc                       | 21
-rw-r--r--  net/http/http_security_headers.cc                       | 14
-rw-r--r--  net/http/http_util.cc                                   | 30
-rw-r--r--  net/proxy/proxy_bypass_rules.cc                         |  2
-rw-r--r--  net/proxy/proxy_script_fetcher_impl.cc                  |  2
-rw-r--r--  net/proxy/proxy_server.cc                               | 28
-rw-r--r--  net/test/url_request/url_request_slow_download_job.cc   | 23
-rw-r--r--  net/url_request/url_request_file_job.cc                 |  7
-rw-r--r--  net/url_request/url_request_job_manager.cc              |  2
-rw-r--r--  net/websockets/websocket_basic_handshake_stream.cc      |  2

22 files changed, 112 insertions(+), 103 deletions(-)
diff --git a/net/base/host_mapping_rules.cc b/net/base/host_mapping_rules.cc
index a2789c0..179a0dc8 100644
--- a/net/base/host_mapping_rules.cc
+++ b/net/base/host_mapping_rules.cc
@@ -72,7 +72,7 @@ bool HostMappingRules::AddRuleFromString(const std::string& rule_string) {
   base::SplitString(trimmed, ' ', &parts);
   // Test for EXCLUSION rule.
-  if (parts.size() == 2 && LowerCaseEqualsASCII(parts[0], "exclude")) {
+  if (parts.size() == 2 && base::LowerCaseEqualsASCII(parts[0], "exclude")) {
     ExclusionRule rule;
     rule.hostname_pattern = base::StringToLowerASCII(parts[1]);
     exclusion_rules_.push_back(rule);
@@ -80,7 +80,7 @@ bool HostMappingRules::AddRuleFromString(const std::string& rule_string) {
   }
   // Test for MAP rule.
-  if (parts.size() == 3 && LowerCaseEqualsASCII(parts[0], "map")) {
+  if (parts.size() == 3 && base::LowerCaseEqualsASCII(parts[0], "map")) {
     MapRule rule;
     rule.hostname_pattern = base::StringToLowerASCII(parts[1]);
diff --git a/net/base/net_util.cc b/net/base/net_util.cc
index 028b56b..f8615cc 100644
--- a/net/base/net_util.cc
+++ b/net/base/net_util.cc
@@ -290,7 +290,7 @@ NET_EXPORT bool IsPortAllowedForScheme(int port,
   }
   // FTP requests have an extra set of whitelisted schemes.
-  if (LowerCaseEqualsASCII(url_scheme, url::kFtpScheme)) {
+  if (base::LowerCaseEqualsASCII(url_scheme, url::kFtpScheme)) {
     for (int allowed_ftp_port : kAllowedFtpPorts) {
       if (allowed_ftp_port == port)
         return true;
diff --git a/net/filter/filter.cc b/net/filter/filter.cc
index b49aa34..4df0191 100644
--- a/net/filter/filter.cc
+++ b/net/filter/filter.cc
@@ -184,12 +184,12 @@ bool Filter::FlushStreamBuffer(int stream_data_len) {
 Filter::FilterType Filter::ConvertEncodingToType(
     const std::string& filter_type) {
   FilterType type_id;
-  if (LowerCaseEqualsASCII(filter_type, kDeflate)) {
+  if (base::LowerCaseEqualsASCII(filter_type, kDeflate)) {
     type_id = FILTER_TYPE_DEFLATE;
-  } else if (LowerCaseEqualsASCII(filter_type, kGZip) ||
-             LowerCaseEqualsASCII(filter_type, kXGZip)) {
+  } else if (base::LowerCaseEqualsASCII(filter_type, kGZip) ||
+             base::LowerCaseEqualsASCII(filter_type, kXGZip)) {
     type_id = FILTER_TYPE_GZIP;
-  } else if (LowerCaseEqualsASCII(filter_type, kSdch)) {
+  } else if (base::LowerCaseEqualsASCII(filter_type, kSdch)) {
     type_id = FILTER_TYPE_SDCH;
   } else {
     // Note we also consider "identity" and "uncompressed" UNSUPPORTED as
diff --git a/net/http/http_auth.cc b/net/http/http_auth.cc
index 3525114..7752d74 100644
--- a/net/http/http_auth.cc
+++ b/net/http/http_auth.cc
@@ -76,7 +76,8 @@ HttpAuth::AuthorizationResult HttpAuth::HandleChallengeResponse(
       HttpAuth::AUTHORIZATION_RESULT_INVALID;
   while (headers->EnumerateHeader(&iter, header_name, &challenge)) {
     HttpAuthChallengeTokenizer props(challenge.begin(), challenge.end());
-    if (!LowerCaseEqualsASCII(props.scheme(), current_scheme_name.c_str()))
+    if (!base::LowerCaseEqualsASCII(props.scheme(),
+                                    current_scheme_name.c_str()))
       continue;
     authorization_result = handler->HandleAnotherChallenge(&props);
     if (authorization_result != HttpAuth::AUTHORIZATION_RESULT_INVALID) {
diff --git a/net/http/http_auth_gssapi_posix.cc b/net/http/http_auth_gssapi_posix.cc
index 9b902af..388cc64 100644
--- a/net/http/http_auth_gssapi_posix.cc
+++ b/net/http/http_auth_gssapi_posix.cc
@@ -688,8 +688,8 @@ void HttpAuthGSSAPI::Delegate() {
 HttpAuth::AuthorizationResult HttpAuthGSSAPI::ParseChallenge(
     HttpAuthChallengeTokenizer* tok) {
   // Verify the challenge's auth-scheme.
-  if (!LowerCaseEqualsASCII(tok->scheme(),
-                            base::StringToLowerASCII(scheme_).c_str()))
+  if (!base::LowerCaseEqualsASCII(tok->scheme(),
+                                  base::StringToLowerASCII(scheme_).c_str()))
     return HttpAuth::AUTHORIZATION_RESULT_INVALID;
   std::string encoded_auth_token = tok->base64_param();
diff --git a/net/http/http_auth_handler_basic.cc b/net/http/http_auth_handler_basic.cc
index b1624b8..27b892b 100644
--- a/net/http/http_auth_handler_basic.cc
+++ b/net/http/http_auth_handler_basic.cc
@@ -40,7 +40,7 @@ bool ParseRealm(const HttpAuthChallengeTokenizer& tokenizer,
   realm->clear();
   HttpUtil::NameValuePairsIterator parameters = tokenizer.param_pairs();
   while (parameters.GetNext()) {
-    if (!LowerCaseEqualsASCII(parameters.name(), "realm"))
+    if (!base::LowerCaseEqualsASCII(parameters.name(), "realm"))
       continue;
     if (!ConvertToUtf8AndNormalize(parameters.value(), kCharsetLatin1, realm)) {
@@ -62,7 +62,7 @@ bool HttpAuthHandlerBasic::Init(HttpAuthChallengeTokenizer* challenge) {
 bool HttpAuthHandlerBasic::ParseChallenge(
     HttpAuthChallengeTokenizer* challenge) {
   // Verify the challenge's auth-scheme.
-  if (!LowerCaseEqualsASCII(challenge->scheme(), "basic"))
+  if (!base::LowerCaseEqualsASCII(challenge->scheme(), "basic"))
     return false;
   std::string realm;
diff --git a/net/http/http_auth_handler_digest.cc b/net/http/http_auth_handler_digest.cc
index 0fbd0d3..a5f96b1 100644
--- a/net/http/http_auth_handler_digest.cc
+++ b/net/http/http_auth_handler_digest.cc
@@ -113,7 +113,7 @@ HttpAuth::AuthorizationResult HttpAuthHandlerDigest::HandleAnotherChallenge(
   // to differentiate between stale and rejected responses.
   // Note that the state of the current handler is not mutated - this way if
   // there is a rejection the realm hasn't changed.
-  if (!LowerCaseEqualsASCII(challenge->scheme(), "digest"))
+  if (!base::LowerCaseEqualsASCII(challenge->scheme(), "digest"))
     return HttpAuth::AUTHORIZATION_RESULT_INVALID;
   HttpUtil::NameValuePairsIterator parameters = challenge->param_pairs();
@@ -122,10 +122,10 @@ HttpAuth::AuthorizationResult HttpAuthHandlerDigest::HandleAnotherChallenge(
   // for the new challenge.
   std::string original_realm;
   while (parameters.GetNext()) {
-    if (LowerCaseEqualsASCII(parameters.name(), "stale")) {
-      if (LowerCaseEqualsASCII(parameters.value(), "true"))
+    if (base::LowerCaseEqualsASCII(parameters.name(), "stale")) {
+      if (base::LowerCaseEqualsASCII(parameters.value(), "true"))
         return HttpAuth::AUTHORIZATION_RESULT_STALE;
-    } else if (LowerCaseEqualsASCII(parameters.name(), "realm")) {
+    } else if (base::LowerCaseEqualsASCII(parameters.name(), "realm")) {
       original_realm = parameters.value();
     }
   }
@@ -199,7 +199,7 @@ bool HttpAuthHandlerDigest::ParseChallenge(
   realm_ = original_realm_ = nonce_ = domain_ = opaque_ = std::string();
   // FAIL -- Couldn't match auth-scheme.
-  if (!LowerCaseEqualsASCII(challenge->scheme(), "digest"))
+  if (!base::LowerCaseEqualsASCII(challenge->scheme(), "digest"))
     return false;
   HttpUtil::NameValuePairsIterator parameters = challenge->param_pairs();
@@ -225,38 +225,38 @@ bool HttpAuthHandlerDigest::ParseChallenge(
 bool HttpAuthHandlerDigest::ParseChallengeProperty(const std::string& name,
                                                    const std::string& value) {
-  if (LowerCaseEqualsASCII(name, "realm")) {
+  if (base::LowerCaseEqualsASCII(name, "realm")) {
     std::string realm;
     if (!ConvertToUtf8AndNormalize(value, kCharsetLatin1, &realm))
       return false;
     realm_ = realm;
     original_realm_ = value;
-  } else if (LowerCaseEqualsASCII(name, "nonce")) {
+  } else if (base::LowerCaseEqualsASCII(name, "nonce")) {
     nonce_ = value;
-  } else if (LowerCaseEqualsASCII(name, "domain")) {
+  } else if (base::LowerCaseEqualsASCII(name, "domain")) {
     domain_ = value;
-  } else if (LowerCaseEqualsASCII(name, "opaque")) {
+  } else if (base::LowerCaseEqualsASCII(name, "opaque")) {
     opaque_ = value;
-  } else if (LowerCaseEqualsASCII(name, "stale")) {
+  } else if (base::LowerCaseEqualsASCII(name, "stale")) {
     // Parse the stale boolean.
-    stale_ = LowerCaseEqualsASCII(value, "true");
-  } else if (LowerCaseEqualsASCII(name, "algorithm")) {
+    stale_ = base::LowerCaseEqualsASCII(value, "true");
+  } else if (base::LowerCaseEqualsASCII(name, "algorithm")) {
     // Parse the algorithm.
-    if (LowerCaseEqualsASCII(value, "md5")) {
+    if (base::LowerCaseEqualsASCII(value, "md5")) {
       algorithm_ = ALGORITHM_MD5;
-    } else if (LowerCaseEqualsASCII(value, "md5-sess")) {
+    } else if (base::LowerCaseEqualsASCII(value, "md5-sess")) {
       algorithm_ = ALGORITHM_MD5_SESS;
     } else {
       DVLOG(1) << "Unknown value of algorithm";
       return false;  // FAIL -- unsupported value of algorithm.
     }
-  } else if (LowerCaseEqualsASCII(name, "qop")) {
+  } else if (base::LowerCaseEqualsASCII(name, "qop")) {
    // Parse the comma separated list of qops.
    // auth is the only supported qop, and all other values are ignored.
    HttpUtil::ValuesIterator qop_values(value.begin(), value.end(), ',');
    qop_ = QOP_UNSPECIFIED;
    while (qop_values.GetNext()) {
-      if (LowerCaseEqualsASCII(qop_values.value(), "auth")) {
+      if (base::LowerCaseEqualsASCII(qop_values.value(), "auth")) {
        qop_ = QOP_AUTH;
        break;
      }
diff --git a/net/http/http_auth_handler_mock.cc b/net/http/http_auth_handler_mock.cc
index 35ed3ab..0c0444e 100644
--- a/net/http/http_auth_handler_mock.cc
+++ b/net/http/http_auth_handler_mock.cc
@@ -83,7 +83,7 @@ HttpAuth::AuthorizationResult HttpAuthHandlerMock::HandleAnotherChallenge(
   // challenge for a non connection based scheme, assume it's a rejection.
   if (!is_connection_based() || challenge->base64_param().empty())
     return HttpAuth::AUTHORIZATION_RESULT_REJECT;
-  if (!LowerCaseEqualsASCII(challenge->scheme(), "mock"))
+  if (!base::LowerCaseEqualsASCII(challenge->scheme(), "mock"))
     return HttpAuth::AUTHORIZATION_RESULT_INVALID;
   return HttpAuth::AUTHORIZATION_RESULT_ACCEPT;
 }
diff --git a/net/http/http_auth_handler_ntlm.cc b/net/http/http_auth_handler_ntlm.cc
index de0fe29..51a3232 100644
--- a/net/http/http_auth_handler_ntlm.cc
+++ b/net/http/http_auth_handler_ntlm.cc
@@ -114,7 +114,7 @@ HttpAuth::AuthorizationResult HttpAuthHandlerNTLM::ParseChallenge(
   auth_data_.clear();
   // Verify the challenge's auth-scheme.
-  if (!LowerCaseEqualsASCII(tok->scheme(), "ntlm"))
+  if (!base::LowerCaseEqualsASCII(tok->scheme(), "ntlm"))
     return HttpAuth::AUTHORIZATION_RESULT_INVALID;
   std::string base64_param = tok->base64_param();
diff --git a/net/http/http_auth_sspi_win.cc b/net/http/http_auth_sspi_win.cc
index 56b1c8c..c935d33 100644
--- a/net/http/http_auth_sspi_win.cc
+++ b/net/http/http_auth_sspi_win.cc
@@ -283,8 +283,8 @@ void HttpAuthSSPI::ResetSecurityContext() {
 HttpAuth::AuthorizationResult HttpAuthSSPI::ParseChallenge(
     HttpAuthChallengeTokenizer* tok) {
   // Verify the challenge's auth-scheme.
-  if (!LowerCaseEqualsASCII(tok->scheme(),
-                            base::StringToLowerASCII(scheme_).c_str()))
+  if (!base::LowerCaseEqualsASCII(tok->scheme(),
+                                  base::StringToLowerASCII(scheme_).c_str()))
     return HttpAuth::AUTHORIZATION_RESULT_INVALID;
   std::string encoded_auth_token = tok->base64_param();
diff --git a/net/http/http_cache_transaction.cc b/net/http/http_cache_transaction.cc
index e3af3f7..dd4b357 100644
--- a/net/http/http_cache_transaction.cc
+++ b/net/http/http_cache_transaction.cc
@@ -254,7 +254,8 @@ static bool HeaderMatches(const HttpRequestHeaders& headers,
     HttpUtil::ValuesIterator v(header_value.begin(), header_value.end(), ',');
     while (v.GetNext()) {
-      if (LowerCaseEqualsASCII(v.value_begin(), v.value_end(), search->value))
+      if (base::LowerCaseEqualsASCII(v.value_begin(), v.value_end(),
+                                     search->value))
         return true;
     }
   }
diff --git a/net/http/http_content_disposition.cc b/net/http/http_content_disposition.cc
index 1563bec..06f815b 100644
--- a/net/http/http_content_disposition.cc
+++ b/net/http/http_content_disposition.cc
@@ -359,9 +359,9 @@ std::string::const_iterator HttpContentDisposition::ConsumeDispositionType(
   DCHECK(std::find(type_begin, type_end, '=') == type_end);
-  if (LowerCaseEqualsASCII(type_begin, type_end, "inline")) {
+  if (base::LowerCaseEqualsASCII(type_begin, type_end, "inline")) {
     type_ = INLINE;
-  } else if (LowerCaseEqualsASCII(type_begin, type_end, "attachment")) {
+  } else if (base::LowerCaseEqualsASCII(type_begin, type_end, "attachment")) {
     type_ = ATTACHMENT;
   } else {
     parse_result_flags_ |= HAS_UNKNOWN_DISPOSITION_TYPE;
@@ -402,16 +402,16 @@ void HttpContentDisposition::Parse(const std::string& header,
   HttpUtil::NameValuePairsIterator iter(pos, end, ';');
   while (iter.GetNext()) {
-    if (filename.empty() && LowerCaseEqualsASCII(iter.name_begin(),
-                                                 iter.name_end(),
-                                                 "filename")) {
+    if (filename.empty() &&
+        base::LowerCaseEqualsASCII(iter.name_begin(), iter.name_end(),
+                                   "filename")) {
       DecodeFilenameValue(iter.value(), referrer_charset, &filename,
                           &parse_result_flags_);
       if (!filename.empty())
         parse_result_flags_ |= HAS_FILENAME;
-    } else if (ext_filename.empty() && LowerCaseEqualsASCII(iter.name_begin(),
-                                                            iter.name_end(),
-                                                            "filename*")) {
+    } else if (ext_filename.empty() &&
+               base::LowerCaseEqualsASCII(iter.name_begin(), iter.name_end(),
+                                          "filename*")) {
       DecodeExtValue(iter.raw_value(), &ext_filename);
       if (!ext_filename.empty())
         parse_result_flags_ |= HAS_EXT_FILENAME;
diff --git a/net/http/http_response_headers.cc b/net/http/http_response_headers.cc
index d56f8f0..257a88c 100644
--- a/net/http/http_response_headers.cc
+++ b/net/http/http_response_headers.cc
@@ -98,7 +98,7 @@ const char* const kNonUpdatedHeaderPrefixes[] = {
 bool ShouldUpdateHeader(const std::string::const_iterator& name_begin,
                         const std::string::const_iterator& name_end) {
   for (size_t i = 0; i < arraysize(kNonUpdatedHeaders); ++i) {
-    if (LowerCaseEqualsASCII(name_begin, name_end, kNonUpdatedHeaders[i]))
+    if (base::LowerCaseEqualsASCII(name_begin, name_end, kNonUpdatedHeaders[i]))
       return false;
   }
   for (size_t i = 0; i < arraysize(kNonUpdatedHeaderPrefixes); ++i) {
@@ -632,7 +632,7 @@ HttpVersion HttpResponseHeaders::ParseVersion(
   // TODO: (1*DIGIT apparently means one or more digits, but we only handle 1).
   // TODO: handle leading zeros, which is allowed by the rfc1616 sec 3.1.
-  if ((line_end - p < 4) || !LowerCaseEqualsASCII(p, p + 4, "http")) {
+  if ((line_end - p < 4) || !base::LowerCaseEqualsASCII(p, p + 4, "http")) {
     DVLOG(1) << "missing status line";
     return HttpVersion();
   }
@@ -763,9 +763,8 @@ bool HttpResponseHeaders::GetCacheControlDirective(const StringPiece& directive,
   void* iter = NULL;
   while (EnumerateHeader(&iter, name, &value)) {
     if (value.size() > directive_size + 1 &&
-        LowerCaseEqualsASCII(value.begin(),
-                             value.begin() + directive_size,
-                             directive.begin()) &&
+        base::LowerCaseEqualsASCII(
+            value.begin(), value.begin() + directive_size, directive.begin()) &&
         value[directive_size] == '=') {
       int64 seconds;
       base::StringToInt64(
@@ -1232,7 +1231,7 @@ bool HttpResponseHeaders::IsKeepAlive() const {
   std::string token;
   while (EnumerateHeader(&iterator, header, &token)) {
     for (const KeepAliveToken& keep_alive_token : kKeepAliveTokens) {
-      if (LowerCaseEqualsASCII(token, keep_alive_token.token))
+      if (base::LowerCaseEqualsASCII(token, keep_alive_token.token))
         return keep_alive_token.keep_alive;
     }
   }
@@ -1309,9 +1308,8 @@ bool HttpResponseHeaders::GetContentRange(int64* first_byte_position,
   std::string::const_iterator content_range_spec_end =
       content_range_spec.begin() + space_position;
   HttpUtil::TrimLWS(&content_range_spec_begin, &content_range_spec_end);
-  if (!LowerCaseEqualsASCII(content_range_spec_begin,
-                            content_range_spec_end,
-                            "bytes")) {
+  if (!base::LowerCaseEqualsASCII(content_range_spec_begin,
+                                  content_range_spec_end, "bytes")) {
     return false;
   }
@@ -1330,7 +1328,7 @@ bool HttpResponseHeaders::GetContentRange(int64* first_byte_position,
   std::string byte_range_resp_spec(byte_range_resp_spec_begin,
                                    byte_range_resp_spec_end);
   // If byte-range-resp-spec != "*".
-  if (!LowerCaseEqualsASCII(byte_range_resp_spec, "*")) {
+  if (!base::LowerCaseEqualsASCII(byte_range_resp_spec, "*")) {
     size_t minus_position = byte_range_resp_spec.find('-');
     if (minus_position != std::string::npos) {
       // Obtain first-byte-pos.
@@ -1374,7 +1372,8 @@ bool HttpResponseHeaders::GetContentRange(int64* first_byte_position,
       content_range_spec.end();
   HttpUtil::TrimLWS(&instance_length_begin, &instance_length_end);
-  if (LowerCaseEqualsASCII(instance_length_begin, instance_length_end, "*")) {
+  if (base::LowerCaseEqualsASCII(instance_length_begin, instance_length_end,
+                                 "*")) {
     return false;
   } else if (!base::StringToInt64(StringPiece(instance_length_begin,
                                               instance_length_end),
diff --git a/net/http/http_security_headers.cc b/net/http/http_security_headers.cc
index aff4a30..e8e3a47 100644
--- a/net/http/http_security_headers.cc
+++ b/net/http/http_security_headers.cc
@@ -199,11 +199,11 @@ bool ParseHSTSHeader(const std::string& value,
       case DIRECTIVE_END:
         if (IsAsciiWhitespace(*tokenizer.token_begin()))
           continue;
-        if (LowerCaseEqualsASCII(tokenizer.token(), "max-age")) {
+        if (base::LowerCaseEqualsASCII(tokenizer.token(), "max-age")) {
          state = AFTER_MAX_AGE_LABEL;
          max_age_observed++;
-        } else if (LowerCaseEqualsASCII(tokenizer.token(),
-                                        "includesubdomains")) {
+        } else if (base::LowerCaseEqualsASCII(tokenizer.token(),
+                                              "includesubdomains")) {
          state = AFTER_INCLUDE_SUBDOMAINS;
          include_subdomains_observed++;
          include_subdomains_candidate = true;
@@ -296,20 +296,20 @@ bool ParseHPKPHeader(const std::string& value,
     equals.first = Strip(equals.first);
     equals.second = Strip(equals.second);
-    if (LowerCaseEqualsASCII(equals.first, "max-age")) {
+    if (base::LowerCaseEqualsASCII(equals.first, "max-age")) {
       if (equals.second.empty() ||
          !MaxAgeToInt(equals.second.begin(), equals.second.end(),
                       &max_age_candidate)) {
        return false;
      }
       parsed_max_age = true;
-    } else if (LowerCaseEqualsASCII(equals.first, "pin-sha1")) {
+    } else if (base::LowerCaseEqualsASCII(equals.first, "pin-sha1")) {
       if (!ParseAndAppendPin(equals.second, HASH_VALUE_SHA1, &pins))
         return false;
-    } else if (LowerCaseEqualsASCII(equals.first, "pin-sha256")) {
+    } else if (base::LowerCaseEqualsASCII(equals.first, "pin-sha256")) {
       if (!ParseAndAppendPin(equals.second, HASH_VALUE_SHA256, &pins))
         return false;
-    } else if (LowerCaseEqualsASCII(equals.first, "includesubdomains")) {
+    } else if (base::LowerCaseEqualsASCII(equals.first, "includesubdomains")) {
       include_subdomains_candidate = true;
     } else {
       // Silently ignore unknown directives for forward compatibility.
diff --git a/net/http/http_util.cc b/net/http/http_util.cc
index baa2eff1..a22ee60 100644
--- a/net/http/http_util.cc
+++ b/net/http/http_util.cc
@@ -118,13 +118,14 @@ void HttpUtil::ParseContentType(const std::string& content_type_str,
       DCHECK(param_value_begin <= tokenizer.token_end());
       TrimLWS(&param_value_begin, &param_value_end);
-      if (LowerCaseEqualsASCII(param_name_begin, param_name_end, "charset")) {
+      if (base::LowerCaseEqualsASCII(param_name_begin, param_name_end,
+                                     "charset")) {
         // TODO(abarth): Refactor this function to consistently use iterators.
         charset_val = param_value_begin - begin;
         charset_end = param_value_end - begin;
         type_has_charset = true;
-      } else if (LowerCaseEqualsASCII(param_name_begin, param_name_end,
-                                      "boundary")) {
+      } else if (base::LowerCaseEqualsASCII(param_name_begin, param_name_end,
+                                            "boundary")) {
         if (boundary)
           boundary->assign(param_value_begin, param_value_end);
       }
@@ -160,9 +161,9 @@ void HttpUtil::ParseContentType(const std::string& content_type_str,
       content_type_str != "*/*" &&
       content_type_str.find_first_of('/') != std::string::npos) {
     // Common case here is that mime_type is empty
-    bool eq = !mime_type->empty() && LowerCaseEqualsASCII(begin + type_val,
-                                                          begin + type_end,
-                                                          mime_type->data());
+    bool eq = !mime_type->empty() &&
+              base::LowerCaseEqualsASCII(begin + type_val, begin + type_end,
+                                         mime_type->data());
     if (!eq) {
       mime_type->assign(begin + type_val, begin + type_end);
       base::StringToLowerASCII(mime_type);
@@ -190,7 +191,7 @@ bool HttpUtil::ParseRanges(const std::string& headers,
   while (it.GetNext()) {
     // Look for "Range" header.
-    if (!LowerCaseEqualsASCII(it.name(), "range"))
+    if (!base::LowerCaseEqualsASCII(it.name(), "range"))
       continue;
     ranges_specifier = it.values();
     // We just care about the first "Range" header, so break here.
@@ -219,7 +220,7 @@ bool HttpUtil::ParseRangeHeader(const std::string& ranges_specifier,
   TrimLWS(&bytes_unit_begin, &bytes_unit_end);
   // "bytes" unit identifier is not found.
-  if (!LowerCaseEqualsASCII(bytes_unit_begin, bytes_unit_end, "bytes"))
+  if (!base::LowerCaseEqualsASCII(bytes_unit_begin, bytes_unit_end, "bytes"))
     return false;
   ValuesIterator byte_range_set_iterator(byte_range_set_begin,
@@ -385,8 +386,8 @@ std::string HttpUtil::StripHeaders(const std::string& headers,
   while (it.GetNext()) {
     bool should_remove = false;
     for (size_t i = 0; i < headers_to_remove_len; ++i) {
-      if (LowerCaseEqualsASCII(it.name_begin(), it.name_end(),
-                               headers_to_remove[i])) {
+      if (base::LowerCaseEqualsASCII(it.name_begin(), it.name_end(),
+                                     headers_to_remove[i])) {
         should_remove = true;
         break;
       }
@@ -421,7 +422,8 @@ bool HttpUtil::IsNonCoalescingHeader(std::string::const_iterator name_begin,
     "strict-transport-security"
   };
   for (size_t i = 0; i < arraysize(kNonCoalescingHeaders); ++i) {
-    if (LowerCaseEqualsASCII(name_begin, name_end, kNonCoalescingHeaders[i]))
+    if (base::LowerCaseEqualsASCII(name_begin, name_end,
+                                   kNonCoalescingHeaders[i]))
       return true;
   }
   return false;
@@ -535,7 +537,7 @@ int HttpUtil::LocateStartOfStatusLine(const char* buf, int buf_len) {
   if (buf_len >= http_len) {
     int i_max = std::min(buf_len - http_len, slop);
     for (int i = 0; i <= i_max; ++i) {
-      if (LowerCaseEqualsASCII(buf + i, buf + i + http_len, "http"))
+      if (base::LowerCaseEqualsASCII(buf + i, buf + i + http_len, "http"))
         return i;
     }
   }
@@ -731,7 +733,7 @@ bool HttpUtil::HasStrongValidators(HttpVersion version,
     std::string::const_iterator i = etag_header.begin();
     std::string::const_iterator j = etag_header.begin() + slash;
     TrimLWS(&i, &j);
-    if (!LowerCaseEqualsASCII(i, j, "w"))
+    if (!base::LowerCaseEqualsASCII(i, j, "w"))
       return true;
   }
@@ -832,7 +834,7 @@ bool HttpUtil::HeadersIterator::AdvanceTo(const char* name) {
       << "the header name must be in all lower case";
   while (GetNext()) {
-    if (LowerCaseEqualsASCII(name_begin_, name_end_, name)) {
+    if (base::LowerCaseEqualsASCII(name_begin_, name_end_, name)) {
       return true;
     }
   }
diff --git a/net/proxy/proxy_bypass_rules.cc b/net/proxy/proxy_bypass_rules.cc
index 5344b82..bb163ad 100644
--- a/net/proxy/proxy_bypass_rules.cc
+++ b/net/proxy/proxy_bypass_rules.cc
@@ -264,7 +264,7 @@ bool ProxyBypassRules::AddRuleFromStringInternal(
   // This is the special syntax used by WinInet's bypass list -- we allow it
   // on all platforms and interpret it the same way.
-  if (LowerCaseEqualsASCII(raw, "<local>")) {
+  if (base::LowerCaseEqualsASCII(raw, "<local>")) {
     AddRuleToBypassLocal();
     return true;
   }
diff --git a/net/proxy/proxy_script_fetcher_impl.cc b/net/proxy/proxy_script_fetcher_impl.cc
index 1b9ed9f..81961f8 100644
--- a/net/proxy/proxy_script_fetcher_impl.cc
+++ b/net/proxy/proxy_script_fetcher_impl.cc
@@ -40,7 +40,7 @@ bool IsPacMimeType(const std::string& mime_type) {
     "application/x-javascript-config",
   };
   for (size_t i = 0; i < arraysize(kSupportedPacMimeTypes); ++i) {
-    if (LowerCaseEqualsASCII(mime_type, kSupportedPacMimeTypes[i]))
+    if (base::LowerCaseEqualsASCII(mime_type, kSupportedPacMimeTypes[i]))
       return true;
   }
   return false;
diff --git a/net/proxy/proxy_server.cc b/net/proxy/proxy_server.cc
index 1e8bba9..c65df31 100644
--- a/net/proxy/proxy_server.cc
+++ b/net/proxy/proxy_server.cc
@@ -21,23 +21,23 @@ namespace {
 ProxyServer::Scheme GetSchemeFromPacTypeInternal(
     std::string::const_iterator begin,
     std::string::const_iterator end) {
-  if (LowerCaseEqualsASCII(begin, end, "proxy"))
+  if (base::LowerCaseEqualsASCII(begin, end, "proxy"))
     return ProxyServer::SCHEME_HTTP;
-  if (LowerCaseEqualsASCII(begin, end, "socks")) {
+  if (base::LowerCaseEqualsASCII(begin, end, "socks")) {
     // Default to v4 for compatibility. This is because the SOCKS4 vs SOCKS5
     // notation didn't originally exist, so if a client returns SOCKS they
    // really meant SOCKS4.
     return ProxyServer::SCHEME_SOCKS4;
   }
-  if (LowerCaseEqualsASCII(begin, end, "socks4"))
+  if (base::LowerCaseEqualsASCII(begin, end, "socks4"))
     return ProxyServer::SCHEME_SOCKS4;
-  if (LowerCaseEqualsASCII(begin, end, "socks5"))
+  if (base::LowerCaseEqualsASCII(begin, end, "socks5"))
     return ProxyServer::SCHEME_SOCKS5;
-  if (LowerCaseEqualsASCII(begin, end, "direct"))
+  if (base::LowerCaseEqualsASCII(begin, end, "direct"))
     return ProxyServer::SCHEME_DIRECT;
-  if (LowerCaseEqualsASCII(begin, end, "https"))
+  if (base::LowerCaseEqualsASCII(begin, end, "https"))
     return ProxyServer::SCHEME_HTTPS;
-  if (LowerCaseEqualsASCII(begin, end, "quic"))
+  if (base::LowerCaseEqualsASCII(begin, end, "quic"))
     return ProxyServer::SCHEME_QUIC;
   return ProxyServer::SCHEME_INVALID;
@@ -48,19 +48,19 @@ ProxyServer::Scheme GetSchemeFromPacTypeInternal(
 // ProxyServer::ToURI(). If no type could be matched, returns SCHEME_INVALID.
 ProxyServer::Scheme GetSchemeFromURIInternal(std::string::const_iterator begin,
                                              std::string::const_iterator end) {
-  if (LowerCaseEqualsASCII(begin, end, "http"))
+  if (base::LowerCaseEqualsASCII(begin, end, "http"))
     return ProxyServer::SCHEME_HTTP;
-  if (LowerCaseEqualsASCII(begin, end, "socks4"))
+  if (base::LowerCaseEqualsASCII(begin, end, "socks4"))
     return ProxyServer::SCHEME_SOCKS4;
-  if (LowerCaseEqualsASCII(begin, end, "socks"))
+  if (base::LowerCaseEqualsASCII(begin, end, "socks"))
     return ProxyServer::SCHEME_SOCKS5;
-  if (LowerCaseEqualsASCII(begin, end, "socks5"))
+  if (base::LowerCaseEqualsASCII(begin, end, "socks5"))
     return ProxyServer::SCHEME_SOCKS5;
-  if (LowerCaseEqualsASCII(begin, end, "direct"))
+  if (base::LowerCaseEqualsASCII(begin, end, "direct"))
     return ProxyServer::SCHEME_DIRECT;
-  if (LowerCaseEqualsASCII(begin, end, "https"))
+  if (base::LowerCaseEqualsASCII(begin, end, "https"))
     return ProxyServer::SCHEME_HTTPS;
-  if (LowerCaseEqualsASCII(begin, end, "quic"))
+  if (base::LowerCaseEqualsASCII(begin, end, "quic"))
     return ProxyServer::SCHEME_QUIC;
   return ProxyServer::SCHEME_INVALID;
 }
diff --git a/net/test/url_request/url_request_slow_download_job.cc b/net/test/url_request/url_request_slow_download_job.cc
index d736097..74d1934 100644
--- a/net/test/url_request/url_request_slow_download_job.cc
+++ b/net/test/url_request/url_request_slow_download_job.cc
@@ -115,9 +115,11 @@ URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(
 }
 void URLRequestSlowDownloadJob::StartAsync() {
-  if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
+  if (base::LowerCaseEqualsASCII(kFinishDownloadUrl,
+                                 request_->url().spec().c_str()))
     URLRequestSlowDownloadJob::FinishPendingRequests();
-  if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str()))
+  if (base::LowerCaseEqualsASCII(kErrorDownloadUrl,
+                                 request_->url().spec().c_str()))
     URLRequestSlowDownloadJob::ErrorPendingRequests();
   NotifyHeadersComplete();
@@ -178,9 +180,10 @@ URLRequestSlowDownloadJob::FillBufferHelper(IOBuffer* buf,
 bool URLRequestSlowDownloadJob::ReadRawData(IOBuffer* buf,
                                             int buf_size,
                                             int* bytes_read) {
-  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
-                           request_->url().spec().c_str()) ||
-      LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) {
+  if (base::LowerCaseEqualsASCII(kFinishDownloadUrl,
+                                 request_->url().spec().c_str()) ||
+      base::LowerCaseEqualsASCII(kErrorDownloadUrl,
+                                 request_->url().spec().c_str())) {
     VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl.";
     *bytes_read = 0;
     return true;
@@ -247,9 +250,10 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
     HttpResponseInfo* info) const {
   // Send back mock headers.
   std::string raw_headers;
-  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
-                           request_->url().spec().c_str()) ||
-      LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) {
+  if (base::LowerCaseEqualsASCII(kFinishDownloadUrl,
+                                 request_->url().spec().c_str()) ||
+      base::LowerCaseEqualsASCII(kErrorDownloadUrl,
+                                 request_->url().spec().c_str())) {
     raw_headers.append(
         "HTTP/1.1 200 OK\n"
         "Content-type: text/plain\n");
@@ -259,7 +263,8 @@ void URLRequestSlowDownloadJob::GetResponseInfoConst(
         "Content-type: application/octet-stream\n"
         "Cache-Control: max-age=0\n");
-    if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) {
+    if (base::LowerCaseEqualsASCII(kKnownSizeUrl,
+                                   request_->url().spec().c_str())) {
      raw_headers.append(base::StringPrintf(
          "Content-Length: %d\n", kFirstDownloadSize + kSecondDownloadSize));
     }
diff --git a/net/url_request/url_request_file_job.cc b/net/url_request/url_request_file_job.cc
index 864001e..17bc642 100644
--- a/net/url_request/url_request_file_job.cc
+++ b/net/url_request/url_request_file_job.cc
@@ -140,7 +140,7 @@ bool URLRequestFileJob::IsRedirectResponse(GURL* location,
 #if defined(OS_WIN)
   // Follow a Windows shortcut.
   // We just resolve .lnk file, ignore others.
-  if (!LowerCaseEqualsASCII(file_path_.Extension(), ".lnk"))
+  if (!base::LowerCaseEqualsASCII(file_path_.Extension(), ".lnk"))
     return false;
   base::FilePath new_path = file_path_;
@@ -161,8 +161,9 @@ bool URLRequestFileJob::IsRedirectResponse(GURL* location,
 Filter* URLRequestFileJob::SetupFilter() const {
   // Bug 9936 - .svgz files needs to be decompressed.
-  return LowerCaseEqualsASCII(file_path_.Extension(), ".svgz")
-      ? Filter::GZipFactory() : NULL;
+  return base::LowerCaseEqualsASCII(file_path_.Extension(), ".svgz")
+             ? Filter::GZipFactory()
+             : NULL;
 }
 bool URLRequestFileJob::GetMimeType(std::string* mime_type) const {
diff --git a/net/url_request/url_request_job_manager.cc b/net/url_request/url_request_job_manager.cc
index b2392e8..39c21b7 100644
--- a/net/url_request/url_request_job_manager.cc
+++ b/net/url_request/url_request_job_manager.cc
@@ -143,7 +143,7 @@ URLRequestJob* URLRequestJobManager::MaybeInterceptResponse(
 // static
 bool URLRequestJobManager::SupportsScheme(const std::string& scheme) {
   for (size_t i = 0; i < arraysize(kBuiltinFactories); ++i) {
-    if (LowerCaseEqualsASCII(scheme, kBuiltinFactories[i].scheme))
+    if (base::LowerCaseEqualsASCII(scheme, kBuiltinFactories[i].scheme))
       return true;
   }
diff --git a/net/websockets/websocket_basic_handshake_stream.cc b/net/websockets/websocket_basic_handshake_stream.cc
index 7bb34a3..1261e1c 100644
--- a/net/websockets/websocket_basic_handshake_stream.cc
+++ b/net/websockets/websocket_basic_handshake_stream.cc
@@ -142,7 +142,7 @@ bool ValidateUpgrade(const HttpResponseHeaders* headers,
     return false;
   }
-  if (!LowerCaseEqualsASCII(value, websockets::kWebSocketLowercase)) {
+  if (!base::LowerCaseEqualsASCII(value, websockets::kWebSocketLowercase)) {
     *failure_message = "'Upgrade' header value is not 'WebSocket': " + value;
     return false;