author     aa@chromium.org <aa@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-02-27 23:10:29 +0000
committer  aa@chromium.org <aa@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-02-27 23:10:29 +0000
commit     c0abb0a436ee6a3e57b75b9af258e36e7f0fb7e6 (patch)
tree       5c2323f961188351b2b526c154834cecfdacc598
parent     e31d96b7078580b3fe061e3afd62430e006a0bee (diff)
Minor typos and style fixes.
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@185068 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--  chrome/browser/net/chrome_net_log.cc            2
-rw-r--r--  chrome/browser/net/chrome_net_log_unittest.cc    1
-rw-r--r--  chrome/browser/net/dns_probe_job_unittest.cc     2
-rw-r--r--  chrome/browser/net/proxy_browsertest.cc          2
-rw-r--r--  chrome/browser/net/url_fixer_upper.cc           38
-rw-r--r--  chrome/browser/net/url_info.cc                   3
6 files changed, 19 insertions, 29 deletions
diff --git a/chrome/browser/net/chrome_net_log.cc b/chrome/browser/net/chrome_net_log.cc
index 068fc62..0e56507 100644
--- a/chrome/browser/net/chrome_net_log.cc
+++ b/chrome/browser/net/chrome_net_log.cc
@@ -83,7 +83,6 @@ void ChromeNetLog::SetObserverLogLevel(
net::NetLog::ThreadSafeObserver* observer,
LogLevel log_level) {
base::AutoLock lock(lock_);
-
DCHECK(observers_.HasObserver(observer));
OnSetObserverLogLevel(observer, log_level);
UpdateLogLevel();
@@ -101,7 +100,6 @@ void ChromeNetLog::RemoveThreadSafeObserver(
void ChromeNetLog::UpdateLogLevel() {
lock_.AssertAcquired();
-
// Look through all the observers and find the finest granularity
// log level (higher values of the enum imply *lower* log levels).
LogLevel new_effective_log_level = base_log_level_;
diff --git a/chrome/browser/net/chrome_net_log_unittest.cc b/chrome/browser/net/chrome_net_log_unittest.cc
index 83beb08..57a18ea 100644
--- a/chrome/browser/net/chrome_net_log_unittest.cc
+++ b/chrome/browser/net/chrome_net_log_unittest.cc
@@ -114,7 +114,6 @@ class AddRemoveObserverTestThread : public ChromeNetLogTestThread {
ASSERT_EQ(net_log_, observer_.net_log());
ASSERT_EQ(net::NetLog::LOG_ALL, observer_.log_level());
ASSERT_LE(net_log_->GetLogLevel(), net::NetLog::LOG_ALL);
-
net_log_->RemoveThreadSafeObserver(&observer_);
ASSERT_TRUE(!observer_.net_log());
}
diff --git a/chrome/browser/net/dns_probe_job_unittest.cc b/chrome/browser/net/dns_probe_job_unittest.cc
index 06d6965..f0ad651 100644
--- a/chrome/browser/net/dns_probe_job_unittest.cc
+++ b/chrome/browser/net/dns_probe_job_unittest.cc
@@ -55,7 +55,6 @@ void DnsProbeJobTest::RunProbe(MockDnsClientRule::Result good_result,
MockDnsClientRuleList rules;
rules.push_back(MockDnsClientRule("google.com", kTypeA, good_result));
rules.push_back(MockDnsClientRule("", kTypeA, bad_result));
-
scoped_ptr<DnsClient> dns_client = CreateMockDnsClient(config, rules);
dns_client->SetConfig(config);
@@ -114,6 +113,7 @@ TEST_F(DnsProbeJobTest, Test) {
MockDnsClientRule::TIMEOUT,
DnsProbeJob::SERVERS_UNREACHABLE },
};
+
for (size_t i = 0; i < arraysize(kTestCases); i++) {
const TestCase* test_case = &kTestCases[i];
RunProbe(test_case->good_result, test_case->bad_result);
diff --git a/chrome/browser/net/proxy_browsertest.cc b/chrome/browser/net/proxy_browsertest.cc
index 791c16ba..7e1bb55 100644
--- a/chrome/browser/net/proxy_browsertest.cc
+++ b/chrome/browser/net/proxy_browsertest.cc
@@ -100,6 +100,7 @@ IN_PROC_BROWSER_TEST_F(ProxyBrowserTest, MAYBE_BasicAuthWSConnect) {
browser()->tab_strip_model()->GetActiveWebContents();
content::NavigationController* controller = &tab->GetController();
content::NotificationRegistrar registrar;
+
// The proxy server will request basic authentication.
// |observer| supplies the credential.
LoginPromptObserver observer;
@@ -117,7 +118,6 @@ IN_PROC_BROWSER_TEST_F(ProxyBrowserTest, MAYBE_BasicAuthWSConnect) {
ui_test_utils::NavigateToURL(
browser(),
ws_server.GetURL("connect_check.html").ReplaceComponents(replacements));
-
const string16 result = watcher.WaitAndGetTitle();
EXPECT_TRUE(EqualsASCII(result, "PASS"));
EXPECT_TRUE(observer.auth_handled());
diff --git a/chrome/browser/net/url_fixer_upper.cc b/chrome/browser/net/url_fixer_upper.cc
index f590702..dc413db 100644
--- a/chrome/browser/net/url_fixer_upper.cc
+++ b/chrome/browser/net/url_fixer_upper.cc
@@ -9,6 +9,7 @@
#if defined(OS_POSIX)
#include "base/environment.h"
#endif
+
#include "base/file_util.h"
#include "base/logging.h"
#include "base/string_util.h"
@@ -19,21 +20,23 @@
#include "googleurl/src/url_util.h"
#include "net/base/escape.h"
#include "net/base/net_util.h"
+#include "net/base/net_errors.h"
+#include "net/base/net_log.h"
#include "net/base/registry_controlled_domains/registry_controlled_domain.h"
const char* URLFixerUpper::home_directory_override = NULL;
namespace {
-// TODO(estade): Remove these ugly, ugly functions. They are only used in
-// SegmentURL. A url_parse::Parsed object keeps track of a bunch of indices into
-// a url string, and these need to be updated when the URL is converted from
+// TODO(estade): Remove these functions. They are only used in SegmentURL.
+// A url_parse::Parsed object keeps track of a bunch of indices into
+// a URL string, and these need to be updated when the URL is converted from
// UTF8 to UTF16. Instead of this after-the-fact adjustment, we should parse it
// in the correct string format to begin with.
url_parse::Component UTF8ComponentToUTF16Component(
const std::string& text_utf8,
const url_parse::Component& component_utf8) {
- if (component_utf8.len == -1)
+ if (component_utf8.len < 0)
return url_parse::Component();
std::string before_component_string =
@@ -42,6 +45,7 @@ url_parse::Component UTF8ComponentToUTF16Component(
component_utf8.len);
string16 before_component_string_16 = UTF8ToUTF16(before_component_string);
string16 component_string_16 = UTF8ToUTF16(component_string);
+
url_parse::Component component_16(before_component_string_16.length(),
component_string_16.length());
return component_16;
@@ -76,7 +80,7 @@ void UTF8PartsToUTF16Parts(const std::string& text_utf8,
TrimPositions TrimWhitespaceUTF8(const std::string& input,
TrimPositions positions,
std::string* output) {
- // This implementation is not so fast since it converts the text encoding
+ // This implementation is slow since it converts the text encoding
// twice. Please feel free to file a bug if this function hurts the
// performance of Chrome.
DCHECK(IsStringUTF8(input));
@@ -108,10 +112,8 @@ static bool ValidPathForFile(const base::FilePath::StringType& text,
base::FilePath file_path(text);
if (!file_util::AbsolutePath(&file_path))
return false;
-
if (!file_util::PathExists(file_path))
return false;
-
*full_path = file_path;
return true;
}
@@ -136,7 +138,6 @@ static std::string FixupHomedir(const std::string& text) {
// Otherwise, this is a path like ~foobar/baz, where we must expand to
// user foobar's home directory. Officially, we should use getpwent(),
// but that is a nasty blocking call.
-
#if defined(OS_MACOSX)
static const char kHome[] = "/Users/";
#else
@@ -182,7 +183,7 @@ static std::string FixupPath(const std::string& text) {
}
// Checks |domain| to see if a valid TLD is already present. If not, appends
-// |desired_tld| to the domain, and prepends "www." unless it's already present.
+// |desired_tld| to the domain, and prepends "www." if necessary.
static void AddDesiredTLD(const std::string& desired_tld,
std::string* domain) {
if (desired_tld.empty() || domain->empty())
@@ -223,7 +224,6 @@ static inline void FixupUsername(const std::string& text,
std::string* url) {
if (!part.is_valid())
return;
-
// We don't fix up the username at the moment.
url->append(text, part.begin, part.len);
// Do not append the trailing '@' because we might need to include the user's
@@ -235,7 +235,6 @@ static inline void FixupPassword(const std::string& text,
std::string* url) {
if (!part.is_valid())
return;
-
// We don't fix up the password at the moment.
url->append(":");
url->append(text, part.begin, part.len);
@@ -266,7 +265,6 @@ static void FixupHost(const std::string& text,
// Add any user-specified TLD, if applicable.
AddDesiredTLD(desired_tld, &domain);
-
url->append(domain);
}
@@ -275,7 +273,6 @@ static void FixupPort(const std::string& text,
std::string* url) {
if (!part.is_valid())
return;
-
// We don't fix up the port at the moment.
url->append(":");
url->append(text, part.begin, part.len);
@@ -299,7 +296,6 @@ static inline void FixupQuery(const std::string& text,
std::string* url) {
if (!part.is_valid())
return;
-
// We don't fix up the query at the moment.
url->append("?");
url->append(text, part.begin, part.len);
@@ -310,7 +306,6 @@ static inline void FixupRef(const std::string& text,
std::string* url) {
if (!part.is_valid())
return;
-
// We don't fix up the ref at the moment.
url->append("#");
url->append(text, part.begin, part.len);
@@ -324,6 +319,7 @@ static bool HasPort(const std::string& original_text,
while ((port_end < original_text.length()) &&
!url_parse::IsAuthorityTerminator(original_text[port_end]))
++port_end;
+
if (port_end == port_start)
return false;
@@ -455,7 +451,6 @@ std::string URLFixerUpper::SegmentURL(const std::string& text,
OffsetComponent(offset, &parts->path);
OffsetComponent(offset, &parts->query);
OffsetComponent(offset, &parts->ref);
-
return scheme;
}
@@ -465,8 +460,9 @@ GURL URLFixerUpper::FixupURL(const std::string& text,
TrimWhitespaceUTF8(text, TRIM_ALL, &trimmed);
if (trimmed.empty())
return GURL(); // Nothing here.
-
// Segment the URL.
url_parse::Parsed parts;
std::string scheme(SegmentURL(trimmed, &parts));
@@ -520,7 +516,6 @@ GURL URLFixerUpper::FixupURL(const std::string& text,
FixupPath(trimmed, parts.path, &url);
FixupQuery(trimmed, parts.query, &url);
FixupRef(trimmed, parts.ref, &url);
-
return GURL(url);
}
@@ -530,7 +525,6 @@ GURL URLFixerUpper::FixupURL(const std::string& text,
fixed_scheme.append(content::kStandardSchemeSeparator);
trimmed.insert(0, fixed_scheme);
}
-
return GURL(trimmed);
}
@@ -557,6 +551,7 @@ GURL URLFixerUpper::FixupRelativeFile(const base::FilePath& base_dir,
GURL gurl(trimmed);
if (gurl.is_valid() && gurl.IsStandard())
is_file = false;
+
base::FilePath full_path;
if (is_file && !ValidPathForFile(trimmed, &full_path)) {
// Not a path as entered, try unescaping it in case the user has
@@ -608,12 +603,9 @@ string16 URLFixerUpper::SegmentURL(const string16& text,
}
void URLFixerUpper::OffsetComponent(int offset, url_parse::Component* part) {
- DCHECK(part);
-
- if (part->is_valid()) {
+ if (part && part->is_valid()) {
// Offset the location of this component.
part->begin += offset;
-
// This part might not have existed in the original text.
if (part->begin < 0)
part->reset();
diff --git a/chrome/browser/net/url_info.cc b/chrome/browser/net/url_info.cc
index ddba658..3ac9973 100644
--- a/chrome/browser/net/url_info.cc
+++ b/chrome/browser/net/url_info.cc
@@ -6,7 +6,6 @@
#include <ctype.h>
#include <math.h>
-
#include <algorithm>
#include <string>
@@ -137,6 +136,7 @@ void UrlInfo::RemoveFromQueue() {
queue_duration_ - kBoundary);
return;
}
+
// Make a custom linear histogram for the region from 0 to boundary.
static const size_t kBucketCount = 52;
static base::HistogramBase* histogram(NULL);
@@ -161,6 +161,7 @@ void UrlInfo::SetFoundState() {
UMA_HISTOGRAM_CUSTOM_TIMES("DNS.PrefetchResolution", resolve_duration_,
max_duration, TimeDelta::FromMinutes(15), 100);
}
+
sequence_number_ = sequence_counter++;
DLogResultsStats("DNS PrefetchFound");
}