author     pkasting@chromium.org <pkasting@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-11-07 01:34:53 +0000
committer  pkasting@chromium.org <pkasting@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-11-07 01:34:53 +0000
commit     ce85f60cd9d399109dab39fe5a9613879ab9a8f7 (patch)
tree       0e9e0072d2e5eadfeec08eef0f06a43c56dc1751 /chrome/browser/net/url_fixer_upper.cc
parent     d90684d0cf0aa16389c9202153c97d373829b7f3 (diff)
Fix various problems with inline autocomplete and URLs that change length during fixup:
* URLs with http auth info, which gets stripped
* URLs with IDN hosts
* URLs with escaped values that get unescaped

In cases like these, we'd inline autocomplete from the wrong locations, highlight the
wrong portions of the URL as matches, and sometimes DCHECK() in debug mode.  The fix is
to track how fixup affects the offsets into the URL we care about.  Plumbing this
required an enormous number of additions :(

There is also a fix here to the URL Fixer Upper, which was obviously modified at some
point in the past to use the Parsed components, but without updating the comments or
some of the functionality to match.  Since this isn't supposed to "fix up" things that
aren't simple typos, I removed some code to "fix" bogus ports, which was causing
bizarre effects when typing HTTP auth URLs ("http://foo:bar" would be fixed to
"http://foo" and then matched for inline autocompletion, which was clearly wrong).
This is tested incidentally by one of the new History URL Provider tests (which is how
I discovered it).

BUG=4010
TEST=Covered by unittests

Review URL: http://codereview.chromium.org/372017

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@31352 0039d316-1c4b-4281-b951-d872f2087c98
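As a rough illustration of the offset bookkeeping described above: when fixup strips
characters (for example the "user:pass@" portion of an http-auth URL), any offset past
the stripped region must shift left by the stripped length, and an offset inside the
stripped region no longer maps to anything.  The sketch below is hypothetical; the
names and structure are not the actual helpers added by this change.

// Minimal sketch of the offset-adjustment idea; hypothetical names, not the
// plumbing introduced by this change.
#include <cstddef>
#include <string>

const size_t kInvalidOffset = std::string::npos;

// After |removed_len| characters were stripped starting at |removed_begin|
// (e.g. "user:pass@" in an http-auth URL), shift or invalidate |offset|.
void AdjustOffsetForRemoval(size_t removed_begin,
                            size_t removed_len,
                            size_t* offset) {
  if (*offset == kInvalidOffset || *offset < removed_begin)
    return;  // Offset precedes the stripped region; nothing to do.
  if (*offset < removed_begin + removed_len)
    *offset = kInvalidOffset;  // Offset fell inside the stripped region.
  else
    *offset -= removed_len;  // Offset shifts left by the stripped length.
}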
Diffstat (limited to 'chrome/browser/net/url_fixer_upper.cc')
-rw-r--r--  chrome/browser/net/url_fixer_upper.cc  35
1 file changed, 9 insertions(+), 26 deletions(-)
diff --git a/chrome/browser/net/url_fixer_upper.cc b/chrome/browser/net/url_fixer_upper.cc
index b465268..a68bc34 100644
--- a/chrome/browser/net/url_fixer_upper.cc
+++ b/chrome/browser/net/url_fixer_upper.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -146,11 +146,10 @@ static string FixupHomedir(const string& text) {
#endif
// Tries to create a file: URL from |text| if it looks like a filename, even if
-// it doesn't resolve as a valid path or to an existing file. Returns true
-// with a (possibly invalid) file: URL in |fixed_up_url| for input beginning
-// with a drive specifier or "\\". Returns false in other cases (including
-// file: URLs: these don't look like filenames), leaving fixed_up_url
-// unchanged.
+// it doesn't resolve as a valid path or to an existing file. Returns a
+// (possibly invalid) file: URL in |fixed_up_url| for input beginning
+// with a drive specifier or "\\". Returns the unchanged input in other cases
+// (including file: URLs: these don't look like filenames).
static string FixupPath(const string& text) {
DCHECK(!text.empty());
@@ -173,7 +172,7 @@ static string FixupPath(const string& text) {
GURL file_url = net::FilePathToFileURL(FilePath(filename));
if (file_url.is_valid()) {
return WideToUTF8(net::FormatUrl(file_url, std::wstring(), true,
- UnescapeRule::NORMAL, NULL, NULL));
+ UnescapeRule::NORMAL, NULL, NULL, NULL));
}
// Invalid file URL, just return the input.
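The extra NULL added to the net::FormatUrl() call above is the plumbing for the offset
tracking described in the commit message: FixupPath() has no offset to adjust, so it
passes NULL.  A hypothetical caller that does track an offset might look like the
sketch below; the trailing parameter's name and exact semantics are assumed from the
commit description, not taken from this change.

// Hypothetical wrapper around the widened FormatUrl() signature.  The headers
// listed and the meaning of the trailing in/out offset are assumptions.
#include <string>
#include "googleurl/src/gurl.h"
#include "net/base/escape.h"
#include "net/base/net_util.h"

std::wstring FormatForDisplay(const GURL& url, size_t* offset_in_spec) {
  // |offset_in_spec| is presumably adjusted in place so it still refers to the
  // same logical character in the formatted output, or invalidated if that
  // character was removed (e.g. stripped auth info or unescaped sequences).
  return net::FormatUrl(url, std::wstring(), true, UnescapeRule::NORMAL,
                        NULL, NULL, offset_in_spec);
}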
@@ -182,7 +181,6 @@ static string FixupPath(const string& text) {
// Checks |domain| to see if a valid TLD is already present. If not, appends
// |desired_tld| to the domain, and prepends "www." unless it's already present.
-// Then modifies |fixed_up_url| to reflect the changes.
static void AddDesiredTLD(const string& desired_tld,
string* domain) {
if (desired_tld.empty() || domain->empty())
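Per the comment above, the intended effect of AddDesiredTLD() is roughly the following;
this is an illustration inferred from the comment, not traced through the TLD-checking
code.

// Illustration only, based on the comment above AddDesiredTLD().
std::string domain("google");
AddDesiredTLD("com", &domain);
// |domain| is now "www.google.com": no valid TLD was present, so "com" was
// appended and "www." prepended.  A domain that already has a valid TLD,
// e.g. "example.org", would be left unchanged.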
@@ -268,30 +266,15 @@ static void FixupHost(const string& text,
url->append(domain);
}
-// Looks for a port number, including initial colon, at port_start. If
-// something invalid (which cannot be fixed up) is found, like ":foo" or
-// ":7:7", returns false. Otherwise, removes any extra colons
-// ("::1337" -> ":1337", ":/" -> "/") and returns true.
static void FixupPort(const string& text,
const url_parse::Component& part,
string* url) {
if (!part.is_valid())
return;
- // Look for non-digit in port and strip if found.
- string port(text, part.begin, part.len);
- for (string::iterator i = port.begin(); i != port.end();) {
- if (IsAsciiDigit(*i))
- ++i;
- else
- i = port.erase(i);
- }
-
- if (port.empty())
- return; // Nothing to append.
-
+ // We don't fix up the port at the moment.
url->append(":");
- url->append(port);
+ url->append(text, part.begin, part.len);
}
static inline void FixupPath(const string& text,
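The deleted digit-stripping loop in FixupPort() is what produced the
"http://foo:bar" -> "http://foo" behavior called out in the commit message: a
non-numeric port was reduced to an empty string and then dropped entirely.  A rough
before/after, as an illustration only:

// Effect of FixupPort() on "http://foo:bar", whose port component text is "bar":
//   Old: erase non-digit characters -> port becomes empty -> nothing appended,
//        so the fixed-up URL is "http://foo".
//   New: ":" plus the raw component text is appended verbatim,
//        so the fixed-up URL stays "http://foo:bar".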
@@ -573,7 +556,7 @@ string URLFixerUpper::FixupRelativeFile(const FilePath& base_dir,
GURL file_url = net::FilePathToFileURL(full_path);
if (file_url.is_valid())
return WideToUTF8(net::FormatUrl(file_url, std::wstring(),
- true, UnescapeRule::NORMAL, NULL, NULL));
+ true, UnescapeRule::NORMAL, NULL, NULL, NULL));
// Invalid files fall through to regular processing.
}