author     estade@chromium.org <estade@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-01-23 01:51:23 +0000
committer  estade@chromium.org <estade@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-01-23 01:51:23 +0000
commit     b1c33f8b46f7f2b2a4489609c4b8d6e0d6905f25 (patch)
tree       960b966701601a19072ea06648e19bc991060d35 /chrome/browser/net
parent     3efc4679d44aa442d4ae35ed95dff3ef0cf5ded6 (diff)
Fix strings in url_fixer_upper.
* change URLs from wstring to string
* change filenames from wstring to FilePath
* url_fixer_upper{_unittest}.cc building on posix

Review URL: http://codereview.chromium.org/18525

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@8536 0039d316-1c4b-4281-b951-d872f2087c98
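For orientation, here is a condensed sketch of the URLFixerUpper interface after this patch, based on the declarations in url_fixer_upper.h in the diff below (see that file for the authoritative version): the std::string/FilePath overloads become the real entry points, while the wstring overloads survive only as deprecated wrappers that convert and forward.

    // Sketch of chrome/browser/net/url_fixer_upper.h after this change.
    #include <string>
    #include "googleurl/src/url_parse.h"

    class FilePath;

    namespace URLFixerUpper {
      // New UTF-8 / FilePath entry points added by this patch.
      std::string SegmentURL(const std::string& text, url_parse::Parsed* parts);
      std::string FixupURL(const std::string& text,
                           const std::string& desired_tld);
      std::string FixupRelativeFile(const FilePath& base_dir,
                                    const FilePath& text);

      // Deprecated temporary compatibility wrappers; each converts its
      // arguments and forwards to the overload above, e.g.
      //   return UTF8ToWide(FixupURL(WideToUTF8(text), WideToUTF8(desired_tld)));
      std::wstring SegmentURL(const std::wstring& text, url_parse::Parsed* parts);
      std::wstring FixupURL(const std::wstring& text,
                            const std::wstring& desired_tld);
      std::wstring FixupRelativeFile(const std::wstring& base_dir,
                                     const std::wstring& text);
    };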
Diffstat (limited to 'chrome/browser/net')
-rw-r--r--  chrome/browser/net/url_fixer_upper.cc           | 218
-rw-r--r--  chrome/browser/net/url_fixer_upper.h            |  12
-rw-r--r--  chrome/browser/net/url_fixer_upper_unittest.cc  | 268
3 files changed, 281 insertions, 217 deletions
diff --git a/chrome/browser/net/url_fixer_upper.cc b/chrome/browser/net/url_fixer_upper.cc
index 1aca150..ec908cd 100644
--- a/chrome/browser/net/url_fixer_upper.cc
+++ b/chrome/browser/net/url_fixer_upper.cc
@@ -3,7 +3,6 @@
// found in the LICENSE file.
#include <algorithm>
-#include <windows.h>
#include "chrome/browser/net/url_fixer_upper.h"
@@ -23,23 +22,26 @@
using namespace std;
// does some basic fixes for input that we want to test for file-ness
-static void PrepareStringForFileOps(const wstring& text, wstring* output) {
- TrimWhitespace(text, TRIM_ALL, output);
+static void PrepareStringForFileOps(const FilePath& text,
+ FilePath::StringType* output) {
+ TrimWhitespace(text.value(), TRIM_ALL, output);
+#if defined(OS_WIN)
replace(output->begin(), output->end(), '/', '\\');
+#endif
}
// Tries to create a full path from |text|. If the result is valid and the
// file exists, returns true and sets |full_path| to the result. Otherwise,
// returns false and leaves |full_path| unchanged.
-static bool ValidPathForFile(const wstring& text, wstring* full_path) {
- wchar_t file_path[MAX_PATH];
- if (!_wfullpath(file_path, text.c_str(), MAX_PATH))
- return false;
+static bool ValidPathForFile(const FilePath::StringType& text,
+ FilePath* full_path) {
+ FilePath file_path(text);
+ file_util::AbsolutePath(&file_path);
if (!file_util::PathExists(file_path))
return false;
- full_path->assign(file_path);
+ *full_path = file_path;
return true;
}
@@ -49,19 +51,26 @@ static bool ValidPathForFile(const wstring& text, wstring* full_path) {
// with a drive specifier or "\\". Returns false in other cases (including
// file: URLs: these don't look like filenames), leaving fixed_up_url
// unchanged.
-static wstring FixupPath(const wstring& text) {
+static string FixupPath(const string& text) {
DCHECK(text.length() >= 2);
- wstring filename;
- PrepareStringForFileOps(text, &filename);
+ FilePath::StringType filename;
+#if defined(OS_WIN)
+ FilePath input_path(UTF8ToWide(text));
+#elif defined(OS_POSIX)
+ FilePath input_path(text);
+#endif
+ PrepareStringForFileOps(input_path, &filename);
if (filename[1] == '|')
filename[1] = ':';
// Here, we know the input looks like a file.
- GURL file_url = net::FilePathToFileURL(filename);
- if (file_url.is_valid())
- return gfx::ElideUrl(file_url, ChromeFont(), 0, std::wstring());
+ GURL file_url = net::FilePathToFileURL(FilePath(filename));
+ if (file_url.is_valid()) {
+ return WideToUTF8(gfx::GetCleanStringFromUrl(file_url, std::wstring(),
+ NULL, NULL));
+ }
// Invalid file URL, just return the input.
return text;
@@ -70,13 +79,13 @@ static wstring FixupPath(const wstring& text) {
// Checks |domain| to see if a valid TLD is already present. If not, appends
// |desired_tld| to the domain, and prepends "www." unless it's already present.
// Then modifies |fixed_up_url| to reflect the changes.
-static void AddDesiredTLD(const wstring& desired_tld,
- wstring* domain) {
+static void AddDesiredTLD(const string& desired_tld,
+ string* domain) {
if (desired_tld.empty() || domain->empty())
return;
// Check the TLD. If the return value is positive, we already have a TLD, so
- // abort; if the return value is wstring::npos, there's no valid host (e.g. if
+ // abort; if the return value is string::npos, there's no valid host (e.g. if
// the user pasted in garbage for which HistoryURLProvider is trying to
// suggest an exact match), so adding a TLD makes no sense. The only useful
// case is where the return value is 0 (there's a valid host with no known
@@ -96,16 +105,16 @@ static void AddDesiredTLD(const wstring& desired_tld,
domain->append(desired_tld);
// Now, if the domain begins with "www.", stop.
- const wstring prefix(L"www.");
+ const string prefix("www.");
if (domain->compare(0, prefix.length(), prefix) != 0) {
// Otherwise, add www. to the beginning of the URL.
domain->insert(0, prefix);
}
}
-static inline void FixupUsername(const wstring& text,
+static inline void FixupUsername(const string& text,
const url_parse::Component& part,
- wstring* url) {
+ string* url) {
if (!part.is_valid())
return;
@@ -115,22 +124,22 @@ static inline void FixupUsername(const wstring& text,
// password. FixupURL itself will append the '@' for us.
}
-static inline void FixupPassword(const wstring& text,
+static inline void FixupPassword(const string& text,
const url_parse::Component& part,
- wstring* url) {
+ string* url) {
if (!part.is_valid())
return;
// We don't fix up the password at the moment.
- url->append(L":");
+ url->append(":");
url->append(text, part.begin, part.len);
}
-static void FixupHost(const wstring& text,
+static void FixupHost(const string& text,
const url_parse::Component& part,
bool has_scheme,
- const wstring& desired_tld,
- wstring* url) {
+ const string& desired_tld,
+ string* url) {
if (!part.is_valid())
return;
@@ -138,12 +147,12 @@ static void FixupHost(const wstring& text,
// Strip all leading dots and all but one trailing dot, unless the user only
// typed dots, in which case their input is totally invalid and we should just
// leave it unchanged.
- wstring domain(text, part.begin, part.len);
+ string domain(text, part.begin, part.len);
const size_t first_nondot(domain.find_first_not_of('.'));
- if (first_nondot != wstring::npos) {
+ if (first_nondot != string::npos) {
domain.erase(0, first_nondot);
size_t last_nondot(domain.find_last_not_of('.'));
- DCHECK(last_nondot != wstring::npos);
+ DCHECK(last_nondot != string::npos);
last_nondot += 2; // Point at second period in ending string
if (last_nondot < domain.length())
domain.erase(last_nondot);
@@ -159,15 +168,15 @@ static void FixupHost(const wstring& text,
// something invalid (which cannot be fixed up) is found, like ":foo" or
// ":7:7", returns false. Otherwise, removes any extra colons
// ("::1337" -> ":1337", ":/" -> "/") and returns true.
-static void FixupPort(const wstring& text,
+static void FixupPort(const string& text,
const url_parse::Component& part,
- wstring* url) {
+ string* url) {
if (!part.is_valid())
return;
// Look for non-digit in port and strip if found.
- wstring port(text, part.begin, part.len);
- for (wstring::iterator i = port.begin(); i != port.end(); ) {
+ string port(text, part.begin, part.len);
+ for (string::iterator i = port.begin(); i != port.end(); ) {
if (IsAsciiDigit(*i))
++i;
else
@@ -175,18 +184,18 @@ static void FixupPort(const wstring& text,
}
if (port.empty())
- return; // Nothing to append.
+ return; // Nothing to append.
- url->append(L":");
+ url->append(":");
url->append(port);
}
-static inline void FixupPath(const wstring& text,
+static inline void FixupPath(const string& text,
const url_parse::Component& part,
- wstring* url) {
+ string* url) {
if (!part.is_valid() || part.len == 0) {
// We should always have a path.
- url->append(L"/");
+ url->append("/");
return;
}
@@ -194,25 +203,25 @@ static inline void FixupPath(const wstring& text,
url->append(text, part.begin, part.len);
}
-static inline void FixupQuery(const wstring& text,
+static inline void FixupQuery(const string& text,
const url_parse::Component& part,
- wstring* url) {
+ string* url) {
if (!part.is_valid())
return;
// We don't fix up the query at the moment.
- url->append(L"?");
+ url->append("?");
url->append(text, part.begin, part.len);
}
-static inline void FixupRef(const wstring& text,
+static inline void FixupRef(const string& text,
const url_parse::Component& part,
- wstring* url) {
+ string* url) {
if (!part.is_valid())
return;
// We don't fix up the ref at the moment.
- url->append(L"#");
+ url->append("#");
url->append(text, part.begin, part.len);
}
@@ -229,9 +238,8 @@ static void OffsetComponent(int offset, url_parse::Component* part) {
}
}
-static bool HasPort(const std::wstring& original_text,
- const url_parse::Component& scheme_component,
- const std::wstring& scheme) {
+static bool HasPort(const std::string& original_text,
+ const url_parse::Component& scheme_component) {
// Find the range between the ":" and the "/".
size_t port_start = scheme_component.end() + 1;
size_t port_end = port_start;
@@ -250,23 +258,28 @@ static bool HasPort(const std::wstring& original_text,
return true;
}
-wstring URLFixerUpper::SegmentURL(const wstring& text,
- url_parse::Parsed* parts) {
+string URLFixerUpper::SegmentURL(const string& text,
+ url_parse::Parsed* parts) {
// Initialize the result.
*parts = url_parse::Parsed();
- wstring trimmed;
+ string trimmed;
TrimWhitespace(text, TRIM_ALL, &trimmed);
if (trimmed.empty())
- return wstring(); // Nothing to segment.
+ return string(); // Nothing to segment.
+#if defined(OS_WIN)
int trimmed_length = static_cast<int>(trimmed.length());
- if (url_parse::DoesBeginWindowsDriveSpec(trimmed.data(), 0, trimmed_length)
- || url_parse::DoesBeginUNCPath(trimmed.data(), 0, trimmed_length, false))
- return L"file";
+ if (url_parse::DoesBeginWindowsDriveSpec(trimmed.data(), 0, trimmed_length) ||
+ url_parse::DoesBeginUNCPath(trimmed.data(), 0, trimmed_length, false))
+ return "file";
+#elif defined(OS_POSIX)
+ if (FilePath::IsSeparator(trimmed.c_str()[0]))
+ return "file";
+#endif
// Otherwise, we need to look at things carefully.
- wstring scheme;
+ string scheme;
if (url_parse::ExtractScheme(text.data(),
static_cast<int>(text.length()),
&parts->scheme)) {
@@ -279,11 +292,11 @@ wstring URLFixerUpper::SegmentURL(const wstring& text,
(!IsStringASCII(scheme) ||
// We need to fix up the segmentation for "www.example.com:/". For this
// case, we guess that schemes with a "." are not actually schemes.
- (scheme.find(L".") != wstring::npos) ||
+ (scheme.find(".") != wstring::npos) ||
// We need to fix up the segmentation for "www:123/". For this case, we
// will add an HTTP scheme later and make the URL parser happy.
// TODO(pkasting): Maybe we should try to use GURL's parser for this?
- HasPort(text, parts->scheme, scheme)))
+ HasPort(text, parts->scheme)))
parts->scheme.reset();
}
@@ -291,15 +304,15 @@ wstring URLFixerUpper::SegmentURL(const wstring& text,
// we choose http, but if the URL starts with "ftp.", we match other browsers
// and choose ftp.
if (!parts->scheme.is_valid())
- scheme.assign(StartsWith(text, L"ftp.", false) ? L"ftp" : L"http");
+ scheme.assign(StartsWithASCII(text, "ftp.", false) ? "ftp" : "http");
// Cannonicalize the scheme.
StringToLowerASCII(&scheme);
// Not segmenting file schemes or nonstandard schemes.
- if ((scheme == L"file") ||
+ if ((scheme == "file") ||
!url_util::IsStandard(scheme.c_str(), static_cast<int>(scheme.length()),
- url_parse::Component(0, static_cast<int>(scheme.length()))))
+ url_parse::Component(0, static_cast<int>(scheme.length()))))
return scheme;
if (parts->scheme.is_valid()) {
@@ -311,14 +324,14 @@ wstring URLFixerUpper::SegmentURL(const wstring& text,
// We need to add a scheme in order for ParseStandardURL to be happy.
// Find the first non-whitespace character.
- wstring::const_iterator first_nonwhite = text.begin();
+ string::const_iterator first_nonwhite = text.begin();
while ((first_nonwhite != text.end()) && IsWhitespace(*first_nonwhite))
++first_nonwhite;
// Construct the text to parse by inserting the scheme.
- wstring inserted_text(scheme);
- inserted_text.append(L"://");
- wstring text_to_parse(text.begin(), first_nonwhite);
+ string inserted_text(scheme);
+ inserted_text.append("://");
+ string text_to_parse(text.begin(), first_nonwhite);
text_to_parse.append(inserted_text);
text_to_parse.append(first_nonwhite, text.end());
@@ -341,26 +354,26 @@ wstring URLFixerUpper::SegmentURL(const wstring& text,
return scheme;
}
-std::wstring URLFixerUpper::FixupURL(const wstring& text,
- const wstring& desired_tld) {
- wstring trimmed;
+string URLFixerUpper::FixupURL(const string& text,
+ const string& desired_tld) {
+ string trimmed;
TrimWhitespace(text, TRIM_ALL, &trimmed);
if (trimmed.empty())
- return wstring(); // Nothing here.
+ return string(); // Nothing here.
// Segment the URL.
url_parse::Parsed parts;
- wstring scheme(SegmentURL(trimmed, &parts));
+ string scheme(SegmentURL(trimmed, &parts));
// We handle the file scheme separately.
- if (scheme == L"file")
+ if (scheme == "file")
return (parts.scheme.is_valid() ? text : FixupPath(text));
// For some schemes whose layouts we understand, we rebuild it.
if (url_util::IsStandard(scheme.c_str(), static_cast<int>(scheme.length()),
url_parse::Component(0, static_cast<int>(scheme.length())))) {
- wstring url(scheme);
- url.append(L"://");
+ string url(scheme);
+ url.append("://");
// We need to check whether the |username| is valid because it is our
// responsibility to append the '@' to delineate the user information from
@@ -368,7 +381,7 @@ std::wstring URLFixerUpper::FixupURL(const wstring& text,
if (parts.username.is_valid()) {
FixupUsername(trimmed, parts.username, &url);
FixupPassword(trimmed, parts.password, &url);
- url.append(L"@");
+ url.append("@");
}
FixupHost(trimmed, parts.host, parts.scheme.is_valid(), desired_tld, &url);
@@ -382,8 +395,8 @@ std::wstring URLFixerUpper::FixupURL(const wstring& text,
// In the worst-case, we insert a scheme if the URL lacks one.
if (!parts.scheme.is_valid()) {
- wstring fixed_scheme(scheme);
- fixed_scheme.append(L"://");
+ string fixed_scheme(scheme);
+ fixed_scheme.append("://");
trimmed.insert(0, fixed_scheme);
}
@@ -395,45 +408,72 @@ std::wstring URLFixerUpper::FixupURL(const wstring& text,
// fixup will look for cues that it is actually a file path before trying to
// figure out what file it is. If our logic doesn't work, we will fall back on
// regular fixup.
-wstring URLFixerUpper::FixupRelativeFile(const wstring& base_dir,
- const wstring& text) {
- wchar_t old_cur_directory[MAX_PATH];
+string URLFixerUpper::FixupRelativeFile(const FilePath& base_dir,
+ const FilePath& text) {
+ FilePath old_cur_directory;
if (!base_dir.empty()) {
- // save the old current directory before we move to the new one
- // TODO: in the future, we may want to handle paths longer than MAX_PATH
- GetCurrentDirectory(MAX_PATH, old_cur_directory);
- SetCurrentDirectory(base_dir.c_str());
+ // Save the old current directory before we move to the new one.
+ file_util::GetCurrentDirectory(&old_cur_directory);
+ file_util::SetCurrentDirectory(base_dir);
}
- // allow funny input with extra whitespace and the wrong kind of slashes
- wstring trimmed;
+ // Allow funny input with extra whitespace and the wrong kind of slashes.
+ FilePath::StringType trimmed;
PrepareStringForFileOps(text, &trimmed);
bool is_file = true;
- wstring full_path;
+ FilePath full_path;
if (!ValidPathForFile(trimmed, &full_path)) {
// Not a path as entered, try unescaping it in case the user has
// escaped things. We need to go through 8-bit since the escaped values
// only represent 8-bit values.
+#if defined(OS_WIN)
std::wstring unescaped = UTF8ToWide(UnescapeURLComponent(
WideToUTF8(trimmed),
UnescapeRule::SPACES | UnescapeRule::URL_SPECIAL_CHARS));
+#elif defined(OS_POSIX)
+ std::string unescaped = UnescapeURLComponent(
+ trimmed,
+ UnescapeRule::SPACES | UnescapeRule::URL_SPECIAL_CHARS);
+#endif
+
if (!ValidPathForFile(unescaped, &full_path))
is_file = false;
}
// Put back the current directory if we saved it.
- if (!base_dir.empty())
- SetCurrentDirectory(old_cur_directory);
+ if (!base_dir.empty()) {
+ file_util::SetCurrentDirectory(old_cur_directory);
+ }
if (is_file) {
GURL file_url = net::FilePathToFileURL(full_path);
if (file_url.is_valid())
- return gfx::ElideUrl(file_url, ChromeFont(), 0, std::wstring());
+ return WideToUTF8(gfx::GetCleanStringFromUrl(file_url, std::wstring(),
+ NULL, NULL));
// Invalid files fall through to regular processing.
}
// Fall back on regular fixup for this input.
- return FixupURL(text, L"");
+#if defined(OS_WIN)
+ string text_utf8 = WideToUTF8(text.value());
+#elif defined(OS_POSIX)
+ string text_utf8 = text.value();
+#endif
+ return FixupURL(text_utf8, "");
}
+// Deprecated functions. To be removed when all callers are updated.
+wstring URLFixerUpper::SegmentURL(const wstring& text,
+ url_parse::Parsed* parts) {
+ return UTF8ToWide(SegmentURL(WideToUTF8(text), parts));
+}
+wstring URLFixerUpper::FixupURL(const wstring& text,
+ const wstring& desired_tld) {
+ return UTF8ToWide(FixupURL(WideToUTF8(text), WideToUTF8(desired_tld)));
+}
+wstring URLFixerUpper::FixupRelativeFile(const wstring& base_dir,
+ const wstring& text) {
+ return UTF8ToWide(FixupRelativeFile(FilePath::FromWStringHack(base_dir),
+ FilePath::FromWStringHack(text)));
+}
diff --git a/chrome/browser/net/url_fixer_upper.h b/chrome/browser/net/url_fixer_upper.h
index c7f3f00..07cb162 100644
--- a/chrome/browser/net/url_fixer_upper.h
+++ b/chrome/browser/net/url_fixer_upper.h
@@ -9,6 +9,8 @@
#include "googleurl/src/url_parse.h"
+class FilePath;
+
// This object is designed to convert various types of input into URLs that we
// know are valid. For example, user typing in the URL bar or command line
// options. This is NOT the place for converting between different types of
@@ -18,8 +20,11 @@ namespace URLFixerUpper {
// Segments the given text string into parts of a URL. This is most useful
// for schemes such as http, https, and ftp where |SegmentURL| will find many
// segments. Currently does not segment "file" schemes.
+ std::string SegmentURL(const std::string& text, url_parse::Parsed* parts);
+ // Deprecated temporary compatibility function.
std::wstring SegmentURL(const std::wstring& text, url_parse::Parsed* parts);
+
// Converts |text| to a fixed-up URL and returns it. Attempts to make
// some "smart" adjustments to obviously-invalid input where possible.
// |text| may be an absolute path to a file, which will get converted to a
@@ -34,6 +39,9 @@ namespace URLFixerUpper {
// |desired_tld| to the domain and prepend "www." (unless it, or a scheme,
// are already present.) This TLD should not have a leading '.' (use "com"
// instead of ".com").
+ std::string FixupURL(const std::string& text,
+ const std::string& desired_tld);
+ // Deprecated temporary compatibility function.
std::wstring FixupURL(const std::wstring& text,
const std::wstring& desired_tld);
@@ -46,8 +54,12 @@ namespace URLFixerUpper {
// For "regular" input, even if it is possibly a file with a full path, you
// should use FixupURL() directly. This function should only be used when
// relative path handling is desired, as for command line processing.
+ std::string FixupRelativeFile(const FilePath& base_dir,
+ const FilePath& text);
+ // Deprecated temporary compatibility function.
std::wstring FixupRelativeFile(const std::wstring& base_dir,
const std::wstring& text);
+
};
#endif // #ifndef CHROME_BROWSER_URL_FIXER_UPPER_H__
diff --git a/chrome/browser/net/url_fixer_upper_unittest.cc b/chrome/browser/net/url_fixer_upper_unittest.cc
index 024060c..1e6dbc7 100644
--- a/chrome/browser/net/url_fixer_upper_unittest.cc
+++ b/chrome/browser/net/url_fixer_upper_unittest.cc
@@ -3,9 +3,9 @@
// found in the LICENSE file.
#include <stdlib.h>
-#include <windows.h>
#include "base/basictypes.h"
+#include "base/file_util.h"
#include "base/logging.h"
#include "base/path_service.h"
#include "base/string_util.h"
@@ -26,8 +26,8 @@ std::ostream& operator<<(std::ostream& os, const url_parse::Component& part) {
}
struct segment_case {
- const std::wstring input;
- const std::wstring result;
+ const std::string input;
+ const std::string result;
const url_parse::Component scheme;
const url_parse::Component username;
const url_parse::Component password;
@@ -39,7 +39,7 @@ struct segment_case {
};
static const segment_case segment_cases[] = {
- { L"http://www.google.com/", L"http",
+ { "http://www.google.com/", "http",
url_parse::Component(0, 4), // scheme
url_parse::Component(), // username
url_parse::Component(), // password
@@ -49,7 +49,7 @@ static const segment_case segment_cases[] = {
url_parse::Component(), // query
url_parse::Component(), // ref
},
- { L"aBoUt:vErSiOn", L"about",
+ { "aBoUt:vErSiOn", "about",
url_parse::Component(0, 5), // scheme
url_parse::Component(), // username
url_parse::Component(), // password
@@ -59,7 +59,7 @@ static const segment_case segment_cases[] = {
url_parse::Component(), // query
url_parse::Component(), // ref
},
- { L" www.google.com:124?foo#", L"http",
+ { " www.google.com:124?foo#", "http",
url_parse::Component(), // scheme
url_parse::Component(), // username
url_parse::Component(), // password
@@ -69,7 +69,7 @@ static const segment_case segment_cases[] = {
url_parse::Component(23, 3), // query
url_parse::Component(27, 0), // ref
},
- { L"user@www.google.com", L"http",
+ { "user@www.google.com", "http",
url_parse::Component(), // scheme
url_parse::Component(0, 4), // username
url_parse::Component(), // password
@@ -79,7 +79,7 @@ static const segment_case segment_cases[] = {
url_parse::Component(), // query
url_parse::Component(), // ref
},
- { L"ftp:/user:P:a$$Wd@..ftp.google.com...::23///pub?foo#bar", L"ftp",
+ { "ftp:/user:P:a$$Wd@..ftp.google.com...::23///pub?foo#bar", "ftp",
url_parse::Component(0, 3), // scheme
url_parse::Component(5, 4), // username
url_parse::Component(10, 7), // password
@@ -92,10 +92,10 @@ static const segment_case segment_cases[] = {
};
TEST(URLFixerUpperTest, SegmentURL) {
- std::wstring result;
+ std::string result;
url_parse::Parsed parts;
- for (int i = 0; i < arraysize(segment_cases); ++i) {
+ for (size_t i = 0; i < arraysize(segment_cases); ++i) {
segment_case value = segment_cases[i];
result = URLFixerUpper::SegmentURL(value.input, &parts);
EXPECT_EQ(value.result, result);
@@ -115,76 +115,74 @@ TEST(URLFixerUpperTest, SegmentURL) {
// full_path = "c:\foo\bar.txt"
// dir = "c:\foo"
// file_name = "bar.txt"
-static bool MakeTempFile(const std::wstring& dir,
- const std::wstring& file_name,
- std::wstring* full_path) {
- *full_path = dir + L"\\" + file_name;
-
- HANDLE hfile = CreateFile(full_path->c_str(), GENERIC_READ | GENERIC_WRITE,
- 0, NULL, CREATE_ALWAYS, 0, NULL);
- if (hfile == NULL || hfile == INVALID_HANDLE_VALUE)
- return false;
- CloseHandle(hfile);
- return true;
+static bool MakeTempFile(const FilePath& dir,
+ const FilePath& file_name,
+ FilePath* full_path) {
+ *full_path = dir.Append(file_name);
+ return file_util::WriteFile(full_path->ToWStringHack(), NULL, 0) == 0;
}
// Returns true if the given URL is a file: URL that matches the given file
-static bool IsMatchingFileURL(const std::wstring& url,
- const std::wstring& full_file_path) {
+static bool IsMatchingFileURL(const std::string& url,
+ const FilePath& full_file_path) {
if (url.length() <= 8)
return false;
- if (std::wstring(L"file:///") != url.substr(0, 8))
+ if (std::string("file:///") != url.substr(0, 8))
return false; // no file:/// prefix
- if (url.find('\\') != std::wstring::npos)
+ if (url.find('\\') != std::string::npos)
return false; // contains backslashes
- std::wstring derived_path;
+ FilePath derived_path;
net::FileURLToFilePath(GURL(url), &derived_path);
- return (derived_path.length() == full_file_path.length()) &&
- std::equal(derived_path.begin(), derived_path.end(),
- full_file_path.begin(), CaseInsensitiveCompare<wchar_t>());
+
+ FilePath::StringType derived_path_str = derived_path.value();
+ return (derived_path_str.length() == full_file_path.value().length()) &&
+ std::equal(derived_path_str.begin(),
+ derived_path_str.end(),
+ full_file_path.value().begin(),
+ CaseInsensitiveCompare<FilePath::CharType>());
}
struct fixup_case {
- const std::wstring input;
- const std::wstring desired_tld;
- const std::wstring output;
+ const std::string input;
+ const std::string desired_tld;
+ const std::string output;
} fixup_cases[] = {
- {L"www.google.com", L"", L"http://www.google.com/"},
- {L" www.google.com ", L"", L"http://www.google.com/"},
- {L" foo.com/asdf bar", L"", L"http://foo.com/asdf bar"},
- {L"..www.google.com..", L"", L"http://www.google.com./"},
- {L"http://......", L"", L"http://....../"},
- {L"http://host.com:ninety-two/", L"", L"http://host.com/"},
- {L"http://host.com:ninety-two?foo", L"", L"http://host.com/?foo"},
- {L"google.com:123", L"", L"http://google.com:123/"},
- {L"about:", L"", L"about:"},
- {L"about:version", L"", L"about:version"},
- {L"www:123", L"", L"http://www:123/"},
- {L" www:123", L"", L"http://www:123/"},
- {L"www.google.com?foo", L"", L"http://www.google.com/?foo"},
- {L"www.google.com#foo", L"", L"http://www.google.com/#foo"},
- {L"www.google.com?", L"", L"http://www.google.com/?"},
- {L"www.google.com#", L"", L"http://www.google.com/#"},
- {L"www.google.com:123?foo#bar", L"", L"http://www.google.com:123/?foo#bar"},
- {L"user@www.google.com", L"", L"http://user@www.google.com/"},
- {L"\x6C34.com", L"", L"http://\x6C34.com/" },
+ {"www.google.com", "", "http://www.google.com/"},
+ {" www.google.com ", "", "http://www.google.com/"},
+ {" foo.com/asdf bar", "", "http://foo.com/asdf bar"},
+ {"..www.google.com..", "", "http://www.google.com./"},
+ {"http://......", "", "http://....../"},
+ {"http://host.com:ninety-two/", "", "http://host.com/"},
+ {"http://host.com:ninety-two?foo", "", "http://host.com/?foo"},
+ {"google.com:123", "", "http://google.com:123/"},
+ {"about:", "", "about:"},
+ {"about:version", "", "about:version"},
+ {"www:123", "", "http://www:123/"},
+ {" www:123", "", "http://www:123/"},
+ {"www.google.com?foo", "", "http://www.google.com/?foo"},
+ {"www.google.com#foo", "", "http://www.google.com/#foo"},
+ {"www.google.com?", "", "http://www.google.com/?"},
+ {"www.google.com#", "", "http://www.google.com/#"},
+ {"www.google.com:123?foo#bar", "", "http://www.google.com:123/?foo#bar"},
+ {"user@www.google.com", "", "http://user@www.google.com/"},
+ {"\xE6\xB0\xB4.com" , "", "http://\xE6\xB0\xB4.com/"},
// It would be better if this next case got treated as http, but I don't see
// a clean way to guess this isn't the new-and-exciting "user" scheme.
- {L"user:passwd@www.google.com:8080/", L"", L"user:passwd@www.google.com:8080/"},
- //{L"file:///c:/foo/bar%20baz.txt", L"", L"file:///C:/foo/bar%20baz.txt"},
- {L"ftp.google.com", L"", L"ftp://ftp.google.com/"},
- {L" ftp.google.com", L"", L"ftp://ftp.google.com/"},
- {L"FTP.GooGle.com", L"", L"ftp://FTP.GooGle.com/"},
- {L"ftpblah.google.com", L"", L"http://ftpblah.google.com/"},
- {L"ftp", L"", L"http://ftp/"},
- {L"google.ftp.com", L"", L"http://google.ftp.com/"},
+ {"user:passwd@www.google.com:8080/", "", "user:passwd@www.google.com:8080/"},
+ //{"file:///c:/foo/bar%20baz.txt", "", "file:///C:/foo/bar%20baz.txt"},
+ {"ftp.google.com", "", "ftp://ftp.google.com/"},
+ {" ftp.google.com", "", "ftp://ftp.google.com/"},
+ {"FTP.GooGle.com", "", "ftp://FTP.GooGle.com/"},
+ {"ftpblah.google.com", "", "http://ftpblah.google.com/"},
+ {"ftp", "", "http://ftp/"},
+ {"google.ftp.com", "", "http://google.ftp.com/"},
};
TEST(URLFixerUpperTest, FixupURL) {
- std::wstring output;
+ std::string output;
- for (int i = 0; i < arraysize(fixup_cases); ++i) {
+ for (size_t i = 0; i < arraysize(fixup_cases); ++i) {
fixup_case value = fixup_cases[i];
output = URLFixerUpper::FixupURL(value.input, value.desired_tld);
EXPECT_EQ(value.output, output);
@@ -192,25 +190,25 @@ TEST(URLFixerUpperTest, FixupURL) {
// Check the TLD-appending functionality
fixup_case tld_cases[] = {
- {L"google", L"com", L"http://www.google.com/"},
- {L"google.", L"com", L"http://www.google.com/"},
- {L"google..", L"com", L"http://www.google.com/"},
- {L".google", L"com", L"http://www.google.com/"},
- {L"www.google", L"com", L"http://www.google.com/"},
- {L"google.com", L"com", L"http://google.com/"},
- {L"http://google", L"com", L"http://www.google.com/"},
- {L"..google..", L"com", L"http://www.google.com/"},
- {L"http://www.google", L"com", L"http://www.google.com/"},
- {L"google/foo", L"com", L"http://www.google.com/foo"},
- {L"google.com/foo", L"com", L"http://google.com/foo"},
- {L"google/?foo=.com", L"com", L"http://www.google.com/?foo=.com"},
- {L"www.google/?foo=www.", L"com", L"http://www.google.com/?foo=www."},
- {L"google.com/?foo=.com", L"com", L"http://google.com/?foo=.com"},
- {L"http://www.google.com", L"com", L"http://www.google.com/"},
- {L"google:123", L"com", L"http://www.google.com:123/"},
- {L"http://google:123", L"com", L"http://www.google.com:123/"},
+ {"google", "com", "http://www.google.com/"},
+ {"google.", "com", "http://www.google.com/"},
+ {"google..", "com", "http://www.google.com/"},
+ {".google", "com", "http://www.google.com/"},
+ {"www.google", "com", "http://www.google.com/"},
+ {"google.com", "com", "http://google.com/"},
+ {"http://google", "com", "http://www.google.com/"},
+ {"..google..", "com", "http://www.google.com/"},
+ {"http://www.google", "com", "http://www.google.com/"},
+ {"google/foo", "com", "http://www.google.com/foo"},
+ {"google.com/foo", "com", "http://google.com/foo"},
+ {"google/?foo=.com", "com", "http://www.google.com/?foo=.com"},
+ {"www.google/?foo=www.", "com", "http://www.google.com/?foo=www."},
+ {"google.com/?foo=.com", "com", "http://google.com/?foo=.com"},
+ {"http://www.google.com", "com", "http://www.google.com/"},
+ {"google:123", "com", "http://www.google.com:123/"},
+ {"http://google:123", "com", "http://www.google.com:123/"},
};
- for (int i = 0; i < arraysize(tld_cases); ++i) {
+ for (size_t i = 0; i < arraysize(tld_cases); ++i) {
fixup_case value = tld_cases[i];
output = URLFixerUpper::FixupURL(value.input, value.desired_tld);
EXPECT_EQ(value.output, output);
@@ -222,113 +220,127 @@ TEST(URLFixerUpperTest, FixupURL) {
// has to exist.
TEST(URLFixerUpperTest, FixupFile) {
// this "original" filename is the one we tweak to get all the variations
- std::wstring dir;
- std::wstring original;
+ FilePath dir;
+ FilePath original;
ASSERT_TRUE(PathService::Get(chrome::DIR_APP, &dir));
- ASSERT_TRUE(MakeTempFile(dir, L"url fixer upper existing file.txt",
+ ASSERT_TRUE(MakeTempFile(dir,
+ FilePath(FILE_PATH_LITERAL("url fixer upper existing file.txt")),
&original));
// reference path
- std::wstring golden =
- UTF8ToWide(net::FilePathToFileURL(original).spec());
+ std::string golden = net::FilePathToFileURL(original).spec();
// c:\foo\bar.txt -> file:///c:/foo/bar.txt (basic)
- std::wstring fixedup = URLFixerUpper::FixupURL(original, L"");
+#if defined(OS_WIN)
+ std::string fixedup = URLFixerUpper::FixupURL(WideToUTF8(original.value()), "");
+#elif defined(OS_POSIX)
+ std::string fixedup = URLFixerUpper::FixupURL(original.value(), "");
+#endif
EXPECT_EQ(golden, fixedup);
+ // TODO(port): Make some equivalent tests for posix.
+#if defined(OS_WIN)
// c|/foo\bar.txt -> file:///c:/foo/bar.txt (pipe allowed instead of colon)
- std::wstring cur(original);
+ std::string cur(WideToUTF8(original.value()));
EXPECT_EQ(':', cur[1]);
cur[1] = '|';
- fixedup = URLFixerUpper::FixupURL(cur, L"");
+ fixedup = URLFixerUpper::FixupURL(cur, "");
EXPECT_EQ(golden, fixedup);
fixup_case file_cases[] = {
// File URLs go through GURL, which tries to escape intelligently.
- {L"c:\\This%20is a non-existent file.txt", L"", L"file:///C:/This%2520is%20a%20non-existent%20file.txt"},
+ {"c:\\This%20is a non-existent file.txt", "", "file:///C:/This%2520is%20a%20non-existent%20file.txt"},
// \\foo\bar.txt -> file://foo/bar.txt
// UNC paths, this file won't exist, but since there are no escapes, it
// should be returned just converted to a file: URL.
- {L"\\\\SomeNonexistentHost\\foo\\bar.txt", L"", L"file://somenonexistenthost/foo/bar.txt"},
- {L"//SomeNonexistentHost\\foo/bar.txt", L"", L"file://somenonexistenthost/foo/bar.txt"},
- {L"file:///C:/foo/bar", L"", L"file:///C:/foo/bar"},
+ {"\\\\SomeNonexistentHost\\foo\\bar.txt", "", "file://somenonexistenthost/foo/bar.txt"},
+ {"//SomeNonexistentHost\\foo/bar.txt", "", "file://somenonexistenthost/foo/bar.txt"},
+ {"file:///C:/foo/bar", "", "file:///C:/foo/bar"},
// These are fixups we don't do, but could consider:
//
- // {L"file://C:/foo/bar", L"", L"file:///C:/foo/bar"},
- // {L"file:c:", L"", L"file:///c:/"},
- // {L"file:c:WINDOWS", L"", L"file:///c:/WINDOWS"},
- // {L"file:c|Program Files", L"", L"file:///c:/Program Files"},
- // {L"file:///foo:/bar", L"", L"file://foo/bar"},
- // {L"file:/file", L"", L"file://file/"},
- // {L"file:////////c:\\foo", L"", L"file:///c:/foo"},
- // {L"file://server/folder/file", L"", L"file://server/folder/file"},
- // {L"file:/\\/server\\folder/file", L"", L"file://server/folder/file"},
+ // {"file://C:/foo/bar", "", "file:///C:/foo/bar"},
+ // {"file:c:", "", "file:///c:/"},
+ // {"file:c:WINDOWS", "", "file:///c:/WINDOWS"},
+ // {"file:c|Program Files", "", "file:///c:/Program Files"},
+ // {"file:///foo:/bar", "", "file://foo/bar"},
+ // {"file:/file", "", "file://file/"},
+ // {"file:////////c:\\foo", "", "file:///c:/foo"},
+ // {"file://server/folder/file", "", "file://server/folder/file"},
+ // {"file:/\\/server\\folder/file", "", "file://server/folder/file"},
};
- for (int i = 0; i < arraysize(file_cases); i++) {
+ for (size_t i = 0; i < arraysize(file_cases); i++) {
fixedup = URLFixerUpper::FixupURL(file_cases[i].input,
file_cases[i].desired_tld);
EXPECT_EQ(file_cases[i].output, fixedup);
}
+#endif
- EXPECT_TRUE(DeleteFile(original.c_str()));
+ EXPECT_TRUE(file_util::Delete(original, false));
}
TEST(URLFixerUpperTest, FixupRelativeFile) {
- std::wstring full_path, dir;
- std::wstring file_part(L"url_fixer_upper_existing_file.txt");
+ FilePath full_path, dir;
+ FilePath file_part(FILE_PATH_LITERAL("url_fixer_upper_existing_file.txt"));
ASSERT_TRUE(PathService::Get(chrome::DIR_APP, &dir));
ASSERT_TRUE(MakeTempFile(dir, file_part, &full_path));
// make sure we pass through good URLs
- std::wstring fixedup;
- for (int i = 0; i < arraysize(fixup_cases); ++i) {
+ std::string fixedup;
+ for (size_t i = 0; i < arraysize(fixup_cases); ++i) {
fixup_case value = fixup_cases[i];
- fixedup = URLFixerUpper::FixupRelativeFile(dir, value.input);
+#if defined(OS_WIN)
+ FilePath input(UTF8ToWide(value.input));
+#elif defined(OS_POSIX)
+ FilePath input(value.input);
+#endif
+ fixedup = URLFixerUpper::FixupRelativeFile(dir, input);
EXPECT_EQ(value.output, fixedup);
}
// make sure the existing file got fixed-up to a file URL, and that there
// are no backslashes
fixedup = URLFixerUpper::FixupRelativeFile(dir, file_part);
- EXPECT_PRED2(IsMatchingFileURL, fixedup, full_path);
- EXPECT_TRUE(DeleteFile(full_path.c_str()));
+ EXPECT_TRUE(IsMatchingFileURL(fixedup, full_path));
+ EXPECT_TRUE(file_util::Delete(full_path, false));
// create a filename we know doesn't exist and make sure it doesn't get
// fixed up to a file URL
- std::wstring nonexistent_file(L"url_fixer_upper_nonexistent_file.txt");
+ FilePath nonexistent_file(FILE_PATH_LITERAL("url_fixer_upper_nonexistent_file.txt"));
fixedup = URLFixerUpper::FixupRelativeFile(dir, nonexistent_file);
- EXPECT_NE(std::wstring(L"file:///"), fixedup.substr(0, 8));
+ EXPECT_NE(std::string("file:///"), fixedup.substr(0, 8));
EXPECT_FALSE(IsMatchingFileURL(fixedup, nonexistent_file));
// make a subdir to make sure relative paths with directories work, also
// test spaces: "app_dir\url fixer-upper dir\url fixer-upper existing file.txt"
- std::wstring sub_dir(L"url fixer-upper dir");
- std::wstring sub_file(L"url fixer-upper existing file.txt");
- std::wstring new_dir = dir + L"\\" + sub_dir;
- CreateDirectory(new_dir.c_str(), NULL);
+ FilePath sub_dir(FILE_PATH_LITERAL("url fixer-upper dir"));
+ FilePath sub_file(FILE_PATH_LITERAL("url fixer-upper existing file.txt"));
+ FilePath new_dir = dir.Append(sub_dir);
+ file_util::CreateDirectory(new_dir);
ASSERT_TRUE(MakeTempFile(new_dir, sub_file, &full_path));
// test file in the subdir
- std::wstring relative_file = sub_dir + L"\\" + sub_file;
+ FilePath relative_file = sub_dir.Append(sub_file);
fixedup = URLFixerUpper::FixupRelativeFile(dir, relative_file);
- EXPECT_PRED2(IsMatchingFileURL, fixedup, full_path);
+ EXPECT_TRUE(IsMatchingFileURL(fixedup, full_path));
- // test file in the subdir with different slashes and escaping
- relative_file = sub_dir + L"/" + sub_file;
- ReplaceSubstringsAfterOffset(&relative_file, 0, L" ", L"%20");
- fixedup = URLFixerUpper::FixupRelativeFile(dir, relative_file);
- EXPECT_PRED2(IsMatchingFileURL, fixedup, full_path);
+ // test file in the subdir with different slashes and escaping.
+ FilePath::StringType relative_file_str = sub_dir.value() +
+ FILE_PATH_LITERAL("/") + sub_file.value();
+ ReplaceSubstringsAfterOffset(&relative_file_str, 0,
+ FILE_PATH_LITERAL(" "), FILE_PATH_LITERAL("%20"));
+ fixedup = URLFixerUpper::FixupRelativeFile(dir, FilePath(relative_file_str));
+ EXPECT_TRUE(IsMatchingFileURL(fixedup, full_path));
// test relative directories and duplicate slashes
// (should resolve to the same file as above)
- relative_file = sub_dir + L"\\../" + sub_dir + L"\\\\\\.\\" + sub_file;
- fixedup = URLFixerUpper::FixupRelativeFile(dir, relative_file);
- EXPECT_PRED2(IsMatchingFileURL, fixedup, full_path);
+ relative_file_str = sub_dir.value() + FILE_PATH_LITERAL("/../") +
+ sub_dir.value() + FILE_PATH_LITERAL("///./") + sub_file.value();
+ fixedup = URLFixerUpper::FixupRelativeFile(dir, FilePath(relative_file_str));
+ EXPECT_TRUE(IsMatchingFileURL(fixedup, full_path));
// done with the subdir
- EXPECT_TRUE(DeleteFile(full_path.c_str()));
- EXPECT_TRUE(RemoveDirectory(new_dir.c_str()));
+ EXPECT_TRUE(file_util::Delete(full_path, false));
+ EXPECT_TRUE(file_util::Delete(new_dir, true));
}
-
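The unit tests above repeatedly bridge between FilePath::StringType and UTF-8 with per-platform #ifdefs (wstring on Windows, string on POSIX). A minimal caller-side sketch of that same pattern is shown here; the helper name is illustrative only and is not part of this patch.

    // Illustrative helper (not in this patch): fix up a path typed by the
    // user, converting the platform-native FilePath value to UTF-8 first.
    #include <string>
    #include "base/file_path.h"
    #include "base/string_util.h"
    #include "build/build_config.h"
    #include "chrome/browser/net/url_fixer_upper.h"

    std::string FixupNativePath(const FilePath& path) {
    #if defined(OS_WIN)
      // FilePath::StringType is std::wstring on Windows.
      return URLFixerUpper::FixupURL(WideToUTF8(path.value()), "");
    #elif defined(OS_POSIX)
      // FilePath::StringType is already std::string on POSIX.
      return URLFixerUpper::FixupURL(path.value(), "");
    #endif
    }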