Field | Value | Date
---|---|---
author | maruel@google.com <maruel@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-03-10 17:39:46 +0000
committer | maruel@google.com <maruel@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2009-03-10 17:39:46 +0000
commit | 72d1e597c85fbf6b45756e3c753696370c48c042 (patch) |
tree | d01609d62295e34aa3a5721d4109f815d0fd23ad /net |
parent | 9475d1d189c03a0626c4855b38f6004bfe7c5bb8 (diff) |
Slight code change to make some global variables const.
Fix lines longer than 80 columns.
Review URL: http://codereview.chromium.org/42013
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@11342 0039d316-1c4b-4281-b951-d872f2087c98
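The "global variables const" part of the change shows up in net/base/mime_util.cc below, where tables declared as `static const char* foo[]` become `static const char* const foo[]`. A minimal standalone sketch of what the extra `const` buys (the table contents here are illustrative, not copied from this CL):

```cpp
#include <cstdio>

// Only the characters are const here; the array elements (the pointers)
// can still be reassigned at run time.
static const char* mutable_table[] = { "image/jpeg", "image/png" };

// With "const char* const" the pointers are const too, so the table is
// fully immutable and can live in read-only data.
static const char* const const_table[] = { "image/jpeg", "image/png" };

int main() {
  mutable_table[0] = "image/gif";    // compiles: the element is writable
  // const_table[0] = "image/gif";   // error: assignment of read-only element
  std::printf("%s %s\n", mutable_table[0], const_table[0]);
  return 0;
}
```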
Diffstat (limited to 'net')
Mode | File | Lines changed
---|---|---
-rw-r--r-- | net/base/completion_callback.h | 4
-rw-r--r-- | net/base/cookie_policy.cc | 5
-rw-r--r-- | net/base/data_url_unittest.cc | 7
-rw-r--r-- | net/base/escape_unittest.cc | 28
-rw-r--r-- | net/base/mime_sniffer.cc | 3
-rw-r--r-- | net/base/mime_util.cc | 21
-rw-r--r-- | net/base/net_util.cc | 6
-rw-r--r-- | net/base/net_util_unittest.cc | 38
-rw-r--r-- | net/base/nss_memio.c | 26
-rw-r--r-- | net/base/ssl_client_socket_nss.cc | 4
-rw-r--r-- | net/base/telnet_server.h | 28
-rw-r--r-- | net/http/http_cache.cc | 4
-rw-r--r-- | net/http/http_cache_unittest.cc | 6
-rw-r--r-- | net/http/http_chunked_decoder_unittest.cc | 5
-rw-r--r-- | net/http/http_response_headers.h | 4
-rw-r--r-- | net/http/http_response_headers_unittest.cc | 6
-rw-r--r-- | net/proxy/proxy_service_unittest.cc | 4
-rw-r--r-- | net/tools/testserver/testserver.py | 68
-rw-r--r-- | net/url_request/url_request_ftp_job.h | 27
-rw-r--r-- | net/url_request/url_request_job_tracker.cc | 5
20 files changed, 184 insertions, 115 deletions
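Most of the long-line fixes in the diff below split string literals across physical lines. Adjacent string literals are concatenated by the compiler, so the wrapped form has exactly the same value. A small self-contained illustration, reusing one of the data URLs from data_url_unittest.cc:

```cpp
#include <cassert>
#include <string>

int main() {
  // The original, over-80-column literal...
  const std::string one_line =
      "data:text/html,%3Chtml%3E%3Cbody%3E%3Cb%3Ehello%20world%3C%2Fb%3E%3C%2Fbody%3E%3C%2Fhtml%3E";
  // ...and the wrapped form used in the patch: adjacent string literals are
  // joined at compile time, so the runtime value is byte-for-byte identical.
  const std::string wrapped =
      "data:text/html,%3Chtml%3E%3Cbody%3E%3Cb%3Ehello%20world"
      "%3C%2Fb%3E%3C%2Fbody%3E%3C%2Fhtml%3E";
  assert(one_line == wrapped);
  return 0;
}
```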
diff --git a/net/base/completion_callback.h b/net/base/completion_callback.h
index 4013f71..7a5655b 100644
--- a/net/base/completion_callback.h
+++ b/net/base/completion_callback.h
@@ -19,7 +19,8 @@ class CompletionCallbackImpl :
     public CallbackImpl< T, void (T::*)(int), Tuple1<int> > {
  public:
   CompletionCallbackImpl(T* obj, void (T::* meth)(int))
-      : CallbackImpl< T, void (T::*)(int), Tuple1<int> >::CallbackImpl(obj, meth) {
+      : CallbackImpl< T, void (T::*)(int),
+                      Tuple1<int> >::CallbackImpl(obj, meth) {
   }
 };
@@ -56,4 +57,3 @@ class CancelableCompletionCallback :
 } // namespace net
 
 #endif // NET_BASE_COMPLETION_CALLBACK_H__
-
diff --git a/net/base/cookie_policy.cc b/net/base/cookie_policy.cc
index 3d82259..d18db63 100644
--- a/net/base/cookie_policy.cc
+++ b/net/base/cookie_policy.cc
@@ -31,8 +31,8 @@ bool CookiePolicy::CanSetCookie(const GURL& url, const GURL& policy_url) {
     case CookiePolicy::BLOCK_THIRD_PARTY_COOKIES:
       if (policy_url.is_empty())
         return true; // Empty policy URL should indicate a first-party request
-
-      return net::RegistryControlledDomainService::SameDomainOrHost(url, policy_url);
+      return net::RegistryControlledDomainService::SameDomainOrHost(url,
+                                                                    policy_url);
     case CookiePolicy::BLOCK_ALL_COOKIES:
       return false;
     default:
@@ -45,4 +45,3 @@ CookiePolicy::CookiePolicy() : type_(CookiePolicy::ALLOW_ALL_COOKIES) {
 }
 
 } // namespace net
-
diff --git a/net/base/data_url_unittest.cc b/net/base/data_url_unittest.cc
index 374554a..b7c2dc0 100644
--- a/net/base/data_url_unittest.cc
+++ b/net/base/data_url_unittest.cc
@@ -72,7 +72,8 @@ TEST(DataURLTest, Parse) {
       "kk",
       "boo" },
-    { "data:text/html,%3Chtml%3E%3Cbody%3E%3Cb%3Ehello%20world%3C%2Fb%3E%3C%2Fbody%3E%3C%2Fhtml%3E",
+    { "data:text/html,%3Chtml%3E%3Cbody%3E%3Cb%3Ehello%20world"
+      "%3C%2Fb%3E%3C%2Fbody%3E%3C%2Fhtml%3E",
       true,
       "text/html",
       "US-ASCII",
@@ -123,7 +124,8 @@ TEST(DataURLTest, Parse) {
     // In base64 encoding, escaped whitespace should be stripped.
    // (This test was taken from acid3)
    // http://b/1054495
-    { "data:text/javascript;base64,%20ZD%20Qg%0D%0APS%20An%20Zm91cic%0D%0A%207%20",
+    { "data:text/javascript;base64,%20ZD%20Qg%0D%0APS%20An%20Zm91cic%0D%0A%207"
+      "%20",
       true,
       "text/javascript",
       "US-ASCII",
@@ -154,4 +156,3 @@ TEST(DataURLTest, Parse) {
     }
   }
 }
-
diff --git a/net/base/escape_unittest.cc b/net/base/escape_unittest.cc
index 3775945..cb80e37 100644
--- a/net/base/escape_unittest.cc
+++ b/net/base/escape_unittest.cc
@@ -115,18 +115,28 @@ TEST(Escape, UnescapeURLComponent) {
     {"%%%%%%", UnescapeRule::NORMAL, "%%%%%%"},
     {"Don't escape anything", UnescapeRule::NORMAL, "Don't escape anything"},
     {"Invalid %escape %2", UnescapeRule::NORMAL, "Invalid %escape %2"},
-    {"Some%20random text %25%3bOK", UnescapeRule::NORMAL, "Some%20random text %25;OK"},
-    {"Some%20random text %25%3bOK", UnescapeRule::SPACES, "Some random text %25;OK"},
-    {"Some%20random text %25%3bOK", UnescapeRule::URL_SPECIAL_CHARS, "Some%20random text %;OK"},
-    {"Some%20random text %25%3bOK", UnescapeRule::SPACES | UnescapeRule::URL_SPECIAL_CHARS, "Some random text %;OK"},
+    {"Some%20random text %25%3bOK", UnescapeRule::NORMAL,
+     "Some%20random text %25;OK"},
+    {"Some%20random text %25%3bOK", UnescapeRule::SPACES,
+     "Some random text %25;OK"},
+    {"Some%20random text %25%3bOK", UnescapeRule::URL_SPECIAL_CHARS,
+     "Some%20random text %;OK"},
+    {"Some%20random text %25%3bOK",
+     UnescapeRule::SPACES | UnescapeRule::URL_SPECIAL_CHARS,
+     "Some random text %;OK"},
     {"%A0%B1%C2%D3%E4%F5", UnescapeRule::NORMAL, "\xA0\xB1\xC2\xD3\xE4\xF5"},
     {"%Aa%Bb%Cc%Dd%Ee%Ff", UnescapeRule::NORMAL, "\xAa\xBb\xCc\xDd\xEe\xFf"},
     // Certain URL-sensitive characters should not be unescaped unless asked.
-    {"Hello%20%13%10world %23# %3F? %3D= %26& %25% %2B+", UnescapeRule::SPACES, "Hello %13%10world %23# %3F? %3D= %26& %25% %2B+"},
-    {"Hello%20%13%10world %23# %3F? %3D= %26& %25% %2B+", UnescapeRule::URL_SPECIAL_CHARS, "Hello%20%13%10world ## ?? == && %% ++"},
+    {"Hello%20%13%10world %23# %3F? %3D= %26& %25% %2B+", UnescapeRule::SPACES,
+     "Hello %13%10world %23# %3F? %3D= %26& %25% %2B+"},
+    {"Hello%20%13%10world %23# %3F? %3D= %26& %25% %2B+",
+     UnescapeRule::URL_SPECIAL_CHARS,
+     "Hello%20%13%10world ## ?? == && %% ++"},
     // Control characters.
-    {"%01%02%03%04%05%06%07%08%09 %25", UnescapeRule::URL_SPECIAL_CHARS, "%01%02%03%04%05%06%07%08%09 %"},
-    {"%01%02%03%04%05%06%07%08%09 %25", UnescapeRule::CONTROL_CHARS, "\x01\x02\x03\x04\x05\x06\x07\x08\x09 %25"},
+    {"%01%02%03%04%05%06%07%08%09 %25", UnescapeRule::URL_SPECIAL_CHARS,
+     "%01%02%03%04%05%06%07%08%09 %"},
+    {"%01%02%03%04%05%06%07%08%09 %25", UnescapeRule::CONTROL_CHARS,
+     "\x01\x02\x03\x04\x05\x06\x07\x08\x09 %25"},
     {"Hello%20%13%10%02", UnescapeRule::SPACES, "Hello %13%10%02"},
     {"Hello%20%13%10%02", UnescapeRule::CONTROL_CHARS, "Hello%20\x13\x10\x02"},
   };
@@ -221,5 +231,3 @@ TEST(Escape, EscapeForHTML) {
     EXPECT_EQ(std::string(tests[i].expected_output), result);
   }
 }
-
-
diff --git a/net/base/mime_sniffer.cc b/net/base/mime_sniffer.cc
index d67682b..0a8bc57 100644
--- a/net/base/mime_sniffer.cc
+++ b/net/base/mime_sniffer.cc
@@ -332,7 +332,8 @@ static bool SniffXML(const char* content, size_t size, std::string* result) {
       // Skip XML declarations.
       ++pos;
       continue;
-    } else if (base::strncasecmp(pos, "<!DOCTYPE", sizeof("<!DOCTYPE")-1) == 0) {
+    } else if (base::strncasecmp(pos, "<!DOCTYPE",
+               sizeof("<!DOCTYPE")-1) == 0) {
      // Skip DOCTYPE declarations.
       ++pos;
       continue;
diff --git a/net/base/mime_util.cc b/net/base/mime_util.cc
index 1528d3a..aec47a9 100644
--- a/net/base/mime_util.cc
+++ b/net/base/mime_util.cc
@@ -155,7 +155,7 @@ bool MimeUtil::GetMimeTypeFromFile(const FilePath& file_path,
 
 // From WebKit's WebCore/platform/MIMETypeRegistry.cpp:
-static const char* supported_image_types[] = {
+static const char* const supported_image_types[] = {
   "image/jpeg",
   "image/jpg",
   "image/png",
@@ -166,7 +166,7 @@ static const char* supported_image_types[] = {
 };
 
 // Note: does not include javascript types list (see supported_javascript_types)
-static const char* supported_non_image_types[] = {
+static const char* const supported_non_image_types[] = {
   "text/html",
   "text/xml",
   "text/xsl",
@@ -181,12 +181,16 @@ static const char* supported_non_image_types[] = {
 };
 
 // Mozilla 1.8 and WinIE 7 both accept text/javascript and text/ecmascript.
-// Mozilla 1.8 accepts application/javascript, application/ecmascript, and application/x-javascript, but WinIE 7 doesn't.
-// WinIE 7 accepts text/javascript1.1 - text/javascript1.3, text/jscript, and text/livescript, but Mozilla 1.8 doesn't.
+// Mozilla 1.8 accepts application/javascript, application/ecmascript, and
+// application/x-javascript, but WinIE 7 doesn't.
+// WinIE 7 accepts text/javascript1.1 - text/javascript1.3, text/jscript, and
+// text/livescript, but Mozilla 1.8 doesn't.
 // Mozilla 1.8 allows leading and trailing whitespace, but WinIE 7 doesn't.
-// Mozilla 1.8 and WinIE 7 both accept the empty string, but neither accept a whitespace-only string.
-// We want to accept all the values that either of these browsers accept, but not other values.
-static const char* supported_javascript_types[] = {
+// Mozilla 1.8 and WinIE 7 both accept the empty string, but neither accept a
+// whitespace-only string.
+// We want to accept all the values that either of these browsers accept, but
+// not other values.
+static const char* const supported_javascript_types[] = {
   "text/javascript",
   "text/ecmascript",
   "application/javascript",
@@ -199,7 +203,7 @@ static const char* supported_javascript_types[] = {
   "text/jscript",
   "text/livescript"
 };
-static const char* view_source_types[] = {
+static const char* const view_source_types[] = {
   "text/xml",
   "text/xsl",
   "application/xml",
@@ -333,4 +337,3 @@ bool MatchesMimeType(const std::string &mime_type_pattern,
 }
 
 } // namespace net
-
diff --git a/net/base/net_util.cc b/net/base/net_util.cc
index b88a3bf..3bad60b 100644
--- a/net/base/net_util.cc
+++ b/net/base/net_util.cc
@@ -167,7 +167,8 @@ STR GetSpecificHeaderT(const STR& headers, const STR& name) {
 // TODO(jungshik): We have almost identical hex-decoding code else where.
 // Consider refactoring and moving it somewhere(base?). Bug 1224311
 inline bool IsHexDigit(unsigned char c) {
-  return (('0' <= c && c <= '9') || ('A' <= c && c <= 'F') || ('a' <= c && c <= 'f'));
+  return (('0' <= c && c <= '9') || ('A' <= c && c <= 'F') ||
+          ('a' <= c && c <= 'f'));
 }
 
 inline unsigned char HexToInt(unsigned char c) {
@@ -564,7 +565,8 @@ bool IsIDNComponentSafe(const char16* str,
   // rebuilt with the minimal subset of locale data for languages
   // to which Chrome is not localized but which we offer in the list
   // of languages selectable for Accept-Languages. With the rebuilt ICU
-  // data, ulocdata_open never should fall back to the default locale. (issue 2078)
+  // data, ulocdata_open never should fall back to the default locale.
+  // (issue 2078)
   // DCHECK(U_SUCCESS(status) && status != U_USING_DEFAULT_WARNING);
   if (U_SUCCESS(status) && status != U_USING_DEFAULT_WARNING) {
     // Should we use auxiliary set, instead?
diff --git a/net/base/net_util_unittest.cc b/net/base/net_util_unittest.cc
index c4083dc..87a8889b 100644
--- a/net/base/net_util_unittest.cc
+++ b/net/base/net_util_unittest.cc
@@ -116,16 +116,22 @@ TEST(NetUtilTest, FileURLConversion) {
   const FileCase round_trip_cases[] = {
 #if defined(OS_WIN)
     {L"C:\\foo\\bar.txt", L"file:///C:/foo/bar.txt"},
-    {L"\\\\some computer\\foo\\bar.txt", L"file://some%20computer/foo/bar.txt"}, // UNC
-    {L"D:\\Name;with%some symbols*#", L"file:///D:/Name%3Bwith%25some%20symbols*%23"},
-    {L"D:\\Chinese\\\x6240\x6709\x4e2d\x6587\x7f51\x9875.doc", L"file:///D:/Chinese/%E6%89%80%E6%9C%89%E4%B8%AD%E6%96%87%E7%BD%91%E9%A1%B5.doc"},
+    {L"\\\\some computer\\foo\\bar.txt",
+     L"file://some%20computer/foo/bar.txt"}, // UNC
+    {L"D:\\Name;with%some symbols*#",
+     L"file:///D:/Name%3Bwith%25some%20symbols*%23"},
+    {L"D:\\Chinese\\\x6240\x6709\x4e2d\x6587\x7f51\x9875.doc",
+     L"file:///D:/Chinese/%E6%89%80%E6%9C%89%E4%B8%AD%E6%96%87%E7%BD%91"
+     L"%E9%A1%B5.doc"},
 #elif defined(OS_POSIX)
     {L"/foo/bar.txt", L"file:///foo/bar.txt"},
     {L"/foo/BAR.txt", L"file:///foo/BAR.txt"},
     {L"/C:/foo/bar.txt", L"file:///C:/foo/bar.txt"},
     {L"/some computer/foo/bar.txt", L"file:///some%20computer/foo/bar.txt"},
     {L"/Name;with%some symbols*#", L"file:///Name%3Bwith%25some%20symbols*%23"},
-    {L"/Chinese/\x6240\x6709\x4e2d\x6587\x7f51\x9875.doc", L"file:///Chinese/%E6%89%80%E6%9C%89%E4%B8%AD%E6%96%87%E7%BD%91%E9%A1%B5.doc"},
+    {L"/Chinese/\x6240\x6709\x4e2d\x6587\x7f51\x9875.doc",
+     L"file:///Chinese/%E6%89%80%E6%9C%89%E4%B8%AD%E6%96%87%E7%BD"
+     L"%91%E9%A1%B5.doc"},
 #endif
   };
@@ -184,11 +190,14 @@ TEST(NetUtilTest, FileURLConversion) {
   // Here, we test that UTF-8 encoded strings get decoded properly, even when
   // they might be stored with wide characters. On posix systems, just treat
   // this as a stream of bytes.
-  const wchar_t utf8[] = L"file:///d:/Chinese/\xe6\x89\x80\xe6\x9c\x89\xe4\xb8\xad\xe6\x96\x87\xe7\xbd\x91\xe9\xa1\xb5.doc";
+  const wchar_t utf8[] = L"file:///d:/Chinese/\xe6\x89\x80\xe6\x9c\x89\xe4\xb8"
+                         L"\xad\xe6\x96\x87\xe7\xbd\x91\xe9\xa1\xb5.doc";
 #if defined(OS_WIN)
-  const wchar_t wide[] = L"D:\\Chinese\\\x6240\x6709\x4e2d\x6587\x7f51\x9875.doc";
+  const wchar_t wide[] =
+      L"D:\\Chinese\\\x6240\x6709\x4e2d\x6587\x7f51\x9875.doc";
 #elif defined(OS_POSIX)
-  const wchar_t wide[] = L"/d:/Chinese/\xe6\x89\x80\xe6\x9c\x89\xe4\xb8\xad\xe6\x96\x87\xe7\xbd\x91\xe9\xa1\xb5.doc";
+  const wchar_t wide[] = L"/d:/Chinese/\xe6\x89\x80\xe6\x9c\x89\xe4\xb8\xad\xe6"
+                         L"\x96\x87\xe7\xbd\x91\xe9\xa1\xb5.doc";
 #endif
   EXPECT_TRUE(net::FileURLToFilePath(GURL(WideToUTF8(utf8)), &output));
   EXPECT_EQ(std::wstring(wide), output);
@@ -220,7 +229,8 @@ const wchar_t* google_headers =
     L"Transfer-Encoding: chunked\n"
     L"Set-Cookie: HEHE_AT=6666x66beef666x6-66xx6666x66; Path=/mail\n"
     L"Set-Cookie: HEHE_HELP=owned:0;Path=/\n"
-    L"Set-Cookie: S=gmail=Xxx-beefbeefbeef_beefb:gmail_yj=beefbeef000beefbeefbee:gmproxy=bee-fbeefbe; Domain=.google.com; Path=/\n"
+    L"Set-Cookie: S=gmail=Xxx-beefbeefbeef_beefb:gmail_yj=beefbeef000beefbee"
+    L"fbee:gmproxy=bee-fbeefbe; Domain=.google.com; Path=/\n"
     L"X-Google-Google2: /one/two/three/four/five/six/seven-height/nine:9411\n"
     L"Server: GFE/1.3\n"
     L"Transfer-Encoding: chunked\n"
@@ -303,7 +313,8 @@ TEST(NetUtilTest, GetFileNameFromCD) {
     {"content-disposition: name=abcde.pdf", L"abcde.pdf"},
     {"content-disposition: inline; filename=\"abc%20de.pdf\"", L"abc de.pdf"},
     // Whitespaces are converted to a space.
-    {"content-disposition: inline; filename=\"abc \t\nde.pdf\"", L"abc de.pdf"},
+    {"content-disposition: inline; filename=\"abc \t\nde.pdf\"",
+     L"abc de.pdf"},
     // %-escaped UTF-8
     {"Content-Disposition: attachment; filename=\"%EC%98%88%EC%88%A0%20"
      "%EC%98%88%EC%88%A0.jpg\"", L"\xc608\xc220 \xc608\xc220.jpg"},
@@ -583,8 +594,10 @@ TEST(NetUtilTest, IDNToUnicode) {
      false, false, false, false, false, false}},
 
     // One that's really long that will force a buffer realloc
-    {"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
-     L"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
+    {"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+     "aaaaaaa",
+     L"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+     L"aaaaaaaa",
      {true, true, true, true, true,
       true, true, true, true, true,
       true, true, true, true, true,
@@ -775,7 +788,8 @@ TEST(NetUtilTest, GetDirectoryListingEntry) {
      false,
      10000,
      base::Time(),
-     "<script>addRow(\"quo\\\"tes\",\"quo%22tes\",0,\"9.8 kB\",\"\");</script>\n"},
+     "<script>addRow(\"quo\\\"tes\",\"quo%22tes\",0,\"9.8 kB\",\"\");</script>"
+     "\n"},
   };
 
   for (size_t i = 0; i < ARRAYSIZE_UNSAFE(test_cases); ++i) {
diff --git a/net/base/nss_memio.c b/net/base/nss_memio.c
index f32ca5d..d92adf6 100644
--- a/net/base/nss_memio.c
+++ b/net/base/nss_memio.c
@@ -168,14 +168,14 @@ static int memio_buffer_put(struct memio_buffer *mb, const char *buf, int n)
         transferred += len;
 
         /* Handle part after wrap */
-        len = PR_MIN(n, memio_buffer_unused_contiguous(mb));
-        if (len > 0) {
+        len = PR_MIN(n, memio_buffer_unused_contiguous(mb));
+        if (len > 0) {
             /* Output buffer still not full, input buffer still not empty */
-            memcpy(&mb->buf[mb->tail], buf, len);
-            mb->tail += len;
+            memcpy(&mb->buf[mb->tail], buf, len);
+            mb->tail += len;
             if (mb->tail == mb->bufsize)
                 mb->tail = 0;
-            transferred += len;
+            transferred += len;
         }
     }
@@ -201,13 +201,13 @@ static int memio_buffer_get(struct memio_buffer *mb, char *buf, int n)
         transferred += len;
 
         /* Handle part after wrap */
-        len = PR_MIN(n, memio_buffer_used_contiguous(mb));
-        if (len) {
-            memcpy(buf, &mb->buf[mb->head], len);
-            mb->head += len;
+        len = PR_MIN(n, memio_buffer_used_contiguous(mb));
+        if (len) {
+            memcpy(buf, &mb->buf[mb->head], len);
+            mb->head += len;
             if (mb->head == mb->bufsize)
                 mb->head = 0;
-            transferred += len;
+            transferred += len;
         }
     }
@@ -429,7 +429,7 @@ void memio_PutReadResult(memio_Private *secret, int bytes_read)
     if (bytes_read > 0) {
         mb->tail += bytes_read;
         if (mb->tail == mb->bufsize)
-            mb->tail = 0;
+            mb->tail = 0;
     } else if (bytes_read == 0) {
         /* Record EOF condition and report to caller when buffer runs dry */
         ((PRFilePrivate *)secret)->eof = PR_TRUE;
@@ -472,8 +472,8 @@ void memio_PutWriteResult(memio_Private *secret, int bytes_written)
 
 #define CHECKEQ(a, b) { \
     if ((a) != (b)) { \
-        printf("%d != %d, Test failed line %d\n", a, b, __LINE__); \
-        exit(1); \
+        printf("%d != %d, Test failed line %d\n", a, b, __LINE__); \
+        exit(1); \
     } \
 }
diff --git a/net/base/ssl_client_socket_nss.cc b/net/base/ssl_client_socket_nss.cc
index 648f807..4777ddc 100644
--- a/net/base/ssl_client_socket_nss.cc
+++ b/net/base/ssl_client_socket_nss.cc
@@ -400,7 +400,8 @@ int SSLClientSocketNSS::DoLoop(int last_io_result) {
       int nreceived = BufferRecv();
       network_moved = (nsent > 0 || nreceived >= 0);
     }
-  } while ((rv != ERR_IO_PENDING || network_moved) && next_state_ != STATE_NONE);
+  } while ((rv != ERR_IO_PENDING || network_moved) &&
+           next_state_ != STATE_NONE);
   LeaveFunction("");
   return rv;
 }
@@ -585,4 +586,3 @@ int SSLClientSocketNSS::DoPayloadWrite() {
 }
 
 } // namespace net
-
diff --git a/net/base/telnet_server.h b/net/base/telnet_server.h
index 1f7c21a..9b9c4e8 100644
--- a/net/base/telnet_server.h
+++ b/net/base/telnet_server.h
@@ -24,15 +24,24 @@ protected:
 
 private:
   enum TelnetInputState {
-    NOT_IN_IAC_OR_ESC_SEQUENCE, // Currently not processing any IAC or ESC sequence.
-    EXPECTING_NEW_LINE, // Received carriage return (CR) expecting new line (LF).
-    EXPECTING_COMMAND, // Processing IAC expecting command.
-    EXPECTING_OPTION, // Processing IAC expecting option.
-    SUBNEGOTIATION_EXPECTING_IAC, // Inside subnegoation IAC,SE will end it.
-    SUBNEGOTIATION_EXPECTING_SE, // Ending subnegoation expecting SE.
-    EXPECTING_FIRST_ESC_CHARACTER, // Processing ESC sequence.
-    EXPECTING_SECOND_ESC_CHARACTER, // Processing ESC sequence with two characters
-    EXPECTING_NUMBER_SEMICOLON_OR_END // Processing "ESC [" sequence.
+    // Currently not processing any IAC or ESC sequence.
+    NOT_IN_IAC_OR_ESC_SEQUENCE,
+    // Received carriage return (CR) expecting new line (LF).
+    EXPECTING_NEW_LINE,
+    // Processing IAC expecting command.
+    EXPECTING_COMMAND,
+    // Processing IAC expecting option.
+    EXPECTING_OPTION,
+    // Inside subnegoation IAC,SE will end it.
+    SUBNEGOTIATION_EXPECTING_IAC,
+    // Ending subnegoation expecting SE.
+    SUBNEGOTIATION_EXPECTING_SE,
+    // Processing ESC sequence.
+    EXPECTING_FIRST_ESC_CHARACTER,
+    // Processing ESC sequence with two characters.
+    EXPECTING_SECOND_ESC_CHARACTER,
+    // Processing "ESC [" sequence.
+    EXPECTING_NUMBER_SEMICOLON_OR_END
   };
 
   TelnetServer(SOCKET s, ListenSocketDelegate* del);
@@ -50,4 +59,3 @@ private:
 };
 
 #endif // BASE_TELNET_SERVER_H_
-
diff --git a/net/http/http_cache.cc b/net/http/http_cache.cc
index 913dc00..75f0256 100644
--- a/net/http/http_cache.cc
+++ b/net/http/http_cache.cc
@@ -320,7 +320,8 @@ class HttpCache::Transaction
   uint64 final_upload_progress_;
   CompletionCallbackImpl<Transaction> network_info_callback_;
   CompletionCallbackImpl<Transaction> network_read_callback_;
-  scoped_refptr<CancelableCompletionCallback<Transaction> > cache_read_callback_;
+  scoped_refptr<CancelableCompletionCallback<Transaction> >
+      cache_read_callback_;
 };
 
 HttpCache::Transaction::~Transaction() {
@@ -1458,4 +1459,3 @@ void HttpCache::OnProcessPendingQueue(ActiveEntry* entry) {
 //-----------------------------------------------------------------------------
 
 } // namespace net
-
diff --git a/net/http/http_cache_unittest.cc b/net/http/http_cache_unittest.cc
index 5db229b..92980e1 100644
--- a/net/http/http_cache_unittest.cc
+++ b/net/http/http_cache_unittest.cc
@@ -820,7 +820,8 @@ static void ETagGet_ConditionalRequest_Handler(
     std::string* response_status,
     std::string* response_headers,
     std::string* response_data) {
-  EXPECT_TRUE(request->extra_headers.find("If-None-Match") != std::string::npos);
+  EXPECT_TRUE(request->extra_headers.find("If-None-Match") !=
+              std::string::npos);
   response_status->assign("HTTP/1.1 304 Not Modified");
   response_headers->assign(kETagGET_Transaction.response_headers);
   response_data->clear();
@@ -957,7 +958,8 @@ TEST(HttpCache, RangeGET_SkipsCache) {
   EXPECT_EQ(0, cache.disk_cache()->open_count());
   EXPECT_EQ(0, cache.disk_cache()->create_count());
 
-  transaction.request_headers = "If-Modified-Since: Wed, 28 Nov 2007 00:45:20 GMT";
+  transaction.request_headers =
+      "If-Modified-Since: Wed, 28 Nov 2007 00:45:20 GMT";
   RunTransactionTest(cache.http_cache(), transaction);
 
   EXPECT_EQ(3, cache.network_layer()->transaction_count());
diff --git a/net/http/http_chunked_decoder_unittest.cc b/net/http/http_chunked_decoder_unittest.cc
index c8b4f9f..51fbba0 100644
--- a/net/http/http_chunked_decoder_unittest.cc
+++ b/net/http/http_chunked_decoder_unittest.cc
@@ -32,7 +32,9 @@ void RunTest(const char* inputs[], size_t num_inputs,
 }
 
 // Feed the inputs to the decoder, until it returns an error.
-void RunTestUntilFailure(const char* inputs[], size_t num_inputs, size_t fail_index) {
+void RunTestUntilFailure(const char* inputs[],
+                         size_t num_inputs,
+                         size_t fail_index) {
   net::HttpChunkedDecoder decoder;
   EXPECT_FALSE(decoder.reached_eof());
 
@@ -264,4 +266,3 @@ TEST(HttpChunkedDecoderTest, InvalidConsecutiveCRLFs) {
   };
   RunTestUntilFailure(inputs, arraysize(inputs), 1);
 }
-
diff --git a/net/http/http_response_headers.h b/net/http/http_response_headers.h
index 4158d79..918c293 100644
--- a/net/http/http_response_headers.h
+++ b/net/http/http_response_headers.h
@@ -226,8 +226,8 @@ class HttpResponseHeaders :
                            std::string::const_iterator line_end);
 
   // Tries to extract the status line from a header block, given the first
-  // line of said header block. If the status line is malformed, we'll construct
-  // a valid one. Example input:
+  // line of said header block. If the status line is malformed, we'll
+  // construct a valid one. Example input:
   //   HTTP/1.1 200 OK
   // with line_begin and end pointing at the begin and end of this line.
   // Output will be a normalized version of this, with a trailing \n.
diff --git a/net/http/http_response_headers_unittest.cc b/net/http/http_response_headers_unittest.cc
index 9c7ce75..ef7d8dc 100644
--- a/net/http/http_response_headers_unittest.cc
+++ b/net/http/http_response_headers_unittest.cc
@@ -636,7 +636,8 @@ TEST(HttpResponseHeadersTest, GetMimeType) {
   for (size_t i = 0; i < arraysize(tests); ++i) {
     string headers(tests[i].raw_headers);
     HeadersToRaw(&headers);
-    scoped_refptr<HttpResponseHeaders> parsed = new HttpResponseHeaders(headers);
+    scoped_refptr<HttpResponseHeaders> parsed =
+        new HttpResponseHeaders(headers);
 
     std::string value;
     EXPECT_EQ(tests[i].has_mimetype, parsed->GetMimeType(&value));
@@ -762,7 +763,8 @@ TEST(HttpResponseHeadersTest, RequiresValidation) {
   for (size_t i = 0; i < ARRAYSIZE_UNSAFE(tests); ++i) {
     string headers(tests[i].headers);
     HeadersToRaw(&headers);
-    scoped_refptr<HttpResponseHeaders> parsed = new HttpResponseHeaders(headers);
+    scoped_refptr<HttpResponseHeaders> parsed =
+        new HttpResponseHeaders(headers);
 
     bool requires_validation =
         parsed->RequiresValidation(request_time, response_time, current_time);
diff --git a/net/proxy/proxy_service_unittest.cc b/net/proxy/proxy_service_unittest.cc
index ea1d56b..bf1b3d0 100644
--- a/net/proxy/proxy_service_unittest.cc
+++ b/net/proxy/proxy_service_unittest.cc
@@ -343,7 +343,8 @@ class MockProxyScriptFetcher : public net::ProxyScriptFetcher {
   std::string* pending_request_bytes_;
 };
 
-// Template specialization so MockProxyScriptFetcher does not have to be refcounted.
+// Template specialization so MockProxyScriptFetcher does not have to be
+// refcounted.
 template<>
 void RunnableMethodTraits<MockProxyScriptFetcher>::RetainCallee(
     MockProxyScriptFetcher* remover) {}
@@ -922,4 +923,3 @@ TEST(ProxyServiceTest, CancelWhilePACFetching) {
   EXPECT_EQ("pac-v1.request3:80",
             result3->GetProxyInfo().proxy_server().ToURI());
 }
-
diff --git a/net/tools/testserver/testserver.py b/net/tools/testserver/testserver.py
index 2d89733..407694a 100644
--- a/net/tools/testserver/testserver.py
+++ b/net/tools/testserver/testserver.py
@@ -120,10 +120,16 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
                           self.EchoAllHandler,
                           self.EchoHandler] + self._get_handlers
 
-    self._mime_types = { 'gif': 'image/gif', 'jpeg' : 'image/jpeg', 'jpg' : 'image/jpeg' }
+    self._mime_types = {
+      'gif': 'image/gif',
+      'jpeg' : 'image/jpeg',
+      'jpg' : 'image/jpeg'
+    }
     self._default_mime_type = 'text/html'
 
-    BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request, client_address, socket_server)
+    BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request,
+                                                   client_address,
+                                                   socket_server)
 
   def _ShouldHandleRequest(self, handler_name):
     """Determines if the path can be handled by the handler.
@@ -174,7 +180,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Content-type', 'text/html')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -190,7 +197,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    self.send_header('Content-type', 'text/html')
    self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
    return True
 
@@ -206,7 +214,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Content-type', 'text/html')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -222,7 +231,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Content-type', 'text/html')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -238,7 +248,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'max-age=60, proxy-revalidate')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -254,7 +265,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'max-age=5, private')
    self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -270,7 +282,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'max-age=5, public')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -286,7 +299,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'public, s-maxage = 60, max-age = 0')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -302,7 +316,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'must-revalidate')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -319,7 +334,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'max-age=60, must-revalidate')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -335,7 +351,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'no-store')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -352,7 +369,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'max-age=60, no-store')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -370,7 +388,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     self.send_header('Cache-Control', 'no-transform')
     self.end_headers()
 
-    self.wfile.write('<html><head><title>%s</title></head></html>' % time.time())
+    self.wfile.write('<html><head><title>%s</title></head></html>' %
+                     time.time())
 
     return True
 
@@ -730,12 +749,14 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     # the request URL into this, but we don't care for testing.
     nonce_life = 10
     stale = False
-    if not self.server.nonce or (time.time() - self.server.nonce_time > nonce_life):
+    if (not self.server.nonce or
+        (time.time() - self.server.nonce_time > nonce_life)):
      if self.server.nonce:
         stale = True
       self.server.nonce_time = time.time()
       self.server.nonce = \
-          _new_md5(time.ctime(self.server.nonce_time) + 'privatekey').hexdigest()
+          _new_md5(time.ctime(self.server.nonce_time) +
+                   'privatekey').hexdigest()
 
     nonce = self.server.nonce
     opaque = _new_md5('opaque').hexdigest()
@@ -760,7 +781,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     # Check the 'response' value and make sure it matches our magic hash.
     # See http://www.ietf.org/rfc/rfc2617.txt
-    hash_a1 = _new_md5(':'.join([pairs['username'], realm, password])).hexdigest()
+    hash_a1 = _new_md5(
+        ':'.join([pairs['username'], realm, password])).hexdigest()
     hash_a2 = _new_md5(':'.join([self.command, pairs['uri']])).hexdigest()
 
     if 'qop' in pairs and 'nc' in pairs and 'cnonce' in pairs:
       response = _new_md5(':'.join([hash_a1, nonce, pairs['nc'],
@@ -845,7 +867,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
 
   def ServerRedirectHandler(self):
     """Sends a server redirect to the given URL. The syntax is
-    '/server-redirect?http://foo.bar/asdf' to redirect to 'http://foo.bar/asdf'"""
+    '/server-redirect?http://foo.bar/asdf' to redirect to
+    'http://foo.bar/asdf'"""
 
     test_name = "/server-redirect"
     if not self._ShouldHandleRequest(test_name):
@@ -868,7 +891,8 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
 
   def ClientRedirectHandler(self):
     """Sends a client redirect to the given URL. The syntax is
-    '/client-redirect?http://foo.bar/asdf' to redirect to 'http://foo.bar/asdf'"""
+    '/client-redirect?http://foo.bar/asdf' to redirect to
+    'http://foo.bar/asdf'"""
 
     test_name = "/client-redirect"
     if not self._ShouldHandleRequest(test_name):
@@ -1045,7 +1069,8 @@ def main(options, args):
   pyftpdlib.ftpserver.logline = line_logger
 
   # Define a customized banner (string returned when client connects)
-  ftp_handler.banner = "pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__
+  ftp_handler.banner = ("pyftpdlib %s based ftpd ready." %
+                        pyftpdlib.ftpserver.__ver__)
 
   # Instantiate FTP server class and listen to 127.0.0.1:port
   address = ('127.0.0.1', port)
@@ -1075,4 +1100,3 @@ if __name__ == '__main__':
   options, args = option_parser.parse_args()
 
   sys.exit(main(options, args))
-
diff --git a/net/url_request/url_request_ftp_job.h b/net/url_request/url_request_ftp_job.h
index 9dcd7e4..b85a06d 100644
--- a/net/url_request/url_request_ftp_job.h
+++ b/net/url_request/url_request_ftp_job.h
@@ -72,16 +72,22 @@ class URLRequestFtpJob : public URLRequestInetJob {
   virtual void ContinueNotifyHeadersComplete();
 
   typedef enum {
-    START = 0x200,          // initial state of the ftp job
-    CONNECTING,             // opening the url
-    SETTING_CUR_DIRECTORY,  // attempting to change current dir to match request
-    FINDING_FIRST_FILE,     // retrieving first file information in cur dir (by
-                            // FtpFindFirstFile)
-    GETTING_DIRECTORY,      // retrieving the directory listing (if directory)
-    GETTING_FILE_HANDLE,    // initiate access to file by call to FtpOpenFile
-                            // (if file).
-    GETTING_FILE,           // retrieving the file (if file)
-    DONE                    // URLRequestInetJob is reading the response now
+    // Initial state of the ftp job.
+    START = 0x200,
+    // Opening the url.
+    CONNECTING,
+    // Attempting to change current dir to match request.
+    SETTING_CUR_DIRECTORY,
+    // Retrieving first file information in cur dir (by FtpFindFirstFile).
+    FINDING_FIRST_FILE,
+    // Retrieving the directory listing (if directory).
+    GETTING_DIRECTORY,
+    // Initiate access to file by call to FtpOpenFile (if file).
+    GETTING_FILE_HANDLE,
+    // Retrieving the file (if file).
+    GETTING_FILE,
+    // URLRequestInetJob is reading the response now.
+    DONE
   } FtpJobState;
 
   // The FtpJob has several asynchronous operations which happen
@@ -108,4 +114,3 @@ class URLRequestFtpJob : public URLRequestInetJob {
 };
 
 #endif // NET_URL_REQUEST_URL_REQUEST_FTP_JOB_H_
-
diff --git a/net/url_request/url_request_job_tracker.cc b/net/url_request/url_request_job_tracker.cc
index bc6ba2b..1f5b33c 100644
--- a/net/url_request/url_request_job_tracker.cc
+++ b/net/url_request/url_request_job_tracker.cc
@@ -16,8 +16,8 @@ URLRequestJobTracker::URLRequestJobTracker() {
 
 URLRequestJobTracker::~URLRequestJobTracker() {
   DLOG_IF(WARNING, active_jobs_.size() != 0) <<
-    "Leaking " << active_jobs_.size() << " URLRequestJob object(s), this could be "
-    "because the URLRequest forgot to free it (bad), or if the program was "
+    "Leaking " << active_jobs_.size() << " URLRequestJob object(s), this could "
+    "be because the URLRequest forgot to free it (bad), or if the program was "
     "terminated while a request was active (normal).";
 }
 
@@ -55,4 +55,3 @@ void URLRequestJobTracker::OnBytesRead(URLRequestJob* job,
   FOR_EACH_OBSERVER(JobObserver, observers_,
                     OnBytesRead(job, byte_count));
 }
-