commit     8e45cd72ae2bf0a6f01aa6c492f336a92e3b95d4
tree       e8206a3557f39ca1f01c24b9610aebe809daa863
parent     03c52b7cffefe1e54ab4fa75c14329e01ebf91a0
author     jam <jam@chromium.org>                 2015-01-20 08:33:44 -0800
committer  Commit bot <commit-bot@chromium.org>   2015-01-20 16:35:28 +0000
Move URLRequestSlowDownloadJob to net/test/url_request.
This way it's in the same directory as the other test URLRequest classes, since it's used by both content and chrome.
This cleans up some incorrect DEPS rules in chrome, where it was reaching into internal content directories.
Review URL: https://codereview.chromium.org/852413002
Cr-Commit-Position: refs/heads/master@{#312210}
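
For callers, the move changes only the include path and the namespace: the job now lives under net/test/url_request/ and in namespace net instead of content. Below is a minimal sketch of the new usage, based on the call sites updated in this patch; the helper functions are illustrative, not part of the change.

    // New include path after this change.
    #include "net/test/url_request/url_request_slow_download_job.h"
    #include "url/gurl.h"

    // Illustrative helper (not in this patch): register the job's test URLs
    // with the URLRequestFilter so navigations to them are served by the
    // slow-download job. The real call sites, e.g.
    // chrome/browser/net/url_request_mock_util.cc, do this on the IO thread.
    void RegisterSlowDownloadUrls() {
      net::URLRequestSlowDownloadJob::AddUrlHandler();
    }

    // URLs a test navigates to: the first starts a download that stalls after
    // the first chunk; the second lets all stalled downloads finish.
    GURL StalledDownloadUrl() {
      return GURL(net::URLRequestSlowDownloadJob::kUnknownSizeUrl);
    }
    GURL FinishDownloadsUrl() {
      return GURL(net::URLRequestSlowDownloadJob::kFinishDownloadUrl);
    }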
-rw-r--r--  chrome/browser/download/download_browsertest.cc                        |  36
-rw-r--r--  chrome/browser/extensions/api/downloads/downloads_api_browsertest.cc   |  13
-rw-r--r--  chrome/browser/lifetime/browser_close_manager_browsertest.cc           |   4
-rw-r--r--  chrome/browser/net/url_request_mock_util.cc                            |   4
-rw-r--r--  chrome/browser/policy/DEPS                                             |   1
-rw-r--r--  chrome/browser/policy/cloud/DEPS                                       |   1
-rw-r--r--  chrome/browser/ui/browser_close_browsertest.cc                         |   7
-rw-r--r--  content/browser/download/download_browsertest.cc                       |  19
-rw-r--r--  content/browser/download/drag_download_file_browsertest.cc             |   1
-rw-r--r--  content/content_tests.gypi                                             |   2
-rw-r--r--  net/BUILD.gn                                                           |   2
-rw-r--r--  net/net.gyp                                                            |   2
-rw-r--r--  net/test/url_request/url_request_slow_download_job.cc (renamed from content/test/net/url_request_slow_download_job.cc) | 113
-rw-r--r--  net/test/url_request/url_request_slow_download_job.h (renamed from content/test/net/url_request_slow_download_job.h)   |  44
14 files changed, 118 insertions, 131 deletions
diff --git a/chrome/browser/download/download_browsertest.cc b/chrome/browser/download/download_browsertest.cc index d315768..658cbaba 100644 --- a/chrome/browser/download/download_browsertest.cc +++ b/chrome/browser/download/download_browsertest.cc @@ -85,12 +85,12 @@ #include "content/public/test/download_test_observer.h" #include "content/public/test/test_file_error_injector.h" #include "content/public/test/test_navigation_observer.h" -#include "content/test/net/url_request_slow_download_job.h" #include "extensions/browser/extension_system.h" #include "extensions/common/feature_switch.h" #include "net/base/filename_util.h" #include "net/test/spawned_test_server/spawned_test_server.h" #include "net/test/url_request/url_request_mock_http_job.h" +#include "net/test/url_request/url_request_slow_download_job.h" #include "testing/gtest/include/gtest/gtest.h" #include "ui/base/l10n/l10n_util.h" #include "ui/base/page_transition_types.h" @@ -107,7 +107,6 @@ using content::BrowserThread; using content::DownloadItem; using content::DownloadManager; using content::DownloadUrlParameters; -using content::URLRequestSlowDownloadJob; using content::WebContents; using extensions::Extension; using extensions::FeatureSwitch; @@ -680,7 +679,7 @@ class DownloadTest : public InProcessBrowserTest { DownloadItem* CreateSlowTestDownload() { scoped_ptr<content::DownloadTestObserver> observer( CreateInProgressDownloadObserver(1)); - GURL slow_download_url(URLRequestSlowDownloadJob::kUnknownSizeUrl); + GURL slow_download_url(net::URLRequestSlowDownloadJob::kUnknownSizeUrl); DownloadManager* manager = DownloadManagerForBrowser(browser()); EXPECT_EQ(0, manager->NonMaliciousInProgressCount()); @@ -716,8 +715,8 @@ class DownloadTest : public InProcessBrowserTest { if (type != SIZE_TEST_TYPE_KNOWN && type != SIZE_TEST_TYPE_UNKNOWN) return false; GURL url(type == SIZE_TEST_TYPE_KNOWN ? - URLRequestSlowDownloadJob::kKnownSizeUrl : - URLRequestSlowDownloadJob::kUnknownSizeUrl); + net::URLRequestSlowDownloadJob::kKnownSizeUrl : + net::URLRequestSlowDownloadJob::kUnknownSizeUrl); // TODO(ahendrickson) -- |expected_title_in_progress| and // |expected_title_finished| need to be checked. @@ -746,7 +745,7 @@ class DownloadTest : public InProcessBrowserTest { // Allow the request to finish. We do this by loading a second URL in a // separate tab. - GURL finish_url(URLRequestSlowDownloadJob::kFinishDownloadUrl); + GURL finish_url(net::URLRequestSlowDownloadJob::kFinishDownloadUrl); ui_test_utils::NavigateToURLWithDisposition( browser, finish_url, @@ -772,8 +771,8 @@ class DownloadTest : public InProcessBrowserTest { return false; // Check the file contents. 
- size_t file_size = URLRequestSlowDownloadJob::kFirstDownloadSize + - URLRequestSlowDownloadJob::kSecondDownloadSize; + size_t file_size = net::URLRequestSlowDownloadJob::kFirstDownloadSize + + net::URLRequestSlowDownloadJob::kSecondDownloadSize; std::string expected_contents(file_size, '*'); EXPECT_TRUE(VerifyFile(download_path, expected_contents, file_size)); @@ -1093,7 +1092,7 @@ class DownloadTest : public InProcessBrowserTest { private: static void EnsureNoPendingDownloadJobsOnIO(bool* result) { - if (URLRequestSlowDownloadJob::NumberOutstandingRequests()) + if (net::URLRequestSlowDownloadJob::NumberOutstandingRequests()) *result = false; BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::MessageLoop::QuitClosure()); @@ -1739,7 +1738,7 @@ IN_PROC_BROWSER_TEST_F(DownloadTest, NewWindow) { } IN_PROC_BROWSER_TEST_F(DownloadTest, DownloadHistoryCheck) { - GURL download_url(URLRequestSlowDownloadJob::kKnownSizeUrl); + GURL download_url(net::URLRequestSlowDownloadJob::kKnownSizeUrl); base::FilePath file(net::GenerateFileName(download_url, std::string(), std::string(), @@ -1778,9 +1777,11 @@ IN_PROC_BROWSER_TEST_F(DownloadTest, DownloadHistoryCheck) { EXPECT_EQ(download_url.spec(), row.url_chain[1].spec()); EXPECT_EQ(history::DownloadDangerType::NOT_DANGEROUS, row.danger_type); EXPECT_LE(start, row.start_time); - EXPECT_EQ(URLRequestSlowDownloadJob::kFirstDownloadSize, row.received_bytes); - EXPECT_EQ(URLRequestSlowDownloadJob::kFirstDownloadSize - + URLRequestSlowDownloadJob::kSecondDownloadSize, row.total_bytes); + EXPECT_EQ(net::URLRequestSlowDownloadJob::kFirstDownloadSize, + row.received_bytes); + EXPECT_EQ(net::URLRequestSlowDownloadJob::kFirstDownloadSize + + net::URLRequestSlowDownloadJob::kSecondDownloadSize, + row.total_bytes); EXPECT_EQ(history::DownloadState::IN_PROGRESS, row.state); EXPECT_FALSE(row.opened); @@ -1790,7 +1791,7 @@ IN_PROC_BROWSER_TEST_F(DownloadTest, DownloadHistoryCheck) { scoped_ptr<content::DownloadTestObserver> download_observer( CreateWaiter(browser(), 1)); ui_test_utils::NavigateToURL(browser(), - GURL(URLRequestSlowDownloadJob::kErrorDownloadUrl)); + GURL(net::URLRequestSlowDownloadJob::kErrorDownloadUrl)); download_observer->WaitForFinished(); EXPECT_EQ(1u, download_observer->NumDownloadsSeenInState( DownloadItem::INTERRUPTED)); @@ -1812,10 +1813,11 @@ IN_PROC_BROWSER_TEST_F(DownloadTest, DownloadHistoryCheck) { EXPECT_EQ(history::DownloadDangerType::NOT_DANGEROUS, row1.danger_type); EXPECT_LE(start, row1.start_time); EXPECT_GE(end, row1.end_time); - EXPECT_EQ(URLRequestSlowDownloadJob::kFirstDownloadSize, + EXPECT_EQ(net::URLRequestSlowDownloadJob::kFirstDownloadSize, row1.received_bytes); - EXPECT_EQ(URLRequestSlowDownloadJob::kFirstDownloadSize - + URLRequestSlowDownloadJob::kSecondDownloadSize, row1.total_bytes); + EXPECT_EQ(net::URLRequestSlowDownloadJob::kFirstDownloadSize + + net::URLRequestSlowDownloadJob::kSecondDownloadSize, + row1.total_bytes); EXPECT_EQ(history::DownloadState::INTERRUPTED, row1.state); EXPECT_EQ(history::ToHistoryDownloadInterruptReason( content::DOWNLOAD_INTERRUPT_REASON_NETWORK_FAILED), diff --git a/chrome/browser/extensions/api/downloads/downloads_api_browsertest.cc b/chrome/browser/extensions/api/downloads/downloads_api_browsertest.cc index 6028304..c7ea81d 100644 --- a/chrome/browser/extensions/api/downloads/downloads_api_browsertest.cc +++ b/chrome/browser/extensions/api/downloads/downloads_api_browsertest.cc @@ -39,11 +39,11 @@ #include "content/public/browser/web_contents.h" #include 
"content/public/common/content_switches.h" #include "content/public/test/download_test_observer.h" -#include "content/test/net/url_request_slow_download_job.h" #include "extensions/browser/event_router.h" #include "extensions/browser/notification_types.h" #include "net/base/data_url.h" #include "net/base/net_util.h" +#include "net/test/url_request/url_request_slow_download_job.h" #include "net/url_request/url_request.h" #include "net/url_request/url_request_context.h" #include "net/url_request/url_request_job.h" @@ -58,7 +58,6 @@ using content::BrowserContext; using content::BrowserThread; using content::DownloadItem; using content::DownloadManager; -using content::URLRequestSlowDownloadJob; namespace errors = download_extension_errors; @@ -451,7 +450,7 @@ class DownloadExtensionTest : public ExtensionApiTest { for (size_t i = 0; i < count; ++i) { scoped_ptr<content::DownloadTestObserver> observer( CreateInProgressDownloadObserver(1)); - GURL slow_download_url(URLRequestSlowDownloadJob::kUnknownSizeUrl); + GURL slow_download_url(net::URLRequestSlowDownloadJob::kUnknownSizeUrl); ui_test_utils::NavigateToURL(current_browser(), slow_download_url); observer->WaitForFinished(); EXPECT_EQ( @@ -464,7 +463,7 @@ class DownloadExtensionTest : public ExtensionApiTest { DownloadItem* CreateSlowTestDownload() { scoped_ptr<content::DownloadTestObserver> observer( CreateInProgressDownloadObserver(1)); - GURL slow_download_url(URLRequestSlowDownloadJob::kUnknownSizeUrl); + GURL slow_download_url(net::URLRequestSlowDownloadJob::kUnknownSizeUrl); DownloadManager* manager = GetCurrentManager(); EXPECT_EQ(0, manager->NonMaliciousInProgressCount()); @@ -495,7 +494,7 @@ class DownloadExtensionTest : public ExtensionApiTest { void FinishPendingSlowDownloads() { scoped_ptr<content::DownloadTestObserver> observer( CreateDownloadObserver(1)); - GURL finish_url(URLRequestSlowDownloadJob::kFinishDownloadUrl); + GURL finish_url(net::URLRequestSlowDownloadJob::kFinishDownloadUrl); ui_test_utils::NavigateToURLWithDisposition( current_browser(), finish_url, NEW_FOREGROUND_TAB, ui_test_utils::BROWSER_TEST_WAIT_FOR_NAVIGATION); @@ -3877,7 +3876,7 @@ IN_PROC_BROWSER_TEST_F( // resumed. 
http://crbug.com/225901 ui_test_utils::NavigateToURLWithDisposition( current_browser(), - GURL(URLRequestSlowDownloadJob::kUnknownSizeUrl), + GURL(net::URLRequestSlowDownloadJob::kUnknownSizeUrl), CURRENT_TAB, ui_test_utils::BROWSER_TEST_NONE); observer->WaitForFinished(); @@ -3917,7 +3916,7 @@ IN_PROC_BROWSER_TEST_F( ClearEvents(); ui_test_utils::NavigateToURLWithDisposition( current_browser(), - GURL(URLRequestSlowDownloadJob::kErrorDownloadUrl), + GURL(net::URLRequestSlowDownloadJob::kErrorDownloadUrl), NEW_BACKGROUND_TAB, ui_test_utils::BROWSER_TEST_WAIT_FOR_NAVIGATION); diff --git a/chrome/browser/lifetime/browser_close_manager_browsertest.cc b/chrome/browser/lifetime/browser_close_manager_browsertest.cc index 4718b72..4b9e34d 100644 --- a/chrome/browser/lifetime/browser_close_manager_browsertest.cc +++ b/chrome/browser/lifetime/browser_close_manager_browsertest.cc @@ -39,9 +39,9 @@ #include "content/public/browser/web_contents.h" #include "content/public/test/download_test_observer.h" #include "content/public/test/test_navigation_observer.h" -#include "content/test/net/url_request_slow_download_job.h" #include "net/test/embedded_test_server/embedded_test_server.h" #include "net/test/url_request/url_request_mock_http_job.h" +#include "net/test/url_request/url_request_slow_download_job.h" #if defined(OS_CHROMEOS) #include "chromeos/chromeos_switches.h" @@ -234,7 +234,7 @@ class BrowserCloseManagerBrowserTest content::BrowserContext::GetDownloadManager(browser->profile()), 1); ui_test_utils::NavigateToURLWithDisposition( browser, - GURL(content::URLRequestSlowDownloadJob::kKnownSizeUrl), + GURL(net::URLRequestSlowDownloadJob::kKnownSizeUrl), NEW_BACKGROUND_TAB, ui_test_utils::BROWSER_TEST_NONE); observer.WaitForFinished(); diff --git a/chrome/browser/net/url_request_mock_util.cc b/chrome/browser/net/url_request_mock_util.cc index cd956bb..29e359d 100644 --- a/chrome/browser/net/url_request_mock_util.cc +++ b/chrome/browser/net/url_request_mock_util.cc @@ -10,9 +10,9 @@ #include "base/threading/thread_restrictions.h" #include "chrome/common/chrome_paths.h" #include "content/public/browser/browser_thread.h" -#include "content/test/net/url_request_slow_download_job.h" #include "net/test/url_request/url_request_failed_job.h" #include "net/test/url_request/url_request_mock_http_job.h" +#include "net/test/url_request/url_request_slow_download_job.h" #include "net/url_request/url_request_filter.h" using content::BrowserThread; @@ -33,7 +33,7 @@ void SetUrlRequestMocksEnabled(bool enabled) { net::URLRequestFilter::GetInstance()->ClearHandlers(); net::URLRequestFailedJob::AddUrlHandler(); - content::URLRequestSlowDownloadJob::AddUrlHandler(); + net::URLRequestSlowDownloadJob::AddUrlHandler(); base::FilePath root_http; PathService::Get(chrome::DIR_TEST_DATA, &root_http); diff --git a/chrome/browser/policy/DEPS b/chrome/browser/policy/DEPS index c6c56db..c7fab80 100644 --- a/chrome/browser/policy/DEPS +++ b/chrome/browser/policy/DEPS @@ -5,5 +5,4 @@ include_rules = [ "+content/public/browser", "+content/public/common", "+content/public/test", - "+content/test/net", ] diff --git a/chrome/browser/policy/cloud/DEPS b/chrome/browser/policy/cloud/DEPS index d0a9131..422fa2a 100644 --- a/chrome/browser/policy/cloud/DEPS +++ b/chrome/browser/policy/cloud/DEPS @@ -36,7 +36,6 @@ specific_include_rules = { "+content/public/browser", "+content/public/common", "+content/public/test", - "+content/test/net", ], r"cloud_policy_invalidator_unittest\.cc": [ diff --git 
a/chrome/browser/ui/browser_close_browsertest.cc b/chrome/browser/ui/browser_close_browsertest.cc index c3251a6..e9183c6 100644 --- a/chrome/browser/ui/browser_close_browsertest.cc +++ b/chrome/browser/ui/browser_close_browsertest.cc @@ -28,14 +28,13 @@ #include "content/public/browser/download_item.h" #include "content/public/test/browser_test_utils.h" #include "content/public/test/download_test_observer.h" -#include "content/test/net/url_request_slow_download_job.h" +#include "net/test/url_request/url_request_slow_download_job.h" #include "ui/base/page_transition_types.h" using content::BrowserContext; using content::BrowserThread; using content::DownloadItem; using content::DownloadManager; -using content::URLRequestSlowDownloadJob; class BrowserCloseTest : public InProcessBrowserTest { public: @@ -112,7 +111,7 @@ class BrowserCloseTest : public InProcessBrowserTest { // provide any more data) so that we can test closing the // browser with active downloads. void CreateStalledDownloads(Browser* browser, int num_downloads) { - GURL url(URLRequestSlowDownloadJob::kKnownSizeUrl); + GURL url(net::URLRequestSlowDownloadJob::kKnownSizeUrl); if (num_downloads == 0) return; @@ -141,7 +140,7 @@ class BrowserCloseTest : public InProcessBrowserTest { // All all downloads created in CreateStalledDownloads() to // complete, and block in this routine until they do complete. void CompleteAllDownloads(Browser* browser) { - GURL finish_url(URLRequestSlowDownloadJob::kFinishDownloadUrl); + GURL finish_url(net::URLRequestSlowDownloadJob::kFinishDownloadUrl); ui_test_utils::NavigateToURL(browser, finish_url); // Go through and, for every single profile, wait until there are diff --git a/content/browser/download/download_browsertest.cc b/content/browser/download/download_browsertest.cc index 9ba699b..d8354b2 100644 --- a/content/browser/download/download_browsertest.cc +++ b/content/browser/download/download_browsertest.cc @@ -34,12 +34,12 @@ #include "content/shell/browser/shell_browser_context.h" #include "content/shell/browser/shell_download_manager_delegate.h" #include "content/shell/browser/shell_network_delegate.h" -#include "content/test/net/url_request_slow_download_job.h" #include "net/test/embedded_test_server/embedded_test_server.h" #include "net/test/embedded_test_server/http_request.h" #include "net/test/embedded_test_server/http_response.h" #include "net/test/spawned_test_server/spawned_test_server.h" #include "net/test/url_request/url_request_mock_http_job.h" +#include "net/test/url_request/url_request_slow_download_job.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" #include "url/gurl.h" @@ -583,7 +583,7 @@ class DownloadContentTest : public ContentBrowserTest { BrowserThread::PostTask( BrowserThread::IO, FROM_HERE, - base::Bind(&URLRequestSlowDownloadJob::AddUrlHandler)); + base::Bind(&net::URLRequestSlowDownloadJob::AddUrlHandler)); base::FilePath mock_base(GetTestFilePath("download", "")); BrowserThread::PostTask( BrowserThread::IO, @@ -747,7 +747,7 @@ class DownloadContentTest : public ContentBrowserTest { private: static void EnsureNoPendingDownloadJobsOnIO(bool* result) { - if (URLRequestSlowDownloadJob::NumberOutstandingRequests()) + if (net::URLRequestSlowDownloadJob::NumberOutstandingRequests()) *result = false; BrowserThread::PostTask( BrowserThread::UI, FROM_HERE, base::MessageLoop::QuitClosure()); @@ -765,7 +765,7 @@ IN_PROC_BROWSER_TEST_F(DownloadContentTest, DownloadCancelled) { // we're in the expected state. 
scoped_ptr<DownloadCreateObserver> observer( CreateInProgressWaiter(shell(), 1)); - NavigateToURL(shell(), GURL(URLRequestSlowDownloadJob::kUnknownSizeUrl)); + NavigateToURL(shell(), GURL(net::URLRequestSlowDownloadJob::kUnknownSizeUrl)); observer->WaitForFinished(); std::vector<DownloadItem*> downloads; @@ -792,7 +792,7 @@ IN_PROC_BROWSER_TEST_F(DownloadContentTest, MultiDownload) { // we're in the expected state. scoped_ptr<DownloadCreateObserver> observer1( CreateInProgressWaiter(shell(), 1)); - NavigateToURL(shell(), GURL(URLRequestSlowDownloadJob::kUnknownSizeUrl)); + NavigateToURL(shell(), GURL(net::URLRequestSlowDownloadJob::kUnknownSizeUrl)); observer1->WaitForFinished(); std::vector<DownloadItem*> downloads; @@ -819,7 +819,8 @@ IN_PROC_BROWSER_TEST_F(DownloadContentTest, MultiDownload) { // Allow the first request to finish. scoped_ptr<DownloadTestObserver> observer2(CreateWaiter(shell(), 1)); - NavigateToURL(shell(), GURL(URLRequestSlowDownloadJob::kFinishDownloadUrl)); + NavigateToURL(shell(), + GURL(net::URLRequestSlowDownloadJob::kFinishDownloadUrl)); observer2->WaitForFinished(); // Wait for the third request. EXPECT_EQ(1u, observer2->NumDownloadsSeenInState(DownloadItem::COMPLETE)); @@ -831,8 +832,8 @@ IN_PROC_BROWSER_TEST_F(DownloadContentTest, MultiDownload) { // |file1| should be full of '*'s, and |file2| should be the same as the // source file. base::FilePath file1(download1->GetTargetFilePath()); - size_t file_size1 = URLRequestSlowDownloadJob::kFirstDownloadSize + - URLRequestSlowDownloadJob::kSecondDownloadSize; + size_t file_size1 = net::URLRequestSlowDownloadJob::kFirstDownloadSize + + net::URLRequestSlowDownloadJob::kSecondDownloadSize; std::string expected_contents(file_size1, '*'); ASSERT_TRUE(VerifyFile(file1, expected_contents, file_size1)); @@ -971,7 +972,7 @@ IN_PROC_BROWSER_TEST_F(DownloadContentTest, ShutdownInProgress) { // Create a download that won't complete. scoped_ptr<DownloadCreateObserver> observer( CreateInProgressWaiter(shell(), 1)); - NavigateToURL(shell(), GURL(URLRequestSlowDownloadJob::kUnknownSizeUrl)); + NavigateToURL(shell(), GURL(net::URLRequestSlowDownloadJob::kUnknownSizeUrl)); observer->WaitForFinished(); // Get the item. 
diff --git a/content/browser/download/drag_download_file_browsertest.cc b/content/browser/download/drag_download_file_browsertest.cc index 0b1de7d..eee79ea 100644 --- a/content/browser/download/drag_download_file_browsertest.cc +++ b/content/browser/download/drag_download_file_browsertest.cc @@ -22,7 +22,6 @@ #include "content/shell/browser/shell.h" #include "content/shell/browser/shell_browser_context.h" #include "content/shell/browser/shell_download_manager_delegate.h" -#include "content/test/net/url_request_slow_download_job.h" #include "net/test/url_request/url_request_mock_http_job.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" diff --git a/content/content_tests.gypi b/content/content_tests.gypi index 3b0ee34..e8c7528 100644 --- a/content/content_tests.gypi +++ b/content/content_tests.gypi @@ -139,8 +139,6 @@ 'test/mock_weburlloader.h', 'test/net/url_request_abort_on_end_job.cc', 'test/net/url_request_abort_on_end_job.h', - 'test/net/url_request_slow_download_job.cc', - 'test/net/url_request_slow_download_job.h', 'test/ppapi_unittest.cc', 'test/ppapi_unittest.h', 'test/render_thread_impl_browser_test_ipc_helper.cc', diff --git a/net/BUILD.gn b/net/BUILD.gn index 0cdb77a..90bf394 100644 --- a/net/BUILD.gn +++ b/net/BUILD.gn @@ -680,6 +680,8 @@ source_set("test_support") { "test/url_request/url_request_mock_data_job.h", "test/url_request/url_request_mock_http_job.cc", "test/url_request/url_request_mock_http_job.h", + "test/url_request/url_request_slow_download_job.cc", + "test/url_request/url_request_slow_download_job.h", "url_request/test_url_fetcher_factory.cc", "url_request/test_url_fetcher_factory.h", "url_request/test_url_request_interceptor.cc", diff --git a/net/net.gyp b/net/net.gyp index 1273da5..834a4c6 100644 --- a/net/net.gyp +++ b/net/net.gyp @@ -968,6 +968,8 @@ 'test/url_request/url_request_mock_data_job.h', 'test/url_request/url_request_mock_http_job.cc', 'test/url_request/url_request_mock_http_job.h', + 'test/url_request/url_request_slow_download_job.cc', + 'test/url_request/url_request_slow_download_job.h', 'url_request/test_url_fetcher_factory.cc', 'url_request/test_url_fetcher_factory.h', 'url_request/test_url_request_interceptor.cc', diff --git a/content/test/net/url_request_slow_download_job.cc b/net/test/url_request/url_request_slow_download_job.cc index 1d813a2..718a9f0 100644 --- a/content/test/net/url_request_slow_download_job.cc +++ b/net/test/url_request/url_request_slow_download_job.cc @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
-#include "content/test/net/url_request_slow_download_job.h" +#include "net/test/url_request/url_request_slow_download_job.h" #include "base/bind.h" #include "base/compiler_specific.h" @@ -10,7 +10,6 @@ #include "base/message_loop/message_loop.h" #include "base/strings/string_util.h" #include "base/strings/stringprintf.h" -#include "content/public/browser/browser_thread.h" #include "net/base/io_buffer.h" #include "net/base/net_errors.h" #include "net/http/http_response_headers.h" @@ -18,16 +17,16 @@ #include "net/url_request/url_request_filter.h" #include "url/gurl.h" -namespace content { +namespace net { const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] = - "http://url.handled.by.slow.download/download-unknown-size"; + "http://url.handled.by.slow.download/download-unknown-size"; const char URLRequestSlowDownloadJob::kKnownSizeUrl[] = - "http://url.handled.by.slow.download/download-known-size"; + "http://url.handled.by.slow.download/download-known-size"; const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] = - "http://url.handled.by.slow.download/download-finish"; + "http://url.handled.by.slow.download/download-finish"; const char URLRequestSlowDownloadJob::kErrorDownloadUrl[] = - "http://url.handled.by.slow.download/download-error"; + "http://url.handled.by.slow.download/download-error"; const int URLRequestSlowDownloadJob::kFirstDownloadSize = 1024 * 35; const int URLRequestSlowDownloadJob::kSecondDownloadSize = 1024 * 10; @@ -38,14 +37,13 @@ base::LazyInstance<URLRequestSlowDownloadJob::SlowJobsSet>::Leaky void URLRequestSlowDownloadJob::Start() { base::MessageLoop::current()->PostTask( - FROM_HERE, - base::Bind(&URLRequestSlowDownloadJob::StartAsync, - weak_factory_.GetWeakPtr())); + FROM_HERE, base::Bind(&URLRequestSlowDownloadJob::StartAsync, + weak_factory_.GetWeakPtr())); } // static void URLRequestSlowDownloadJob::AddUrlHandler() { - net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance(); + URLRequestFilter* filter = URLRequestFilter::GetInstance(); filter->AddUrlHandler(GURL(kUnknownSizeUrl), &URLRequestSlowDownloadJob::Factory); filter->AddUrlHandler(GURL(kKnownSizeUrl), @@ -57,13 +55,12 @@ void URLRequestSlowDownloadJob::AddUrlHandler() { } // static -net::URLRequestJob* URLRequestSlowDownloadJob::Factory( - net::URLRequest* request, - net::NetworkDelegate* network_delegate, +URLRequestJob* URLRequestSlowDownloadJob::Factory( + URLRequest* request, + NetworkDelegate* network_delegate, const std::string& scheme) { - DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); - URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob( - request, network_delegate); + URLRequestSlowDownloadJob* job = + new URLRequestSlowDownloadJob(request, network_delegate); if (request->url().spec() != kFinishDownloadUrl && request->url().spec() != kErrorDownloadUrl) pending_requests_.Get().insert(job); @@ -72,32 +69,30 @@ net::URLRequestJob* URLRequestSlowDownloadJob::Factory( // static size_t URLRequestSlowDownloadJob::NumberOutstandingRequests() { - DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); return pending_requests_.Get().size(); } // static void URLRequestSlowDownloadJob::FinishPendingRequests() { typedef std::set<URLRequestSlowDownloadJob*> JobList; - DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); - for (JobList::iterator it = pending_requests_.Get().begin(); it != - pending_requests_.Get().end(); ++it) { + for (JobList::iterator it = pending_requests_.Get().begin(); + it != pending_requests_.Get().end(); ++it) { 
(*it)->set_should_finish_download(); } } void URLRequestSlowDownloadJob::ErrorPendingRequests() { typedef std::set<URLRequestSlowDownloadJob*> JobList; - DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); - for (JobList::iterator it = pending_requests_.Get().begin(); it != - pending_requests_.Get().end(); ++it) { + for (JobList::iterator it = pending_requests_.Get().begin(); + it != pending_requests_.Get().end(); ++it) { (*it)->set_should_error_download(); } } URLRequestSlowDownloadJob::URLRequestSlowDownloadJob( - net::URLRequest* request, net::NetworkDelegate* network_delegate) - : net::URLRequestJob(request, network_delegate), + URLRequest* request, + NetworkDelegate* network_delegate) + : URLRequestJob(request, network_delegate), bytes_already_sent_(0), should_error_download_(false), should_finish_download_(false), @@ -134,11 +129,12 @@ void URLRequestSlowDownloadJob::StartAsync() { // out where in the state machine we are and how we should fill the buffer. // It returns an enum indicating the state of the read. URLRequestSlowDownloadJob::ReadStatus -URLRequestSlowDownloadJob::FillBufferHelper( - net::IOBuffer* buf, int buf_size, int* bytes_written) { +URLRequestSlowDownloadJob::FillBufferHelper(IOBuffer* buf, + int buf_size, + int* bytes_written) { if (bytes_already_sent_ < kFirstDownloadSize) { - int bytes_to_write = std::min(kFirstDownloadSize - bytes_already_sent_, - buf_size); + int bytes_to_write = + std::min(kFirstDownloadSize - bytes_already_sent_, buf_size); for (int i = 0; i < bytes_to_write; ++i) { buf->data()[i] = '*'; } @@ -165,19 +161,19 @@ URLRequestSlowDownloadJob::FillBufferHelper( return REQUEST_COMPLETE; } -bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, +bool URLRequestSlowDownloadJob::ReadRawData(IOBuffer* buf, + int buf_size, int* bytes_read) { if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()) || - LowerCaseEqualsASCII(kErrorDownloadUrl, - request_->url().spec().c_str())) { + LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) { VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl."; *bytes_read = 0; return true; } - VLOG(10) << __FUNCTION__ << " called at position " - << bytes_already_sent_ << " in the stream."; + VLOG(10) << __FUNCTION__ << " called at position " << bytes_already_sent_ + << " in the stream."; ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read); switch (status) { case BUFFER_FILLED: @@ -185,11 +181,10 @@ bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size, case REQUEST_BLOCKED: buffer_ = buf; buffer_size_ = buf_size; - SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0)); + SetStatus(URLRequestStatus(URLRequestStatus::IO_PENDING, 0)); base::MessageLoop::current()->PostDelayedTask( - FROM_HERE, - base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus, - weak_factory_.GetWeakPtr()), + FROM_HERE, base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus, + weak_factory_.GetWeakPtr()), base::TimeDelta::FromMilliseconds(100)); return false; case REQUEST_COMPLETE: @@ -208,67 +203,63 @@ void URLRequestSlowDownloadJob::CheckDoneStatus() { ReadStatus status = FillBufferHelper(buffer_.get(), buffer_size_, &bytes_written); DCHECK_EQ(BUFFER_FILLED, status); - buffer_ = NULL; // Release the reference. - SetStatus(net::URLRequestStatus()); + buffer_ = NULL; // Release the reference. 
+ SetStatus(URLRequestStatus()); NotifyReadComplete(bytes_written); } else if (should_error_download_) { VLOG(10) << __FUNCTION__ << " called w/ should_finish_ownload_ set."; - NotifyDone(net::URLRequestStatus( - net::URLRequestStatus::FAILED, net::ERR_CONNECTION_RESET)); + NotifyDone( + URLRequestStatus(URLRequestStatus::FAILED, ERR_CONNECTION_RESET)); } else { base::MessageLoop::current()->PostDelayedTask( - FROM_HERE, - base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus, - weak_factory_.GetWeakPtr()), + FROM_HERE, base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus, + weak_factory_.GetWeakPtr()), base::TimeDelta::FromMilliseconds(100)); } } // Public virtual version. -void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) { +void URLRequestSlowDownloadJob::GetResponseInfo(HttpResponseInfo* info) { // Forward to private const version. GetResponseInfoConst(info); } URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() { - DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); pending_requests_.Get().erase(this); } // Private const version. void URLRequestSlowDownloadJob::GetResponseInfoConst( - net::HttpResponseInfo* info) const { + HttpResponseInfo* info) const { // Send back mock headers. std::string raw_headers; if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()) || - LowerCaseEqualsASCII(kErrorDownloadUrl, - request_->url().spec().c_str())) { + LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str())) { raw_headers.append( - "HTTP/1.1 200 OK\n" - "Content-type: text/plain\n"); + "HTTP/1.1 200 OK\n" + "Content-type: text/plain\n"); } else { raw_headers.append( - "HTTP/1.1 200 OK\n" - "Content-type: application/octet-stream\n" - "Cache-Control: max-age=0\n"); + "HTTP/1.1 200 OK\n" + "Content-type: application/octet-stream\n" + "Cache-Control: max-age=0\n"); if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) { raw_headers.append(base::StringPrintf( - "Content-Length: %d\n", - kFirstDownloadSize + kSecondDownloadSize)); + "Content-Length: %d\n", kFirstDownloadSize + kSecondDownloadSize)); } } // ParseRawHeaders expects \0 to end each header line. ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1)); - info->headers = new net::HttpResponseHeaders(raw_headers); + info->headers = new HttpResponseHeaders(raw_headers); } bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const { - net::HttpResponseInfo info; + HttpResponseInfo info; GetResponseInfoConst(&info); return info.headers.get() && info.headers->GetMimeType(mime_type); } -} // namespace content +} // namespace net diff --git a/content/test/net/url_request_slow_download_job.h b/net/test/url_request/url_request_slow_download_job.h index 08b6dc0..a264a84 100644 --- a/content/test/net/url_request_slow_download_job.h +++ b/net/test/url_request/url_request_slow_download_job.h @@ -1,13 +1,12 @@ // Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// This class simulates a slow download. This used in a UI test to test the -// download manager. Requests to |kUnknownSizeUrl| and |kKnownSizeUrl| start -// downloads that pause after the first N bytes, to be completed by sending a -// request to |kFinishDownloadUrl|. +// This class simulates a slow download. 
Requests to |kUnknownSizeUrl| and +// |kKnownSizeUrl| start downloads that pause after the first N bytes, to be +// completed by sending a request to |kFinishDownloadUrl|. -#ifndef CONTENT_TEST_NET_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_ -#define CONTENT_TEST_NET_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_ +#ifndef NET_TEST_URL_REQUEST_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_ +#define NET_TEST_URL_REQUEST_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_ #include <set> #include <string> @@ -16,9 +15,9 @@ #include "base/memory/weak_ptr.h" #include "net/url_request/url_request_job.h" -namespace content { +namespace net { -class URLRequestSlowDownloadJob : public net::URLRequestJob { +class URLRequestSlowDownloadJob : public URLRequestJob { public: // Test URLs. static const char kUnknownSizeUrl[]; @@ -34,26 +33,26 @@ class URLRequestSlowDownloadJob : public net::URLRequestJob { // send the second chunk. void CheckDoneStatus(); - // net::URLRequestJob methods + // URLRequestJob methods void Start() override; bool GetMimeType(std::string* mime_type) const override; - void GetResponseInfo(net::HttpResponseInfo* info) override; - bool ReadRawData(net::IOBuffer* buf, int buf_size, int* bytes_read) override; + void GetResponseInfo(HttpResponseInfo* info) override; + bool ReadRawData(IOBuffer* buf, int buf_size, int* bytes_read) override; - static net::URLRequestJob* Factory(net::URLRequest* request, - net::NetworkDelegate* network_delegate, - const std::string& scheme); + static URLRequestJob* Factory(URLRequest* request, + NetworkDelegate* network_delegate, + const std::string& scheme); // Returns the current number of URLRequestSlowDownloadJobs that have // not yet completed. static size_t NumberOutstandingRequests(); - // Adds the testing URLs to the net::URLRequestFilter. + // Adds the testing URLs to the URLRequestFilter. static void AddUrlHandler(); private: - URLRequestSlowDownloadJob(net::URLRequest* request, - net::NetworkDelegate* network_delegate); + URLRequestSlowDownloadJob(URLRequest* request, + NetworkDelegate* network_delegate); ~URLRequestSlowDownloadJob() override; // Enum indicating where we are in the read after a call to @@ -70,12 +69,9 @@ class URLRequestSlowDownloadJob : public net::URLRequestJob { // all the data. REQUEST_COMPLETE }; - ReadStatus FillBufferHelper( - net::IOBuffer* buf, - int buf_size, - int* bytes_written); + ReadStatus FillBufferHelper(IOBuffer* buf, int buf_size, int* bytes_written); - void GetResponseInfoConst(net::HttpResponseInfo* info) const; + void GetResponseInfoConst(HttpResponseInfo* info) const; // Mark all pending requests to be finished. We keep track of pending // requests in |pending_requests_|. @@ -92,7 +88,7 @@ class URLRequestSlowDownloadJob : public net::URLRequestJob { int bytes_already_sent_; bool should_error_download_; bool should_finish_download_; - scoped_refptr<net::IOBuffer> buffer_; + scoped_refptr<IOBuffer> buffer_; int buffer_size_; base::WeakPtrFactory<URLRequestSlowDownloadJob> weak_factory_; @@ -100,4 +96,4 @@ class URLRequestSlowDownloadJob : public net::URLRequestJob { } // namespace content -#endif // CONTENT_TEST_NET_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_ +#endif // NET_TEST_URL_REQUEST_URL_REQUEST_SLOW_DOWNLOAD_JOB_H_ |