path: root/net/proxy
author     eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-09-18 21:10:04 +0000
committer  eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-09-18 21:10:04 +0000
commit     d3794cf92b0b13181067e9cddaad436c01e009c9 (patch)
tree       04468e6793769bbe5941485142f72c5caa164537 /net/proxy
parent     1877a22169d10e18287d334f0259ac96d3ab1a52 (diff)
download   chromium_src-d3794cf92b0b13181067e9cddaad436c01e009c9.zip
           chromium_src-d3794cf92b0b13181067e9cddaad436c01e009c9.tar.gz
           chromium_src-d3794cf92b0b13181067e9cddaad436c01e009c9.tar.bz2
Simplify a unittest by removing the helper thread.
This is test clean-up -- the way I had originally structured that test with synch wrappers was overly complicated. I am also hoping that this refactor will obviate bug 15147, which looks like it could be a data race in ProxyScriptFetcherTest.NoCache due to having multiple concurrent IO threads (the test helper thread, as well as the internal IO thread that HTTPTestServer spawns for MakeGetRequest).

BUG=http://crbug.com/15147
Review URL: http://codereview.chromium.org/211006

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@26611 0039d316-1c4b-4281-b951-d872f2087c98
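For orientation, the sketch below (not part of the commit) shows the per-fetch pattern the diff adopts in place of the SynchFetcher helper. ProxyScriptFetcher, TestCompletionCallback, Fetch(), and the Create() factory all appear in the diff; the FetchPacScript helper and the exact header paths are illustrative assumptions. The key point is that everything runs on the test's own message loop, so no second IO thread is needed:

#include <string>

#include "base/scoped_ptr.h"
#include "googleurl/src/gurl.h"
#include "net/base/net_errors.h"
#include "net/base/test_completion_callback.h"
#include "net/proxy/proxy_script_fetcher.h"
#include "net/url_request/url_request_context.h"

// Hypothetical helper (not in the commit): starts a PAC fetch for |url| and
// pumps the current message loop until it completes, returning the net error
// code. |bytes| receives the downloaded script body.
int FetchPacScript(URLRequestContext* context, const GURL& url,
                   std::string* bytes) {
  scoped_ptr<net::ProxyScriptFetcher> fetcher(
      net::ProxyScriptFetcher::Create(context));
  TestCompletionCallback callback;
  int rv = fetcher->Fetch(url, bytes, &callback);
  if (rv != net::ERR_IO_PENDING)
    return rv;  // Completed (or failed to start) synchronously.
  // WaitForResult() spins the message loop until the callback fires.
  return callback.WaitForResult();
}

Each test block in the new code is essentially this pattern inlined, followed by EXPECTs on the returned error code and on |bytes|.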
Diffstat (limited to 'net/proxy')
-rw-r--r--  net/proxy/proxy_script_fetcher_unittest.cc  279
1 file changed, 116 insertions(+), 163 deletions(-)
diff --git a/net/proxy/proxy_script_fetcher_unittest.cc b/net/proxy/proxy_script_fetcher_unittest.cc
index e56e4a8..e94e20b 100644
--- a/net/proxy/proxy_script_fetcher_unittest.cc
+++ b/net/proxy/proxy_script_fetcher_unittest.cc
@@ -9,6 +9,7 @@
#include "base/path_service.h"
#include "net/base/net_util.h"
#include "net/base/ssl_config_service_defaults.h"
+#include "net/base/test_completion_callback.h"
#include "net/disk_cache/disk_cache.h"
#include "net/http/http_cache.h"
#include "net/url_request/url_request_unittest.h"
@@ -45,116 +46,6 @@ class RequestContext : public URLRequestContext {
}
};
-// Helper for doing synch fetches. This object lives in SynchFetcher's
-// |io_thread_| and communicates with SynchFetcher though (|result|, |event|).
-class SynchFetcherThreadHelper {
- public:
- SynchFetcherThreadHelper(base::WaitableEvent* event, FetchResult* result)
- : event_(event),
- fetch_result_(result),
- url_request_context_(NULL),
- fetcher_(NULL),
- ALLOW_THIS_IN_INITIALIZER_LIST(
- callback_(this, &SynchFetcherThreadHelper::OnFetchCompletion)) {
- url_request_context_ = new RequestContext;
- fetcher_.reset(net::ProxyScriptFetcher::Create(url_request_context_.get()));
- }
-
- // Starts fetching the script at |url|. Upon completion |event_| will be
- // signalled, and the bytes read will have been written to |fetch_result_|.
- void Start(const GURL& url) {
- int rv = fetcher_->Fetch(url, &fetch_result_->bytes, &callback_);
- EXPECT_EQ(net::ERR_IO_PENDING, rv);
- }
-
- void OnFetchCompletion(int result) {
- fetch_result_->code = result;
- event_->Signal();
- }
-
- private:
- base::WaitableEvent* event_;
- FetchResult* fetch_result_;
-
- scoped_refptr<URLRequestContext> url_request_context_;
-
- scoped_ptr<net::ProxyScriptFetcher> fetcher_;
- net::CompletionCallbackImpl<SynchFetcherThreadHelper> callback_;
-};
-
-// Helper that wraps ProxyScriptFetcher::Fetch() with a synchronous interface.
-// It executes Fetch() on a helper thread (IO_Thread).
-class SynchFetcher {
- public:
- SynchFetcher()
- : event_(false, false),
- io_thread_("IO_Thread"),
- thread_helper_(NULL) {
- // Start an IO thread.
- base::Thread::Options options;
- options.message_loop_type = MessageLoop::TYPE_IO;
- io_thread_.StartWithOptions(options);
-
- // Initialize the state in |io_thread_|.
- io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
- this, &SynchFetcher::Init));
- Wait();
- }
-
- ~SynchFetcher() {
- // Tear down the state in |io_thread_|.
- io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
- this, &SynchFetcher::Cleanup));
- Wait();
- }
-
- // Synchronously fetch the url.
- FetchResult Fetch(const GURL& url) {
- io_thread_.message_loop()->PostTask(FROM_HERE, NewRunnableMethod(
- this, &SynchFetcher::AsynchFetch, url));
- Wait();
- return fetch_result_;
- }
-
- private:
- // [Runs on |io_thread_|] Allocates the URLRequestContext and the
- // ProxyScriptFetcher, which live inside |thread_helper_|.
- void Init() {
- thread_helper_ = new SynchFetcherThreadHelper(&event_, &fetch_result_);
- event_.Signal();
- }
-
- // [Runs on |io_thread_|] Signals |event_| on completion.
- void AsynchFetch(const GURL& url) {
- thread_helper_->Start(url);
- }
-
- // [Runs on |io_thread_|] Signals |event_| on cleanup completion.
- void Cleanup() {
- delete thread_helper_;
- thread_helper_ = NULL;
- MessageLoop::current()->RunAllPending();
- event_.Signal();
- }
-
- void Wait() {
- event_.Wait();
- event_.Reset();
- }
-
- base::WaitableEvent event_;
- base::Thread io_thread_;
- FetchResult fetch_result_;
- // Holds all the state that lives on the IO thread, for easy cleanup.
- SynchFetcherThreadHelper* thread_helper_;
-};
-
-// Template specialization so SynchFetcher does not have to be refcounted.
-template<>
-void RunnableMethodTraits<SynchFetcher>::RetainCallee(SynchFetcher* remover) {}
-template<>
-void RunnableMethodTraits<SynchFetcher>::ReleaseCallee(SynchFetcher* remover) {}
-
// Required to be in net namespace by FRIEND_TEST.
namespace net {
@@ -165,24 +56,34 @@ GURL GetTestFileUrl(const std::string& relpath) {
path = path.AppendASCII("net");
path = path.AppendASCII("data");
path = path.AppendASCII("proxy_script_fetcher_unittest");
- GURL base_url = net::FilePathToFileURL(path);
+ GURL base_url = FilePathToFileURL(path);
return GURL(base_url.spec() + "/" + relpath);
}
typedef PlatformTest ProxyScriptFetcherTest;
TEST_F(ProxyScriptFetcherTest, FileUrl) {
- SynchFetcher pac_fetcher;
+ scoped_refptr<URLRequestContext> context = new RequestContext;
+ scoped_ptr<ProxyScriptFetcher> pac_fetcher(
+ ProxyScriptFetcher::Create(context));
{ // Fetch a non-existent file.
- FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"));
- EXPECT_EQ(net::ERR_FILE_NOT_FOUND, result.code);
- EXPECT_TRUE(result.bytes.empty());
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(GetTestFileUrl("does-not-exist"),
+ &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult());
+ EXPECT_TRUE(bytes.empty());
}
{ // Fetch a file that exists.
- FetchResult result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"));
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-pac.txt-\n", result.bytes);
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(GetTestFileUrl("pac.txt"),
+ &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-pac.txt-\n", bytes);
}
}
@@ -192,25 +93,36 @@ TEST_F(ProxyScriptFetcherTest, HttpMimeType) {
scoped_refptr<HTTPTestServer> server =
HTTPTestServer::CreateServer(kDocRoot, NULL);
ASSERT_TRUE(NULL != server.get());
- SynchFetcher pac_fetcher;
+ scoped_refptr<URLRequestContext> context = new RequestContext;
+ scoped_ptr<ProxyScriptFetcher> pac_fetcher(
+ ProxyScriptFetcher::Create(context));
{ // Fetch a PAC with mime type "text/plain"
GURL url = server->TestServerPage("files/pac.txt");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-pac.txt-\n", result.bytes);
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-pac.txt-\n", bytes);
}
{ // Fetch a PAC with mime type "text/html"
GURL url = server->TestServerPage("files/pac.html");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-pac.html-\n", result.bytes);
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-pac.html-\n", bytes);
}
{ // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig"
GURL url = server->TestServerPage("files/pac.nsproxy");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-pac.nsproxy-\n", bytes);
}
}
@@ -218,19 +130,27 @@ TEST_F(ProxyScriptFetcherTest, HttpStatusCode) {
scoped_refptr<HTTPTestServer> server =
HTTPTestServer::CreateServer(kDocRoot, NULL);
ASSERT_TRUE(NULL != server.get());
- SynchFetcher pac_fetcher;
+ scoped_refptr<URLRequestContext> context = new RequestContext;
+ scoped_ptr<ProxyScriptFetcher> pac_fetcher(
+ ProxyScriptFetcher::Create(context));
{ // Fetch a PAC which gives a 500 -- FAIL
GURL url = server->TestServerPage("files/500.pac");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code);
- EXPECT_TRUE(result.bytes.empty());
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
+ EXPECT_TRUE(bytes.empty());
}
{ // Fetch a PAC which gives a 404 -- FAIL
GURL url = server->TestServerPage("files/404.pac");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::ERR_PAC_STATUS_NOT_OK, result.code);
- EXPECT_TRUE(result.bytes.empty());
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
+ EXPECT_TRUE(bytes.empty());
}
}
@@ -238,27 +158,39 @@ TEST_F(ProxyScriptFetcherTest, ContentDisposition) {
scoped_refptr<HTTPTestServer> server =
HTTPTestServer::CreateServer(kDocRoot, NULL);
ASSERT_TRUE(NULL != server.get());
- SynchFetcher pac_fetcher;
+ scoped_refptr<URLRequestContext> context = new RequestContext;
+ scoped_ptr<ProxyScriptFetcher> pac_fetcher(
+ ProxyScriptFetcher::Create(context));
// Fetch PAC scripts via HTTP with a Content-Disposition header -- should
// have no effect.
GURL url = server->TestServerPage("files/downloadable.pac");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-downloadable.pac-\n", result.bytes);
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-downloadable.pac-\n", bytes);
}
TEST_F(ProxyScriptFetcherTest, NoCache) {
scoped_refptr<HTTPTestServer> server =
HTTPTestServer::CreateServer(kDocRoot, NULL);
ASSERT_TRUE(NULL != server.get());
- SynchFetcher pac_fetcher;
+ scoped_refptr<URLRequestContext> context = new RequestContext;
+ scoped_ptr<ProxyScriptFetcher> pac_fetcher(
+ ProxyScriptFetcher::Create(context));
// Fetch a PAC script whose HTTP headers make it cacheable for 1 hour.
GURL url = server->TestServerPage("files/cacheable_1hr.pac");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-cacheable_1hr.pac-\n", result.bytes);
+ {
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-cacheable_1hr.pac-\n", bytes);
+ }
// Now kill the HTTP server.
server->SendQuit();
@@ -268,18 +200,25 @@ TEST_F(ProxyScriptFetcherTest, NoCache) {
// Try to fetch the file again -- if should fail, since the server is not
// running anymore. (If it were instead being loaded from cache, we would
// get a success.
- result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::ERR_CONNECTION_REFUSED, result.code);
+ {
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(ERR_CONNECTION_REFUSED, callback.WaitForResult());
+ }
}
TEST_F(ProxyScriptFetcherTest, TooLarge) {
scoped_refptr<HTTPTestServer> server =
HTTPTestServer::CreateServer(kDocRoot, NULL);
ASSERT_TRUE(NULL != server.get());
- SynchFetcher pac_fetcher;
+ scoped_refptr<URLRequestContext> context = new RequestContext;
+ scoped_ptr<ProxyScriptFetcher> pac_fetcher(
+ ProxyScriptFetcher::Create(context));
// Set the maximum response size to 50 bytes.
- int prev_size = net::ProxyScriptFetcher::SetSizeConstraintForUnittest(50);
+ int prev_size = ProxyScriptFetcher::SetSizeConstraintForUnittest(50);
// These two URLs are the same file, but are http:// vs file://
GURL urls[] = {
@@ -291,19 +230,25 @@ TEST_F(ProxyScriptFetcherTest, TooLarge) {
// after 50 bytes have been read, and fail with a too large error.
for (size_t i = 0; i < arraysize(urls); ++i) {
const GURL& url = urls[i];
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::ERR_FILE_TOO_BIG, result.code);
- EXPECT_TRUE(result.bytes.empty());
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult());
+ EXPECT_TRUE(bytes.empty());
}
// Restore the original size bound.
- net::ProxyScriptFetcher::SetSizeConstraintForUnittest(prev_size);
+ ProxyScriptFetcher::SetSizeConstraintForUnittest(prev_size);
{ // Make sure we can still fetch regular URLs.
GURL url = server->TestServerPage("files/pac.nsproxy");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-pac.nsproxy-\n", bytes);
}
}
@@ -311,28 +256,36 @@ TEST_F(ProxyScriptFetcherTest, Hang) {
scoped_refptr<HTTPTestServer> server =
HTTPTestServer::CreateServer(kDocRoot, NULL);
ASSERT_TRUE(NULL != server.get());
- SynchFetcher pac_fetcher;
+ scoped_refptr<URLRequestContext> context = new RequestContext;
+ scoped_ptr<ProxyScriptFetcher> pac_fetcher(
+ ProxyScriptFetcher::Create(context));
// Set the timeout period to 0.5 seconds.
int prev_timeout =
- net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(500);
+ ProxyScriptFetcher::SetTimeoutConstraintForUnittest(500);
// Try fetching a URL which takes 1.2 seconds. We should abort the request
// after 500 ms, and fail with a timeout error.
{ GURL url = server->TestServerPage("slow/proxy.pac?1.2");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::ERR_TIMED_OUT, result.code);
- EXPECT_TRUE(result.bytes.empty());
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult());
+ EXPECT_TRUE(bytes.empty());
}
// Restore the original timeout period.
- net::ProxyScriptFetcher::SetTimeoutConstraintForUnittest(prev_timeout);
+ ProxyScriptFetcher::SetTimeoutConstraintForUnittest(prev_timeout);
{ // Make sure we can still fetch regular URLs.
GURL url = server->TestServerPage("files/pac.nsproxy");
- FetchResult result = pac_fetcher.Fetch(url);
- EXPECT_EQ(net::OK, result.code);
- EXPECT_EQ("-pac.nsproxy-\n", result.bytes);
+ std::string bytes;
+ TestCompletionCallback callback;
+ int result = pac_fetcher->Fetch(url, &bytes, &callback);
+ EXPECT_EQ(ERR_IO_PENDING, result);
+ EXPECT_EQ(OK, callback.WaitForResult());
+ EXPECT_EQ("-pac.nsproxy-\n", bytes);
}
}