author     tfarina@chromium.org <tfarina@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-01-19 11:48:19 +0000
committer  tfarina@chromium.org <tfarina@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-01-19 11:48:19 +0000
commit     da968bc16c3a459b350e6b941af07c99ed201764 (patch)
tree       2b545b1631ace146fde83064dbad50b15a65776d /net/proxy
parent     d2ad7b4cf05551da4e832a6cc038f57e308ba8ca (diff)
net: Remove prefix net:: from some places that already are in namespace net.
This should address the wtc review in http://codereview.chromium.org/6338002/

BUG=64263
TEST=trybots

Review URL: http://codereview.chromium.org/6322003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@71787 0039d316-1c4b-4281-b951-d872f2087c98
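For readers skimming the diff below: the change is purely mechanical. Inside a namespace net { ... } block, the net:: qualifier on names such as URLRequestContext is legal but redundant, so declarations can use the unqualified name directly. A minimal sketch of the pattern (illustrative only, not the actual Chromium headers):

// Minimal sketch of the cleanup applied by this patch (illustrative only;
// not the real Chromium headers).
namespace net {

class URLRequestContext;  // Already declared inside namespace net.

class ProxyScriptFetcher {
 public:
  virtual ~ProxyScriptFetcher() {}

  // Before the patch this was spelled net::URLRequestContext*, which is
  // legal but redundant here because the declaration already lives inside
  // namespace net. The unqualified name resolves to the same type.
  virtual URLRequestContext* GetRequestContext() = 0;
};

}  // namespace net

The same rule drives every hunk below; only the spelling of the names changes, not the behavior.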
Diffstat (limited to 'net/proxy')
-rw-r--r--  net/proxy/init_proxy_resolver_unittest.cc        | 12
-rw-r--r--  net/proxy/proxy_script_fetcher.h                  |  2
-rw-r--r--  net/proxy/proxy_script_fetcher_impl.cc            | 34
-rw-r--r--  net/proxy/proxy_script_fetcher_impl.h             | 30
-rw-r--r--  net/proxy/proxy_script_fetcher_impl_unittest.cc   | 22
-rw-r--r--  net/proxy/proxy_service_unittest.cc               | 10
6 files changed, 55 insertions, 55 deletions
diff --git a/net/proxy/init_proxy_resolver_unittest.cc b/net/proxy/init_proxy_resolver_unittest.cc
index 91097d0e..b0d416d 100644
--- a/net/proxy/init_proxy_resolver_unittest.cc
+++ b/net/proxy/init_proxy_resolver_unittest.cc
@@ -107,7 +107,7 @@ class RuleBasedProxyScriptFetcher : public ProxyScriptFetcher {
virtual void Cancel() {}
- virtual net::URLRequestContext* GetRequestContext() { return NULL; }
+ virtual URLRequestContext* GetRequestContext() { return NULL; }
private:
const Rules* rules_;
@@ -187,7 +187,7 @@ TEST(InitProxyResolverTest, CustomPacSucceeds) {
EXPECT_EQ(rule.text(), resolver.script_data()->utf16());
// Check the NetLog was filled correctly.
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_EQ(6u, entries.size());
@@ -224,7 +224,7 @@ TEST(InitProxyResolverTest, CustomPacFails1) {
EXPECT_EQ(NULL, resolver.script_data());
// Check the NetLog was filled correctly.
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_EQ(4u, entries.size());
@@ -338,7 +338,7 @@ TEST(InitProxyResolverTest, AutodetectFailCustomSuccess2) {
// Check the NetLog was filled correctly.
// (Note that the Fetch and Set states are repeated since both WPAD and custom
// PAC scripts are tried).
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_EQ(11u, entries.size());
@@ -453,7 +453,7 @@ TEST(InitProxyResolverTest, CustomPacFails1_WithPositiveDelay) {
EXPECT_EQ(NULL, resolver.script_data());
// Check the NetLog was filled correctly.
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_EQ(6u, entries.size());
@@ -493,7 +493,7 @@ TEST(InitProxyResolverTest, CustomPacFails1_WithNegativeDelay) {
EXPECT_EQ(NULL, resolver.script_data());
// Check the NetLog was filled correctly.
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_EQ(4u, entries.size());
diff --git a/net/proxy/proxy_script_fetcher.h b/net/proxy/proxy_script_fetcher.h
index e42d979..9829316 100644
--- a/net/proxy/proxy_script_fetcher.h
+++ b/net/proxy/proxy_script_fetcher.h
@@ -49,7 +49,7 @@ class ProxyScriptFetcher {
// Returns the request context that this fetcher uses to issue downloads,
// or NULL.
- virtual net::URLRequestContext* GetRequestContext() = 0;
+ virtual URLRequestContext* GetRequestContext() = 0;
};
} // namespace net
diff --git a/net/proxy/proxy_script_fetcher_impl.cc b/net/proxy/proxy_script_fetcher_impl.cc
index 0724127..7d22ed3 100644
--- a/net/proxy/proxy_script_fetcher_impl.cc
+++ b/net/proxy/proxy_script_fetcher_impl.cc
@@ -70,10 +70,10 @@ void ConvertResponseToUTF16(const std::string& charset,
} // namespace
ProxyScriptFetcherImpl::ProxyScriptFetcherImpl(
- net::URLRequestContext* url_request_context)
+ URLRequestContext* url_request_context)
: ALLOW_THIS_IN_INITIALIZER_LIST(task_factory_(this)),
url_request_context_(url_request_context),
- buf_(new net::IOBuffer(kBufSize)),
+ buf_(new IOBuffer(kBufSize)),
next_id_(0),
cur_request_(NULL),
cur_request_id_(0),
@@ -86,7 +86,7 @@ ProxyScriptFetcherImpl::ProxyScriptFetcherImpl(
}
ProxyScriptFetcherImpl::~ProxyScriptFetcherImpl() {
- // The net::URLRequest's destructor will cancel the outstanding request, and
+ // The URLRequest's destructor will cancel the outstanding request, and
// ensure that the delegate (this) is not called again.
}
@@ -99,7 +99,7 @@ int ProxyScriptFetcherImpl::Fetch(const GURL& url,
DCHECK(callback);
DCHECK(text);
- cur_request_.reset(new net::URLRequest(url, this));
+ cur_request_.reset(new URLRequest(url, this));
cur_request_->set_context(url_request_context_);
cur_request_->set_method("GET");
@@ -129,16 +129,16 @@ int ProxyScriptFetcherImpl::Fetch(const GURL& url,
}
void ProxyScriptFetcherImpl::Cancel() {
- // ResetCurRequestState will free the net::URLRequest, which will cause
+ // ResetCurRequestState will free the URLRequest, which will cause
// cancellation.
ResetCurRequestState();
}
-net::URLRequestContext* ProxyScriptFetcherImpl::GetRequestContext() {
+URLRequestContext* ProxyScriptFetcherImpl::GetRequestContext() {
return url_request_context_;
}
-void ProxyScriptFetcherImpl::OnAuthRequired(net::URLRequest* request,
+void ProxyScriptFetcherImpl::OnAuthRequired(URLRequest* request,
AuthChallengeInfo* auth_info) {
DCHECK_EQ(request, cur_request_.get());
// TODO(eroman):
@@ -147,7 +147,7 @@ void ProxyScriptFetcherImpl::OnAuthRequired(net::URLRequest* request,
request->CancelAuth();
}
-void ProxyScriptFetcherImpl::OnSSLCertificateError(net::URLRequest* request,
+void ProxyScriptFetcherImpl::OnSSLCertificateError(URLRequest* request,
int cert_error,
X509Certificate* cert) {
DCHECK_EQ(request, cur_request_.get());
@@ -157,7 +157,7 @@ void ProxyScriptFetcherImpl::OnSSLCertificateError(net::URLRequest* request,
request->Cancel();
}
-void ProxyScriptFetcherImpl::OnResponseStarted(net::URLRequest* request) {
+void ProxyScriptFetcherImpl::OnResponseStarted(URLRequest* request) {
DCHECK_EQ(request, cur_request_.get());
if (!request->status().is_success()) {
@@ -191,7 +191,7 @@ void ProxyScriptFetcherImpl::OnResponseStarted(net::URLRequest* request) {
ReadBody(request);
}
-void ProxyScriptFetcherImpl::OnReadCompleted(net::URLRequest* request,
+void ProxyScriptFetcherImpl::OnReadCompleted(URLRequest* request,
int num_bytes) {
DCHECK_EQ(request, cur_request_.get());
if (ConsumeBytesRead(request, num_bytes)) {
@@ -200,7 +200,7 @@ void ProxyScriptFetcherImpl::OnReadCompleted(net::URLRequest* request,
}
}
-void ProxyScriptFetcherImpl::OnResponseCompleted(net::URLRequest* request) {
+void ProxyScriptFetcherImpl::OnResponseCompleted(URLRequest* request) {
DCHECK_EQ(request, cur_request_.get());
// Use |result_code_| as the request's error if we have already set it to
@@ -211,7 +211,7 @@ void ProxyScriptFetcherImpl::OnResponseCompleted(net::URLRequest* request) {
FetchCompleted();
}
-void ProxyScriptFetcherImpl::ReadBody(net::URLRequest* request) {
+void ProxyScriptFetcherImpl::ReadBody(URLRequest* request) {
// Read as many bytes as are available synchronously.
while (true) {
int num_bytes;
@@ -226,7 +226,7 @@ void ProxyScriptFetcherImpl::ReadBody(net::URLRequest* request) {
}
}
-bool ProxyScriptFetcherImpl::ConsumeBytesRead(net::URLRequest* request,
+bool ProxyScriptFetcherImpl::ConsumeBytesRead(URLRequest* request,
int num_bytes) {
if (num_bytes <= 0) {
// Error while reading, or EOF.
@@ -260,9 +260,9 @@ void ProxyScriptFetcherImpl::FetchCompleted() {
int result_code = result_code_;
CompletionCallback* callback = callback_;
- // Hold a reference to the net::URLRequestContext to prevent re-entrancy from
- // ~net::URLRequestContext.
- scoped_refptr<net::URLRequestContext> context(cur_request_->context());
+ // Hold a reference to the URLRequestContext to prevent re-entrancy from
+ // ~URLRequestContext.
+ scoped_refptr<URLRequestContext> context(cur_request_->context());
ResetCurRequestState();
callback->Run(result_code);
@@ -277,7 +277,7 @@ void ProxyScriptFetcherImpl::ResetCurRequestState() {
}
void ProxyScriptFetcherImpl::OnTimeout(int id) {
- // Timeout tasks may outlive the net::URLRequest they reference. Make sure it
+ // Timeout tasks may outlive the URLRequest they reference. Make sure it
// is still applicable.
if (cur_request_id_ != id)
return;
diff --git a/net/proxy/proxy_script_fetcher_impl.h b/net/proxy/proxy_script_fetcher_impl.h
index 2dc0335..65c16f7 100644
--- a/net/proxy/proxy_script_fetcher_impl.h
+++ b/net/proxy/proxy_script_fetcher_impl.h
@@ -25,7 +25,7 @@ class URLRequestContext;
// Implementation of ProxyScriptFetcher that downloads scripts using the
// specified request context.
class ProxyScriptFetcherImpl : public ProxyScriptFetcher,
- public net::URLRequest::Delegate {
+ public URLRequest::Delegate {
public:
// Creates a ProxyScriptFetcher that issues requests through
// |url_request_context|. |url_request_context| must remain valid for the
@@ -33,7 +33,7 @@ class ProxyScriptFetcherImpl : public ProxyScriptFetcher,
// Note that while a request is in progress, we will be holding a reference
// to |url_request_context|. Be careful not to create cycles between the
// fetcher and the context; you can break such cycles by calling Cancel().
- explicit ProxyScriptFetcherImpl(net::URLRequestContext* url_request_context);
+ explicit ProxyScriptFetcherImpl(URLRequestContext* url_request_context);
virtual ~ProxyScriptFetcherImpl();
@@ -42,16 +42,16 @@ class ProxyScriptFetcherImpl : public ProxyScriptFetcher,
virtual int Fetch(const GURL& url, string16* text,
CompletionCallback* callback);
virtual void Cancel();
- virtual net::URLRequestContext* GetRequestContext();
+ virtual URLRequestContext* GetRequestContext();
- // net::URLRequest::Delegate methods:
- virtual void OnAuthRequired(net::URLRequest* request,
+ // URLRequest::Delegate methods:
+ virtual void OnAuthRequired(URLRequest* request,
AuthChallengeInfo* auth_info);
- virtual void OnSSLCertificateError(net::URLRequest* request, int cert_error,
+ virtual void OnSSLCertificateError(URLRequest* request, int cert_error,
X509Certificate* cert);
- virtual void OnResponseStarted(net::URLRequest* request);
- virtual void OnReadCompleted(net::URLRequest* request, int num_bytes);
- virtual void OnResponseCompleted(net::URLRequest* request);
+ virtual void OnResponseStarted(URLRequest* request);
+ virtual void OnReadCompleted(URLRequest* request, int num_bytes);
+ virtual void OnResponseCompleted(URLRequest* request);
// Used by unit-tests to modify the default limits.
base::TimeDelta SetTimeoutConstraint(base::TimeDelta timeout);
@@ -59,11 +59,11 @@ class ProxyScriptFetcherImpl : public ProxyScriptFetcher,
private:
// Read more bytes from the response.
- void ReadBody(net::URLRequest* request);
+ void ReadBody(URLRequest* request);
// Handles a response from Read(). Returns true if we should continue trying
// to read. |num_bytes| is 0 for EOF, and < 0 on errors.
- bool ConsumeBytesRead(net::URLRequest* request, int num_bytes);
+ bool ConsumeBytesRead(URLRequest* request, int num_bytes);
// Called once the request has completed to notify the caller of
// |response_code_| and |response_text_|.
@@ -80,17 +80,17 @@ class ProxyScriptFetcherImpl : public ProxyScriptFetcher,
ScopedRunnableMethodFactory<ProxyScriptFetcherImpl> task_factory_;
// The context used for making network requests.
- net::URLRequestContext* url_request_context_;
+ URLRequestContext* url_request_context_;
- // Buffer that net::URLRequest writes into.
+ // Buffer that URLRequest writes into.
enum { kBufSize = 4096 };
- scoped_refptr<net::IOBuffer> buf_;
+ scoped_refptr<IOBuffer> buf_;
// The next ID to use for |cur_request_| (monotonically increasing).
int next_id_;
// The current (in progress) request, or NULL.
- scoped_ptr<net::URLRequest> cur_request_;
+ scoped_ptr<URLRequest> cur_request_;
// State for current request (only valid when |cur_request_| is not NULL):
diff --git a/net/proxy/proxy_script_fetcher_impl_unittest.cc b/net/proxy/proxy_script_fetcher_impl_unittest.cc
index 84b822c..3e068a6 100644
--- a/net/proxy/proxy_script_fetcher_impl_unittest.cc
+++ b/net/proxy/proxy_script_fetcher_impl_unittest.cc
@@ -76,19 +76,19 @@ GURL GetTestFileUrl(const std::string& relpath) {
class ProxyScriptFetcherImplTest : public PlatformTest {
public:
ProxyScriptFetcherImplTest()
- : test_server_(net::TestServer::TYPE_HTTP, FilePath(kDocRoot)) {
+ : test_server_(TestServer::TYPE_HTTP, FilePath(kDocRoot)) {
}
static void SetUpTestCase() {
- net::URLRequest::AllowFileAccess();
+ URLRequest::AllowFileAccess();
}
protected:
- net::TestServer test_server_;
+ TestServer test_server_;
};
TEST_F(ProxyScriptFetcherImplTest, FileUrl) {
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
{ // Fetch a non-existent file.
@@ -116,7 +116,7 @@ TEST_F(ProxyScriptFetcherImplTest, FileUrl) {
TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) {
ASSERT_TRUE(test_server_.Start());
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
{ // Fetch a PAC with mime type "text/plain"
@@ -151,7 +151,7 @@ TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) {
TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) {
ASSERT_TRUE(test_server_.Start());
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
{ // Fetch a PAC which gives a 500 -- FAIL
@@ -177,7 +177,7 @@ TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) {
TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) {
ASSERT_TRUE(test_server_.Start());
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
// Fetch PAC scripts via HTTP with a Content-Disposition header -- should
@@ -194,7 +194,7 @@ TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) {
TEST_F(ProxyScriptFetcherImplTest, NoCache) {
ASSERT_TRUE(test_server_.Start());
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
// Fetch a PAC script whose HTTP headers make it cacheable for 1 hour.
@@ -226,7 +226,7 @@ TEST_F(ProxyScriptFetcherImplTest, NoCache) {
TEST_F(ProxyScriptFetcherImplTest, TooLarge) {
ASSERT_TRUE(test_server_.Start());
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
// Set the maximum response size to 50 bytes.
@@ -267,7 +267,7 @@ TEST_F(ProxyScriptFetcherImplTest, TooLarge) {
TEST_F(ProxyScriptFetcherImplTest, Hang) {
ASSERT_TRUE(test_server_.Start());
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
// Set the timeout period to 0.5 seconds.
@@ -305,7 +305,7 @@ TEST_F(ProxyScriptFetcherImplTest, Hang) {
TEST_F(ProxyScriptFetcherImplTest, Encodings) {
ASSERT_TRUE(test_server_.Start());
- scoped_refptr<net::URLRequestContext> context(new RequestContext);
+ scoped_refptr<URLRequestContext> context(new RequestContext);
ProxyScriptFetcherImpl pac_fetcher(context);
// Test a response that is gzip-encoded -- should get inflated.
diff --git a/net/proxy/proxy_service_unittest.cc b/net/proxy/proxy_service_unittest.cc
index aefd5a3..95f8bbc 100644
--- a/net/proxy/proxy_service_unittest.cc
+++ b/net/proxy/proxy_service_unittest.cc
@@ -98,7 +98,7 @@ class MockProxyScriptFetcher : public ProxyScriptFetcher {
virtual void Cancel() {}
- virtual net::URLRequestContext* GetRequestContext() { return NULL; }
+ virtual URLRequestContext* GetRequestContext() { return NULL; }
const GURL& pending_request_url() const {
return pending_request_url_;
@@ -132,7 +132,7 @@ TEST(ProxyServiceTest, Direct) {
EXPECT_TRUE(info.is_direct());
// Check the NetLog was filled correctly.
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_EQ(3u, entries.size());
@@ -179,7 +179,7 @@ TEST(ProxyServiceTest, PAC) {
EXPECT_EQ("foopy:80", info.proxy_server().ToURI());
// Check the NetLog was filled correctly.
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_EQ(5u, entries.size());
@@ -1157,7 +1157,7 @@ TEST(ProxyServiceTest, CancelWhilePACFetching) {
EXPECT_FALSE(callback1.have_result()); // Cancelled.
EXPECT_FALSE(callback2.have_result()); // Cancelled.
- net::CapturingNetLog::EntryList entries1;
+ CapturingNetLog::EntryList entries1;
log1.GetEntries(&entries1);
// Check the NetLog for request 1 (which was cancelled) got filled properly.
@@ -1690,7 +1690,7 @@ TEST(ProxyServiceTest, NetworkChangeTriggersPacRefetch) {
// In particular, PROXY_CONFIG_CHANGED should have only been emitted once
// (for the initial setup), and NOT a second time when the IP address
// changed.
- net::CapturingNetLog::EntryList entries;
+ CapturingNetLog::EntryList entries;
log.GetEntries(&entries);
EXPECT_TRUE(LogContainsEntryWithType(entries, 0,