author | tfarina@chromium.org <tfarina@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2011-01-19 11:48:19 +0000
committer | tfarina@chromium.org <tfarina@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2011-01-19 11:48:19 +0000
commit | da968bc16c3a459b350e6b941af07c99ed201764 (patch)
tree | 2b545b1631ace146fde83064dbad50b15a65776d /net/proxy/proxy_script_fetcher_impl.cc
parent | d2ad7b4cf05551da4e832a6cc038f57e308ba8ca (diff)
net: Remove the net:: prefix from some places that are already in namespace net.
This addresses wtc's review comments in http://codereview.chromium.org/6338002/
BUG=64263
TEST=trybots
Review URL: http://codereview.chromium.org/6322003
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@71787 0039d316-1c4b-4281-b951-d872f2087c98
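
For context, the cleanup rests on ordinary C++ name lookup: code that is already inside namespace net can refer to net::URLRequest, net::IOBuffer, and the other net types without the net:: qualifier, so the explicit prefix is just noise. Below is a minimal, self-contained sketch of that rule; the Widget and MakeWidget names are made up for illustration and are not Chromium code.

// Illustration only: why an explicit net:: qualifier is redundant inside
// namespace net. Widget and MakeWidget are hypothetical names for this sketch.
#include <iostream>

namespace net {

class Widget {
 public:
  void Describe() const { std::cout << "net::Widget\n"; }
};

// Unqualified lookup inside namespace net already finds net::Widget,
// so spelling it net::Widget here would add nothing.
Widget* MakeWidget() {
  return new Widget();
}

}  // namespace net

int main() {
  // Outside the namespace, the qualifier (or a using-declaration) is required.
  net::Widget* w = net::MakeWidget();
  w->Describe();
  delete w;
  return 0;
}
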
Diffstat (limited to 'net/proxy/proxy_script_fetcher_impl.cc')
-rw-r--r-- | net/proxy/proxy_script_fetcher_impl.cc | 34 |
1 file changed, 17 insertions(+), 17 deletions(-)
diff --git a/net/proxy/proxy_script_fetcher_impl.cc b/net/proxy/proxy_script_fetcher_impl.cc
index 0724127..7d22ed3 100644
--- a/net/proxy/proxy_script_fetcher_impl.cc
+++ b/net/proxy/proxy_script_fetcher_impl.cc
@@ -70,10 +70,10 @@ void ConvertResponseToUTF16(const std::string& charset,
 }  // namespace
 
 ProxyScriptFetcherImpl::ProxyScriptFetcherImpl(
-    net::URLRequestContext* url_request_context)
+    URLRequestContext* url_request_context)
     : ALLOW_THIS_IN_INITIALIZER_LIST(task_factory_(this)),
       url_request_context_(url_request_context),
-      buf_(new net::IOBuffer(kBufSize)),
+      buf_(new IOBuffer(kBufSize)),
       next_id_(0),
       cur_request_(NULL),
       cur_request_id_(0),
@@ -86,7 +86,7 @@ ProxyScriptFetcherImpl::ProxyScriptFetcherImpl(
 }
 
 ProxyScriptFetcherImpl::~ProxyScriptFetcherImpl() {
-  // The net::URLRequest's destructor will cancel the outstanding request, and
+  // The URLRequest's destructor will cancel the outstanding request, and
   // ensure that the delegate (this) is not called again.
 }
 
@@ -99,7 +99,7 @@ int ProxyScriptFetcherImpl::Fetch(const GURL& url,
   DCHECK(callback);
   DCHECK(text);
 
-  cur_request_.reset(new net::URLRequest(url, this));
+  cur_request_.reset(new URLRequest(url, this));
   cur_request_->set_context(url_request_context_);
   cur_request_->set_method("GET");
 
@@ -129,16 +129,16 @@ int ProxyScriptFetcherImpl::Fetch(const GURL& url,
 }
 
 void ProxyScriptFetcherImpl::Cancel() {
-  // ResetCurRequestState will free the net::URLRequest, which will cause
+  // ResetCurRequestState will free the URLRequest, which will cause
  // cancellation.
   ResetCurRequestState();
 }
 
-net::URLRequestContext* ProxyScriptFetcherImpl::GetRequestContext() {
+URLRequestContext* ProxyScriptFetcherImpl::GetRequestContext() {
   return url_request_context_;
 }
 
-void ProxyScriptFetcherImpl::OnAuthRequired(net::URLRequest* request,
+void ProxyScriptFetcherImpl::OnAuthRequired(URLRequest* request,
                                             AuthChallengeInfo* auth_info) {
   DCHECK_EQ(request, cur_request_.get());
   // TODO(eroman):
@@ -147,7 +147,7 @@ void ProxyScriptFetcherImpl::OnAuthRequired(net::URLRequest* request,
   request->CancelAuth();
 }
 
-void ProxyScriptFetcherImpl::OnSSLCertificateError(net::URLRequest* request,
+void ProxyScriptFetcherImpl::OnSSLCertificateError(URLRequest* request,
                                                    int cert_error,
                                                    X509Certificate* cert) {
   DCHECK_EQ(request, cur_request_.get());
@@ -157,7 +157,7 @@ void ProxyScriptFetcherImpl::OnSSLCertificateError(net::URLRequest* request,
   request->Cancel();
 }
 
-void ProxyScriptFetcherImpl::OnResponseStarted(net::URLRequest* request) {
+void ProxyScriptFetcherImpl::OnResponseStarted(URLRequest* request) {
   DCHECK_EQ(request, cur_request_.get());
 
   if (!request->status().is_success()) {
@@ -191,7 +191,7 @@ void ProxyScriptFetcherImpl::OnResponseStarted(net::URLRequest* request) {
   ReadBody(request);
 }
 
-void ProxyScriptFetcherImpl::OnReadCompleted(net::URLRequest* request,
+void ProxyScriptFetcherImpl::OnReadCompleted(URLRequest* request,
                                              int num_bytes) {
   DCHECK_EQ(request, cur_request_.get());
   if (ConsumeBytesRead(request, num_bytes)) {
@@ -200,7 +200,7 @@ void ProxyScriptFetcherImpl::OnReadCompleted(net::URLRequest* request,
   }
 }
 
-void ProxyScriptFetcherImpl::OnResponseCompleted(net::URLRequest* request) {
+void ProxyScriptFetcherImpl::OnResponseCompleted(URLRequest* request) {
   DCHECK_EQ(request, cur_request_.get());
 
   // Use |result_code_| as the request's error if we have already set it to
@@ -211,7 +211,7 @@ void ProxyScriptFetcherImpl::OnResponseCompleted(net::URLRequest* request) {
   FetchCompleted();
 }
 
-void ProxyScriptFetcherImpl::ReadBody(net::URLRequest* request) {
+void ProxyScriptFetcherImpl::ReadBody(URLRequest* request) {
   // Read as many bytes as are available synchronously.
   while (true) {
     int num_bytes;
@@ -226,7 +226,7 @@ void ProxyScriptFetcherImpl::ReadBody(net::URLRequest* request) {
   }
 }
 
-bool ProxyScriptFetcherImpl::ConsumeBytesRead(net::URLRequest* request,
+bool ProxyScriptFetcherImpl::ConsumeBytesRead(URLRequest* request,
                                               int num_bytes) {
   if (num_bytes <= 0) {
     // Error while reading, or EOF.
@@ -260,9 +260,9 @@ void ProxyScriptFetcherImpl::FetchCompleted() {
   int result_code = result_code_;
   CompletionCallback* callback = callback_;
 
-  // Hold a reference to the net::URLRequestContext to prevent re-entrancy from
-  // ~net::URLRequestContext.
-  scoped_refptr<net::URLRequestContext> context(cur_request_->context());
+  // Hold a reference to the URLRequestContext to prevent re-entrancy from
+  // ~URLRequestContext.
+  scoped_refptr<URLRequestContext> context(cur_request_->context());
   ResetCurRequestState();
 
   callback->Run(result_code);
@@ -277,7 +277,7 @@ void ProxyScriptFetcherImpl::ResetCurRequestState() {
 }
 
 void ProxyScriptFetcherImpl::OnTimeout(int id) {
-  // Timeout tasks may outlive the net::URLRequest they reference. Make sure it
+  // Timeout tasks may outlive the URLRequest they reference. Make sure it
   // is still applicable.
   if (cur_request_id_ != id)
     return;