author     tfarina@chromium.org <tfarina@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2010-12-12 09:59:20 +0000
committer  tfarina@chromium.org <tfarina@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2010-12-12 09:59:20 +0000
commit     300bb2a5b1f4b80a211cf1017b0cc7f900880421 (patch)
tree       00714062df0e1be73fee437a27fa91b932dea219 /net
parent     03ea3c52ca254436886a0372eb9fb4b1eb8aefd9 (diff)
net: Add namespace net to URLRequestFileJob.
BUG=64263
TEST=trybots
Review URL: http://codereview.chromium.org/5755004
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@68959 0039d316-1c4b-4281-b951-d872f2087c98
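The change follows the usual pattern for moving a class into the net namespace: wrap the declaration and definition in namespace net { ... }, drop the now-redundant net:: qualifiers inside those files, and have callers outside the namespace use the fully qualified name. A minimal, self-contained sketch of that pattern (the types below are stand-ins for illustration, not the real Chromium classes):

#include <string>

namespace net {

// Stand-in for net::URLRequestJob (the real base class lives in
// net/url_request/url_request_job.h).
class URLRequestJob {
 public:
  virtual ~URLRequestJob() {}
};

// Inside the namespace, base classes and helpers no longer need the net::
// prefix -- this is what most of the hunks below amount to.
class URLRequestFileJob : public URLRequestJob {
 public:
  explicit URLRequestFileJob(const std::string& file_path)
      : file_path_(file_path) {}

 private:
  std::string file_path_;
};

}  // namespace net

// Outside the namespace (e.g. url_request_job_manager.cc), callers now spell
// out the fully qualified name. The caller owns the returned job, as with the
// real ProtocolFactory functions.
net::URLRequestFileJob* MakeFileJob(const std::string& path) {
  return new net::URLRequestFileJob(path);
}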
Diffstat (limited to 'net')
-rw-r--r--  net/url_request/url_request_file_job.cc    | 52
-rw-r--r--  net/url_request/url_request_file_job.h     | 20
-rw-r--r--  net/url_request/url_request_job_manager.cc |  2
3 files changed, 41 insertions(+), 33 deletions(-)
diff --git a/net/url_request/url_request_file_job.cc b/net/url_request/url_request_file_job.cc
index 2061849..3d717be 100644
--- a/net/url_request/url_request_file_job.cc
+++ b/net/url_request/url_request_file_job.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2006-2010 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -40,6 +40,8 @@
 #include "base/worker_pool.h"
 #endif
 
+namespace net {
+
 #if defined(OS_WIN)
 class URLRequestFileJob::AsyncResolver
     : public base::RefCountedThreadSafe<URLRequestFileJob::AsyncResolver> {
@@ -83,16 +85,16 @@ class URLRequestFileJob::AsyncResolver
 #endif
 
 // static
-net::URLRequestJob* URLRequestFileJob::Factory(net::URLRequest* request,
-                                               const std::string& scheme) {
+URLRequestJob* URLRequestFileJob::Factory(URLRequest* request,
+                                          const std::string& scheme) {
   FilePath file_path;
-  const bool is_file = net::FileURLToFilePath(request->url(), &file_path);
+  const bool is_file = FileURLToFilePath(request->url(), &file_path);
 
 #if defined(OS_CHROMEOS)
   // Check file access.
   if (AccessDisabled(file_path))
-    return new URLRequestErrorJob(request, net::ERR_ACCESS_DENIED);
+    return new URLRequestErrorJob(request, ERR_ACCESS_DENIED);
 #endif
 
   // We need to decide whether to create URLRequestFileJob for file access or
@@ -111,9 +113,9 @@ net::URLRequestJob* URLRequestFileJob::Factory(net::URLRequest* request,
   return new URLRequestFileJob(request, file_path);
 }
 
-URLRequestFileJob::URLRequestFileJob(net::URLRequest* request,
+URLRequestFileJob::URLRequestFileJob(URLRequest* request,
                                      const FilePath& file_path)
-    : net::URLRequestJob(request),
+    : URLRequestJob(request),
       file_path_(file_path),
       ALLOW_THIS_IN_INITIALIZER_LIST(
           io_callback_(this, &URLRequestFileJob::DidRead)),
@@ -166,11 +168,11 @@ void URLRequestFileJob::Kill() {
   }
 #endif
 
-  net::URLRequestJob::Kill();
+  URLRequestJob::Kill();
   method_factory_.RevokeAll();
 }
 
-bool URLRequestFileJob::ReadRawData(net::IOBuffer* dest, int dest_size,
+bool URLRequestFileJob::ReadRawData(IOBuffer* dest, int dest_size,
                                     int *bytes_read) {
   DCHECK_NE(dest_size, 0);
   DCHECK(bytes_read);
@@ -196,7 +198,7 @@ bool URLRequestFileJob::ReadRawData(net::IOBuffer* dest, int dest_size,
   }
 
   // Otherwise, a read error occured.  We may just need to wait...
-  if (rv == net::ERR_IO_PENDING) {
+  if (rv == ERR_IO_PENDING) {
     SetStatus(URLRequestStatus(URLRequestStatus::IO_PENDING, 0));
   } else {
     NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv));
@@ -221,16 +223,16 @@ bool URLRequestFileJob::GetMimeType(std::string* mime_type) const {
   // http://code.google.com/p/chromium/issues/detail?id=59849
   base::ThreadRestrictions::ScopedAllowIO allow_io;
   DCHECK(request_);
-  return net::GetMimeTypeFromFile(file_path_, mime_type);
+  return GetMimeTypeFromFile(file_path_, mime_type);
 }
 
 void URLRequestFileJob::SetExtraRequestHeaders(
-    const net::HttpRequestHeaders& headers) {
+    const HttpRequestHeaders& headers) {
   std::string range_header;
-  if (headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) {
+  if (headers.GetHeader(HttpRequestHeaders::kRange, &range_header)) {
     // We only care about "Range" header here.
-    std::vector<net::HttpByteRange> ranges;
-    if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) {
+    std::vector<HttpByteRange> ranges;
+    if (HttpUtil::ParseRangeHeader(range_header, &ranges)) {
       if (ranges.size() == 1) {
         byte_range_ = ranges[0];
       } else {
@@ -239,7 +241,7 @@ void URLRequestFileJob::SetExtraRequestHeaders(
         // TODO(hclam): decide whether we want to support multiple range
         // requests.
         NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
-                   net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+                   ERR_REQUEST_RANGE_NOT_SATISFIABLE));
       }
     }
   }
@@ -257,7 +259,7 @@ void URLRequestFileJob::DidResolve(
 
   is_directory_ = file_info.is_directory;
 
-  int rv = net::OK;
+  int rv = OK;
   // We use URLRequestFileJob to handle files as well as directories without
   // trailing slash.
   // If a directory does not exist, we return ERR_FILE_NOT_FOUND. Otherwise,
@@ -267,7 +269,7 @@ void URLRequestFileJob::DidResolve(
   // So what happens is we append it with trailing slash and redirect it to
   // FileDirJob where it is resolved as invalid.
   if (!exists) {
-    rv = net::ERR_FILE_NOT_FOUND;
+    rv = ERR_FILE_NOT_FOUND;
   } else if (!is_directory_) {
     // URL requests should not block on the disk!
     // http://code.google.com/p/chromium/issues/detail?id=59849
@@ -279,14 +281,14 @@ void URLRequestFileJob::DidResolve(
     rv = stream_.Open(file_path_, flags);
   }
 
-  if (rv != net::OK) {
+  if (rv != OK) {
     NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv));
     return;
   }
 
   if (!byte_range_.ComputeBounds(file_info.size)) {
     NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
-               net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+               ERR_REQUEST_RANGE_NOT_SATISFIABLE));
     return;
   }
 
@@ -298,9 +300,9 @@ void URLRequestFileJob::DidResolve(
   if (remaining_bytes_ > 0 &&
       byte_range_.first_byte_position() != 0 &&
       byte_range_.first_byte_position() !=
-          stream_.Seek(net::FROM_BEGIN, byte_range_.first_byte_position())) {
+          stream_.Seek(FROM_BEGIN, byte_range_.first_byte_position())) {
     NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
-               net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+               ERR_REQUEST_RANGE_NOT_SATISFIABLE));
     return;
   }
 
@@ -352,7 +354,7 @@ bool URLRequestFileJob::IsRedirectResponse(GURL* location,
   if (!resolved)
     return false;
 
-  *location = net::FilePathToFileURL(new_path);
+  *location = FilePathToFileURL(new_path);
   *http_status_code = 301;
   return true;
 #else
@@ -371,7 +373,7 @@ static const char* const kLocalAccessWhiteList[] = {
 
 // static
 bool URLRequestFileJob::AccessDisabled(const FilePath& file_path) {
-  if (net::URLRequest::IsFileAccessAllowed()) {  // for tests.
+  if (URLRequest::IsFileAccessAllowed()) {  // for tests.
     return false;
   }
 
@@ -386,3 +388,5 @@ bool URLRequestFileJob::AccessDisabled(const FilePath& file_path) {
   return true;
 }
 #endif
+
+}  // namespace net
diff --git a/net/url_request/url_request_file_job.h b/net/url_request/url_request_file_job.h
index 7fb6652..1a09b04 100644
--- a/net/url_request/url_request_file_job.h
+++ b/net/url_request/url_request_file_job.h
@@ -21,21 +21,23 @@ namespace file_util {
 struct FileInfo;
 }
 
+namespace net {
+
 // A request job that handles reading file URLs
-class URLRequestFileJob : public net::URLRequestJob {
+class URLRequestFileJob : public URLRequestJob {
  public:
-  URLRequestFileJob(net::URLRequest* request, const FilePath& file_path);
+  URLRequestFileJob(URLRequest* request, const FilePath& file_path);
 
   virtual void Start();
   virtual void Kill();
-  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int* bytes_read);
+  virtual bool ReadRawData(IOBuffer* buf, int buf_size, int* bytes_read);
   virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
   virtual bool GetContentEncodings(
       std::vector<Filter::FilterType>* encoding_type);
   virtual bool GetMimeType(std::string* mime_type) const;
-  virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers);
+  virtual void SetExtraRequestHeaders(const HttpRequestHeaders& headers);
 
-  static net::URLRequest::ProtocolFactory Factory;
+  static URLRequest::ProtocolFactory Factory;
 
 #if defined(OS_CHROMEOS)
   static bool AccessDisabled(const FilePath& file_path);
@@ -51,11 +53,11 @@ class URLRequestFileJob : public net::URLRequestJob {
   void DidResolve(bool exists, const base::PlatformFileInfo& file_info);
   void DidRead(int result);
 
-  net::CompletionCallbackImpl<URLRequestFileJob> io_callback_;
-  net::FileStream stream_;
+  CompletionCallbackImpl<URLRequestFileJob> io_callback_;
+  FileStream stream_;
   bool is_directory_;
 
-  net::HttpByteRange byte_range_;
+  HttpByteRange byte_range_;
   int64 remaining_bytes_;
 
 #if defined(OS_WIN)
@@ -69,4 +71,6 @@ class URLRequestFileJob : public net::URLRequestJob {
   DISALLOW_COPY_AND_ASSIGN(URLRequestFileJob);
 };
 
+}  // namespace net
+
 #endif  // NET_URL_REQUEST_URL_REQUEST_FILE_JOB_H_
diff --git a/net/url_request/url_request_job_manager.cc b/net/url_request/url_request_job_manager.cc
index 490d8dd..d0813a7 100644
--- a/net/url_request/url_request_job_manager.cc
+++ b/net/url_request/url_request_job_manager.cc
@@ -31,7 +31,7 @@ struct SchemeToFactory {
 static const SchemeToFactory kBuiltinFactories[] = {
   { "http", URLRequestHttpJob::Factory },
   { "https", URLRequestHttpJob::Factory },
-  { "file", URLRequestFileJob::Factory },
+  { "file", net::URLRequestFileJob::Factory },
   { "ftp", net::URLRequestFtpJob::Factory },
   { "about", net::URLRequestAboutJob::Factory },
   { "data", URLRequestDataJob::Factory },
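The final hunk touches the built-in scheme table in url_request_job_manager.cc, which maps URL schemes to ProtocolFactory function pointers; because that table is defined outside the net namespace, the "file" entry now needs the explicit net:: prefix, matching the existing "ftp" and "about" entries. A compilable sketch of that scheme-to-factory lookup pattern (stand-in types and names, not the actual Chromium declarations):

#include <cstring>

// Stand-ins for the request/job types and the factory signature used by the
// real table in net/url_request/url_request_job_manager.cc.
class URLRequest {};
class URLRequestJob {};
typedef URLRequestJob* (ProtocolFactory)(URLRequest* request,
                                         const char* scheme);

// Toy factories; the caller owns the returned job.
URLRequestJob* HttpJobFactory(URLRequest*, const char*) { return new URLRequestJob; }
URLRequestJob* FileJobFactory(URLRequest*, const char*) { return new URLRequestJob; }

// Same shape as kBuiltinFactories: a fixed array mapping a scheme string to
// the factory that builds the job for it.
struct SchemeToFactory {
  const char* scheme;
  ProtocolFactory* factory;
};

static const SchemeToFactory kFactories[] = {
  { "http", HttpJobFactory },
  { "file", FileJobFactory },
};

// Linear scan over the table, roughly how the job manager picks a built-in
// factory for a given scheme.
ProtocolFactory* FindFactory(const char* scheme) {
  const int kNumFactories = sizeof(kFactories) / sizeof(kFactories[0]);
  for (int i = 0; i < kNumFactories; ++i) {
    if (std::strcmp(kFactories[i].scheme, scheme) == 0)
      return kFactories[i].factory;
  }
  return 0;
}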