Diffstat (limited to 'net/url_request')
-rw-r--r--  net/url_request/url_request.cc           | 41
-rw-r--r--  net/url_request/url_request.h            | 11
-rw-r--r--  net/url_request/url_request_file_job.cc  | 29
-rw-r--r--  net/url_request/url_request_file_job.h   |  2
-rw-r--r--  net/url_request/url_request_http_job.cc  |  4
-rw-r--r--  net/url_request/url_request_http_job.h   |  2
-rw-r--r--  net/url_request/url_request_job.h        |  3
7 files changed, 47 insertions(+), 45 deletions(-)
diff --git a/net/url_request/url_request.cc b/net/url_request/url_request.cc
index bf50655..57ad3a3 100644
--- a/net/url_request/url_request.cc
+++ b/net/url_request/url_request.cc
@@ -24,13 +24,26 @@ using net::UploadData;
using std::string;
using std::wstring;
+namespace {
+
// Max number of http redirects to follow. Same number as gecko.
-static const int kMaxRedirects = 20;
+const int kMaxRedirects = 20;
-static URLRequestJobManager* GetJobManager() {
+URLRequestJobManager* GetJobManager() {
return Singleton<URLRequestJobManager>::get();
}
+// Discard headers which have meaning in POST (Content-Length, Content-Type,
+// Origin).
+void StripPostSpecificHeaders(net::HttpRequestHeaders* headers) {
+ // These are headers that may be attached to a POST.
+ headers->RemoveHeader(net::HttpRequestHeaders::kContentLength);
+ headers->RemoveHeader(net::HttpRequestHeaders::kContentType);
+ headers->RemoveHeader(net::HttpRequestHeaders::kOrigin);
+}
+
+} // namespace
+
///////////////////////////////////////////////////////////////////////////////
// URLRequest
@@ -126,14 +139,13 @@ void URLRequest::SetExtraRequestHeaderByName(const string& name,
void URLRequest::SetExtraRequestHeaders(const string& headers) {
DCHECK(!is_pending_);
- if (headers.empty()) {
- extra_request_headers_.clear();
- } else {
+ extra_request_headers_.Clear();
+ if (!headers.empty()) {
#ifndef NDEBUG
size_t crlf = headers.rfind("\r\n", headers.size() - 1);
DCHECK(crlf != headers.size() - 2) << "headers must not end with CRLF";
#endif
- extra_request_headers_ = headers + "\r\n";
+ extra_request_headers_.AddHeadersFromString(headers);
}
// NOTE: This method will likely become non-trivial once the other setters
@@ -434,18 +446,6 @@ void URLRequest::OrphanJob() {
job_ = NULL;
}
-// static
-std::string URLRequest::StripPostSpecificHeaders(const std::string& headers) {
- // These are headers that may be attached to a POST.
- static const char* const kPostHeaders[] = {
- "content-type",
- "content-length",
- "origin"
- };
- return net::HttpUtil::StripHeaders(
- headers, kPostHeaders, arraysize(kPostHeaders));
-}
-
int URLRequest::Redirect(const GURL& location, int http_status_code) {
if (net_log_.HasListener()) {
net_log_.AddEvent(
@@ -492,10 +492,7 @@ int URLRequest::Redirect(const GURL& location, int http_status_code) {
// the inclusion of a multipart Content-Type header in GET can cause
// problems with some servers:
// http://code.google.com/p/chromium/issues/detail?id=843
- //
- // TODO(eroman): It would be better if this data was structured into
- // specific fields/flags, rather than a stew of extra headers.
- extra_request_headers_ = StripPostSpecificHeaders(extra_request_headers_);
+ StripPostSpecificHeaders(&extra_request_headers_);
}
if (!final_upload_progress_)
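For reference, a minimal sketch of the header handling this change switches to, using only calls and constants that appear in the hunks above (the literal header values and the standalone function are illustrative, not part of the change):

#include "net/http/http_request_headers.h"

void HeaderHandlingSketch() {
  net::HttpRequestHeaders extra_request_headers;

  // SetExtraRequestHeaders() now clears and re-parses the raw block:
  extra_request_headers.Clear();
  extra_request_headers.AddHeadersFromString(
      "Content-Type: text/plain\r\nOrigin: http://example.com");

  // On a POST-to-GET redirect, StripPostSpecificHeaders() removes the
  // POST-only headers by name rather than by string matching:
  extra_request_headers.RemoveHeader(net::HttpRequestHeaders::kContentLength);
  extra_request_headers.RemoveHeader(net::HttpRequestHeaders::kContentType);
  extra_request_headers.RemoveHeader(net::HttpRequestHeaders::kOrigin);
}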
diff --git a/net/url_request/url_request.h b/net/url_request/url_request.h
index 438621b..a31c6a2 100644
--- a/net/url_request/url_request.h
+++ b/net/url_request/url_request.h
@@ -19,6 +19,7 @@
#include "net/base/load_states.h"
#include "net/base/net_log.h"
#include "net/base/request_priority.h"
+#include "net/http/http_request_headers.h"
#include "net/http/http_response_info.h"
#include "net/url_request/url_request_status.h"
@@ -337,7 +338,9 @@ class URLRequest {
// by \r\n.
void SetExtraRequestHeaders(const std::string& headers);
- const std::string& extra_request_headers() { return extra_request_headers_; }
+ const net::HttpRequestHeaders& extra_request_headers() const {
+ return extra_request_headers_;
+ }
// Returns the current load state for the request.
net::LoadState GetLoadState() const;
@@ -571,10 +574,6 @@ class URLRequest {
// passed values.
void DoCancel(int os_error, const net::SSLInfo& ssl_info);
- // Discard headers which have meaning in POST (Content-Length, Content-Type,
- // Origin).
- static std::string StripPostSpecificHeaders(const std::string& headers);
-
// Contextual information used for this request (can be NULL). This contains
// most of the dependencies which are shared between requests (disk cache,
// cookie store, socket pool, etc.)
@@ -590,7 +589,7 @@ class URLRequest {
GURL first_party_for_cookies_;
std::string method_; // "GET", "POST", etc. Should be all uppercase.
std::string referrer_;
- std::string extra_request_headers_;
+ net::HttpRequestHeaders extra_request_headers_;
int load_flags_; // Flags indicating the request type for the load;
// expected values are LOAD_* enums above.
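With the typed accessor above, callers read individual headers through net::HttpRequestHeaders::GetHeader() instead of scanning the joined string; a hedged sketch (the request pointer and the chosen header are illustrative):

std::string content_type;
if (request->extra_request_headers().GetHeader(
        net::HttpRequestHeaders::kContentType, &content_type)) {
  // Previously this meant searching the "\r\n"-delimited
  // extra_request_headers_ string by hand.
}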
diff --git a/net/url_request/url_request_file_job.cc b/net/url_request/url_request_file_job.cc
index eae8d27..585929e 100644
--- a/net/url_request/url_request_file_job.cc
+++ b/net/url_request/url_request_file_job.cc
@@ -188,18 +188,23 @@ bool URLRequestFileJob::GetMimeType(std::string* mime_type) const {
return net::GetMimeTypeFromFile(file_path_, mime_type);
}
-void URLRequestFileJob::SetExtraRequestHeaders(const std::string& headers) {
- // We only care about "Range" header here.
- std::vector<net::HttpByteRange> ranges;
- if (net::HttpUtil::ParseRanges(headers, &ranges)) {
- if (ranges.size() == 1) {
- byte_range_ = ranges[0];
- } else {
- // We don't support multiple range requests in one single URL request,
- // because we need to do multipart encoding here.
- // TODO(hclam): decide whether we want to support multiple range requests.
- NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
- net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+void URLRequestFileJob::SetExtraRequestHeaders(
+ const net::HttpRequestHeaders& headers) {
+ std::string range_header;
+ if (headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) {
+ // We only care about "Range" header here.
+ std::vector<net::HttpByteRange> ranges;
+ if (net::HttpUtil::ParseRangeHeader(range_header, &ranges)) {
+ if (ranges.size() == 1) {
+ byte_range_ = ranges[0];
+ } else {
+ // We don't support multiple range requests in one single URL request,
+ // because we need to do multipart encoding here.
+ // TODO(hclam): decide whether we want to support multiple range
+ // requests.
+ NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
+ net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
+ }
}
}
}
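The same path in isolation, as a sketch built from the calls in the hunk above (SetHeader and the literal range value are assumed setup and do not appear in the change):

net::HttpRequestHeaders headers;
headers.SetHeader(net::HttpRequestHeaders::kRange, "bytes=0-1023");

std::string range_header;
if (headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header)) {
  std::vector<net::HttpByteRange> ranges;
  if (net::HttpUtil::ParseRangeHeader(range_header, &ranges) &&
      ranges.size() == 1) {
    // A single range is honored; anything else fails the request with
    // ERR_REQUEST_RANGE_NOT_SATISFIABLE, exactly as in the job above.
  }
}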
diff --git a/net/url_request/url_request_file_job.h b/net/url_request/url_request_file_job.h
index 8cb28bd..aed6859 100644
--- a/net/url_request/url_request_file_job.h
+++ b/net/url_request/url_request_file_job.h
@@ -30,7 +30,7 @@ class URLRequestFileJob : public URLRequestJob {
virtual bool GetContentEncodings(
std::vector<Filter::FilterType>* encoding_type);
virtual bool GetMimeType(std::string* mime_type) const;
- virtual void SetExtraRequestHeaders(const std::string& headers);
+ virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers);
static URLRequest::ProtocolFactory Factory;
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 52eda7d..c907a6a 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -131,9 +131,9 @@ void URLRequestHttpJob::SetUpload(net::UploadData* upload) {
}
void URLRequestHttpJob::SetExtraRequestHeaders(
- const std::string& headers) {
+ const net::HttpRequestHeaders& headers) {
DCHECK(!transaction_.get()) << "cannot change once started";
- request_info_.extra_headers.AddHeadersFromString(headers);
+ request_info_.extra_headers.CopyFrom(headers);
}
void URLRequestHttpJob::Start() {
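The HTTP job now takes a structured copy instead of re-parsing a string. A small sketch of the distinction, assuming CopyFrom() behaves as whole-object assignment (which is how the job uses it here):

net::HttpRequestHeaders parsed, request_info_headers;
parsed.AddHeadersFromString("Accept: */*\r\nX-Debug: 1");  // parse a raw block
request_info_headers.CopyFrom(parsed);                      // take it verbatim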
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index 4233c6e..279cdd4 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -32,7 +32,7 @@ class URLRequestHttpJob : public URLRequestJob {
// URLRequestJob methods:
virtual void SetUpload(net::UploadData* upload);
- virtual void SetExtraRequestHeaders(const std::string& headers);
+ virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers);
virtual void Start();
virtual void Kill();
virtual net::LoadState GetLoadState() const;
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index 0131428..9850f4f 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -17,6 +17,7 @@
namespace net {
class AuthChallengeInfo;
+class HttpRequestHeaders;
class HttpResponseInfo;
class IOBuffer;
class UploadData;
@@ -53,7 +54,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
virtual void SetUpload(net::UploadData* upload) { }
// Sets extra request headers for Job types that support request headers.
- virtual void SetExtraRequestHeaders(const std::string& headers) { }
+ virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers) {}
// If any error occurs while starting the Job, NotifyStartError should be
// called.
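Any job type that cares about request headers now overrides the typed signature. A hypothetical subclass sketch (the class, the member, and the chosen header are invented; the constructor and the other URLRequestJob overrides are omitted):

class RangeAwareJob : public URLRequestJob {
 public:
  virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers) {
    // Pull one header out by name; no manual "\r\n" splitting needed.
    headers.GetHeader(net::HttpRequestHeaders::kRange, &range_header_);
  }

 private:
  std::string range_header_;
};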