author     jar@google.com <jar@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2008-09-23 01:18:16 +0000
committer  jar@google.com <jar@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2008-09-23 01:18:16 +0000
commit     6088942a9ddd522ed237b805641138f853fc3a7f
tree       38f2078177a0e30fecf4edeaacdda5704a067607 /net/url_request
parent     9516babc4a834ec72508b35fb86741f9254f5419
Re-land SDCH filter support experiment
Fix up solution files for webkit and net
Add a one-line keyword change to help the Linux build
r=hunanr,openvcdiff,nsylvain
Review URL: http://codereview.chromium.org/4026
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@2469 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request')
-rw-r--r--  net/url_request/url_request_http_job.cc  59
-rw-r--r--  net/url_request/url_request_http_job.h   20
-rw-r--r--  net/url_request/url_request_job.cc        10
-rw-r--r--  net/url_request/url_request_job.h         18
4 files changed, 85 insertions, 22 deletions
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 8316e73..094acd8 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -5,12 +5,15 @@
 #include "net/url_request/url_request_http_job.h"
 
 #include "base/compiler_specific.h"
+#include "base/file_util.h"
+#include "base/file_version_info.h"
 #include "base/message_loop.h"
 #include "base/string_util.h"
 #include "net/base/cookie_monster.h"
 #include "net/base/load_flags.h"
 #include "net/base/net_errors.h"
 #include "net/base/net_util.h"
+#include "net/base/sdch_manager.h"
 #include "net/http/http_response_info.h"
 #include "net/http/http_transaction.h"
 #include "net/http/http_transaction_factory.h"
@@ -159,15 +162,20 @@ int URLRequestHttpJob::GetResponseCode() {
   return response_info_->headers->response_code();
 }
 
-bool URLRequestHttpJob::GetContentEncoding(std::string* encoding_type) {
+bool URLRequestHttpJob::GetContentEncodings(
+    std::vector<std::string>* encoding_types) {
   DCHECK(transaction_);
 
   if (!response_info_)
     return false;
 
-  // TODO(darin): what if there are multiple content encodings?
-  return response_info_->headers->EnumerateHeader(NULL, "Content-Encoding",
-                                                  encoding_type);
+  std::string encoding_type;
+  void* iter = NULL;
+  while (response_info_->headers->EnumerateHeader(&iter, "Content-Encoding",
+                                                  &encoding_type)) {
+    encoding_types->push_back(encoding_type);
+  }
+  return !encoding_types->empty();
 }
 
 bool URLRequestHttpJob::IsRedirectResponse(GURL* location,
@@ -413,6 +421,23 @@ void URLRequestHttpJob::NotifyHeadersComplete() {
     }
   }
 
+  // Get list of SDCH dictionary requests, and schedule them to be loaded.
+  if (SdchManager::Global()->IsInSupportedDomain(request_->url())) {
+    static const std::string name = "Get-Dictionary";
+    std::string url_text;
+    void* iter = NULL;
+    // TODO(jar): We need to not fetch dictionaries the first time they are
+    // seen, but rather wait until we can justify their usefulness.
+    // For now, we will only fetch the first dictionary, which will at least
+    // require multiple suggestions before we get additional ones for this site.
+    // Eventually we should wait until a dictionary is requested several times
+    // before we even download it (so that we don't waste memory or bandwidth).
+    if (response_info_->headers->EnumerateHeader(&iter, name, &url_text)) {
+      GURL dictionary_url = request_->url().Resolve(url_text);
+      SdchManager::Global()->FetchDictionary(request_->url(), dictionary_url);
+    }
+  }
+
   URLRequestJob::NotifyHeadersComplete();
 }
 
@@ -476,8 +501,32 @@ void URLRequestHttpJob::AddExtraHeaders() {
         context->accept_charset() + "\r\n";
   }
 
+  if (!SdchManager::Global()->IsInSupportedDomain(request_->url())) {
+    // Tell the server what compression formats we support (other than SDCH).
+    request_info_.extra_headers += "Accept-Encoding: gzip,deflate,bzip2\r\n";
+    return;
+  }
+
+  // Supply SDCH related headers, as well as accepting that encoding.
+
+  // TODO(jar): See if it is worth optimizing away these bytes when the URL is
+  // probably an img or such. (and SDCH encoding is not likely).
+  std::string avail_dictionaries;
+  SdchManager::Global()->GetAvailDictionaryList(request_->url(),
+                                                &avail_dictionaries);
+  if (!avail_dictionaries.empty())
+    request_info_.extra_headers += "Avail-Dictionary: " +
+        avail_dictionaries + "\r\n";
+
+  scoped_ptr<FileVersionInfo> file_version_info(
+      FileVersionInfo::CreateFileVersionInfoForCurrentModule());
+  request_info_.extra_headers += "X-SDCH: Chrome ";
+  request_info_.extra_headers +=
+      WideToASCII(file_version_info->product_version());
+  request_info_.extra_headers += "\r\n";
+
   // Tell the server what compression formats we support.
-  request_info_.extra_headers += "Accept-Encoding: gzip,deflate,bzip2\r\n";
+  request_info_.extra_headers += "Accept-Encoding: gzip,deflate,bzip2,sdch\r\n";
 }
 
 void URLRequestHttpJob::FetchResponseCookies() {
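[Editor's note] The GetContentEncodings() change above is the heart of the patch: Content-Encoding may legally appear more than once (e.g. "sdch" followed by "gzip"), so every value is collected rather than only the first. Below is a minimal, self-contained sketch of that enumeration pattern; ToyHeaders and its size_t iterator are invented stand-ins for Chromium's HttpResponseHeaders and its void* iterator, not the real API.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

// ToyHeaders is a hypothetical stand-in for net::HttpResponseHeaders; it
// only models EnumerateHeader(): repeated calls with the same iterator
// yield successive values of a header that may appear more than once.
struct ToyHeaders {
  std::vector<std::pair<std::string, std::string> > lines;

  bool EnumerateHeader(size_t* iter, const std::string& name,
                       std::string* value) const {
    for (size_t i = *iter; i < lines.size(); ++i) {
      if (lines[i].first == name) {
        *value = lines[i].second;
        *iter = i + 1;
        return true;
      }
    }
    return false;
  }
};

int main() {
  ToyHeaders headers;
  headers.lines.push_back(std::make_pair("Content-Encoding", "sdch"));
  headers.lines.push_back(std::make_pair("Content-Type", "text/html"));
  headers.lines.push_back(std::make_pair("Content-Encoding", "gzip"));

  // Same loop shape as the new GetContentEncodings(): gather every value
  // instead of stopping at the first, then report success if any were found.
  std::vector<std::string> encoding_types;
  std::string encoding_type;
  size_t iter = 0;
  while (headers.EnumerateHeader(&iter, "Content-Encoding", &encoding_type))
    encoding_types.push_back(encoding_type);

  for (size_t i = 0; i < encoding_types.size(); ++i)
    std::cout << encoding_types[i] << "\n";  // prints "sdch" then "gzip"
  return 0;
}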
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index 9312e35..908db2c 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -2,8 +2,11 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H__
-#define NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H__
+#ifndef NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H_
+#define NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H_
+
+#include <string>
+#include <vector>
 
 #include "net/base/completion_callback.h"
 #include "net/http/http_request_info.h"
@@ -24,7 +27,7 @@ class URLRequestHttpJob : public URLRequestJob {
   virtual ~URLRequestHttpJob();
 
  protected:
-  URLRequestHttpJob(URLRequest* request);
+  explicit URLRequestHttpJob(URLRequest* request);
 
   // URLRequestJob methods:
   virtual void SetUpload(net::UploadData* upload);
@@ -38,7 +41,7 @@ class URLRequestHttpJob : public URLRequestJob {
   virtual void GetResponseInfo(net::HttpResponseInfo* info);
   virtual bool GetResponseCookies(std::vector<std::string>* cookies);
   virtual int GetResponseCode();
-  virtual bool GetContentEncoding(std::string* encoding_type);
+  virtual bool GetContentEncodings(std::vector<std::string>* encoding_types);
   virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
   virtual bool IsSafeRedirect(const GURL& location);
   virtual bool NeedsAuth();
@@ -77,12 +80,11 @@ class URLRequestHttpJob : public URLRequestJob {
 
   bool read_in_progress_;
 
-  // Keep a reference to the url request context to be sure it's not
-  // deleted before us.
+  // Keep a reference to the url request context to be sure it's not deleted
+  // before us.
   scoped_refptr<URLRequestContext> context_;
 
-  DISALLOW_EVIL_CONSTRUCTORS(URLRequestHttpJob);
+  DISALLOW_COPY_AND_ASSIGN(URLRequestHttpJob);
 };
 
-#endif  // NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H__
-
+#endif  // NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H_
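[Editor's note] This header diff also renames DISALLOW_EVIL_CONSTRUCTORS to DISALLOW_COPY_AND_ASSIGN. Both names cover the same idiom from base/basictypes.h; the sketch below shows roughly what the macro amounts to (an approximation, not the verbatim Chromium definition).

// Declares, but intentionally never defines, the copy constructor and
// assignment operator. Used in a private section (as in URLRequestHttpJob
// above), external copies fail to compile and internal ones fail to link.
#define DISALLOW_COPY_AND_ASSIGN(TypeName) \
  TypeName(const TypeName&);               \
  void operator=(const TypeName&)

class NoCopy {
 public:
  NoCopy() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(NoCopy);
};

int main() {
  NoCopy a;       // fine: default construction
  // NoCopy b(a); // would not compile: copy constructor is private
  return 0;
}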
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index b0517ba..0501286 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -47,11 +47,13 @@ void URLRequestJob::DetachRequest() {
 }
 
 void URLRequestJob::SetupFilter() {
-  std::string encoding_type;
-  if (GetContentEncoding(&encoding_type)) {
+  std::vector<std::string> encoding_types;
+  if (GetContentEncodings(&encoding_types)) {
     std::string mime_type;
     GetMimeType(&mime_type);
-    filter_.reset(Filter::Factory(encoding_type, mime_type, kFilterBufSize));
+    filter_.reset(Filter::Factory(encoding_types, mime_type, kFilterBufSize));
+    if (filter_.get())
+      filter_->SetURL(request_->url());
   }
 }
 
@@ -174,7 +176,7 @@ bool URLRequestJob::ReadFilteredData(int *bytes_read) {
   // Get filtered data
   int filtered_data_len = read_buffer_len_;
   Filter::FilterStatus status;
-  status = filter_->ReadFilteredData(read_buffer_, &filtered_data_len);
+  status = filter_->ReadData(read_buffer_, &filtered_data_len);
   switch (status) {
     case Filter::FILTER_DONE: {
       *bytes_read = filtered_data_len;
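[Editor's note] SetupFilter() now passes the whole encoding list to Filter::Factory and, when a filter is built, hands it the request URL (SDCH needs the URL to select a dictionary). The ordering consequence is sketched below with invented types; ToyFilter is illustrative only, not Chromium's Filter class. Because the server applies sdch before gzip, the client must undo gzip first.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for a content-decoding step; Chromium's real Filter
// chains differently and performs actual decompression.
struct ToyFilter {
  std::string encoding;
  ToyFilter* next;
};

int main() {
  // Encodings in the order the server applied them, as reported by
  // GetContentEncodings(): sdch first, then gzip layered on top.
  std::vector<std::string> encoding_types;
  encoding_types.push_back("sdch");
  encoding_types.push_back("gzip");

  // Build the decode chain by pushing each filter onto the front: the last
  // encoding applied becomes the first filter to run.
  ToyFilter* chain = NULL;
  for (size_t i = 0; i < encoding_types.size(); ++i) {
    ToyFilter* f = new ToyFilter;
    f->encoding = encoding_types[i];
    f->next = chain;
    chain = f;
  }

  for (ToyFilter* f = chain; f != NULL; ) {
    std::cout << "decode: " << f->encoding << "\n";  // "gzip" then "sdch"
    ToyFilter* next = f->next;
    delete f;
    f = next;
  }
  return 0;
}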
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index b70bbc7..1e4a089 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -5,6 +5,7 @@
 #ifndef NET_URL_REQUEST_URL_REQUEST_JOB_H_
 #define NET_URL_REQUEST_URL_REQUEST_JOB_H_
 
+#include <string>
 #include <vector>
 
 #include "base/basictypes.h"
@@ -28,7 +29,7 @@ class URLRequestJobMetrics;
 // UrlRequestFileJob.
 class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
  public:
-  URLRequestJob(URLRequest* request);
+  explicit URLRequestJob(URLRequest* request);
   virtual ~URLRequestJob();
 
   // Returns the request that owns this job. THIS POINTER MAY BE NULL if the
@@ -44,7 +45,8 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
   // Sets extra request headers for Job types that support request headers.
   virtual void SetExtraRequestHeaders(const std::string& headers) { }
 
-  // If any error occurs while starting the Job, NotifyStartError should be called.
+  // If any error occurs while starting the Job, NotifyStartError should be
+  // called.
   // This helps ensure that all errors follow more similar notification code
   // paths, which should simplify testing.
   virtual void Start() = 0;
@@ -108,10 +110,18 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
   // Returns the HTTP response code for the request.
   virtual int GetResponseCode() { return -1; }
 
-  // Called to fetch the encoding type for this request. Only makes sense for
+  // Called to fetch the encoding types for this request. Only makes sense for
   // some types of requests. Returns true on success. Calling this on a request
   // that doesn't have or specify an encoding type will return false.
-  virtual bool GetContentEncoding(std::string* encoding_type) { return false; }
+  // Returns an array of strings showing the sequential encodings used on the
+  // content. For example, types[0] = "sdch" and types[1] = "gzip" means the
+  // content was first encoded by sdch, and then encoded by gzip. To decode,
+  // a series of filters must be applied in the reverse order (in the above
+  // example, ungzip first, and then sdch expand).
+  // TODO(jar): Cleaner API would return an array of enums.
+  virtual bool GetContentEncodings(std::vector<std::string>* encoding_types) {
+    return false;
+  }
 
   // Called to setup stream filter for this request. An example of filter is
   // content encoding/decoding.
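[Editor's note] Taken together, the four files wire up both halves of the SDCH handshake: the request advertises available dictionaries and accepts the encoding, and the response may use it and suggest further dictionaries via Get-Dictionary. The sketch below assembles the request-side headers the way AddExtraHeaders() now does; the dictionary hash and version string are invented placeholders (the real code obtains them from SdchManager and FileVersionInfo).

#include <iostream>
#include <string>

int main() {
  // Placeholder values, purely for illustration.
  std::string avail_dictionaries = "aBcDeFgH";  // hypothetical dictionary hash
  std::string product_version = "0.2.149.27";   // hypothetical Chrome version

  std::string extra_headers;
  if (!avail_dictionaries.empty())
    extra_headers += "Avail-Dictionary: " + avail_dictionaries + "\r\n";
  extra_headers += "X-SDCH: Chrome " + product_version + "\r\n";
  extra_headers += "Accept-Encoding: gzip,deflate,bzip2,sdch\r\n";

  // A server holding a matching dictionary can then reply with
  // "Content-Encoding: sdch, gzip" and suggest more dictionaries with
  // "Get-Dictionary: <path>", which NotifyHeadersComplete() schedules.
  std::cout << extra_headers;
  return 0;
}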