author    jar@google.com <jar@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2008-09-22 18:21:11 +0000
committer jar@google.com <jar@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2008-09-22 18:21:11 +0000
commit    0a00775a41bd90f4f01017362bac3aeadd6c953e (patch)
tree      26404f7422e9a56f6a955eb14c84d40a14da59ba /net/url_request/url_request_http_job.cc
parent    968e176577ac4fe102ce66987eb97e037f67796f (diff)
Rollback 2444 and 2443 to get linux and mac prep

Also need to augment sln dependencies

tbr

Review URL: http://codereview.chromium.org/3188

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@2446 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request/url_request_http_job.cc')
-rw-r--r-- net/url_request/url_request_http_job.cc | 59
1 file changed, 5 insertions(+), 54 deletions(-)
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 094acd8..8316e73 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -5,15 +5,12 @@
 #include "net/url_request/url_request_http_job.h"
 
 #include "base/compiler_specific.h"
-#include "base/file_util.h"
-#include "base/file_version_info.h"
 #include "base/message_loop.h"
 #include "base/string_util.h"
 #include "net/base/cookie_monster.h"
 #include "net/base/load_flags.h"
 #include "net/base/net_errors.h"
 #include "net/base/net_util.h"
-#include "net/base/sdch_manager.h"
 #include "net/http/http_response_info.h"
 #include "net/http/http_transaction.h"
 #include "net/http/http_transaction_factory.h"
@@ -162,20 +159,15 @@ int URLRequestHttpJob::GetResponseCode() {
   return response_info_->headers->response_code();
 }
 
-bool URLRequestHttpJob::GetContentEncodings(
-    std::vector<std::string>* encoding_types) {
+bool URLRequestHttpJob::GetContentEncoding(std::string* encoding_type) {
   DCHECK(transaction_);
 
   if (!response_info_)
     return false;
 
-  std::string encoding_type;
-  void* iter = NULL;
-  while (response_info_->headers->EnumerateHeader(&iter, "Content-Encoding",
-                                                  &encoding_type)) {
-    encoding_types->push_back(encoding_type);
-  }
-  return !encoding_types->empty();
+  // TODO(darin): what if there are multiple content encodings?
+  return response_info_->headers->EnumerateHeader(NULL, "Content-Encoding",
+                                                  encoding_type);
 }
 
 bool URLRequestHttpJob::IsRedirectResponse(GURL* location,
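
Note: the hunk above rolls the content-encoding API back from returning every
Content-Encoding value to returning only the first one, which is what the
restored TODO flags. A minimal standalone sketch of the two behaviors, using a
plain header list in place of Chromium's HttpResponseHeaders (the Headers alias
and the signatures here are illustrative assumptions, not code from this change):

    #include <string>
    #include <utility>
    #include <vector>

    using Headers = std::vector<std::pair<std::string, std::string>>;

    // Removed behavior: collect every Content-Encoding value, in order.
    // (Real header matching is case-insensitive; this sketch compares exactly.)
    bool GetContentEncodings(const Headers& headers,
                             std::vector<std::string>* encoding_types) {
      for (const auto& h : headers) {
        if (h.first == "Content-Encoding")
          encoding_types->push_back(h.second);
      }
      return !encoding_types->empty();
    }

    // Restored behavior: surface only the first value, so a stacked response
    // ("Content-Encoding: sdch" followed by "Content-Encoding: gzip") loses
    // its second entry; hence the TODO in the hunk above.
    bool GetContentEncoding(const Headers& headers, std::string* encoding_type) {
      for (const auto& h : headers) {
        if (h.first == "Content-Encoding") {
          *encoding_type = h.second;
          return true;
        }
      }
      return false;
    }
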
@@ -421,23 +413,6 @@ void URLRequestHttpJob::NotifyHeadersComplete() {
     }
   }
 
-  // Get list of SDCH dictionary requests, and schedule them to be loaded.
-  if (SdchManager::Global()->IsInSupportedDomain(request_->url())) {
-    static const std::string name = "Get-Dictionary";
-    std::string url_text;
-    void* iter = NULL;
-    // TODO(jar): We need to not fetch dictionaries the first time they are
-    // seen, but rather wait until we can justify their usefulness.
-    // For now, we will only fetch the first dictionary, which will at least
-    // require multiple suggestions before we get additional ones for this site.
-    // Eventually we should wait until a dictionary is requested several times
-    // before we even download it (so that we don't waste memory or bandwidth).
-    if (response_info_->headers->EnumerateHeader(&iter, name, &url_text)) {
-      GURL dictionary_url = request_->url().Resolve(url_text);
-      SdchManager::Global()->FetchDictionary(request_->url(), dictionary_url);
-    }
-  }
-
   URLRequestJob::NotifyHeadersComplete();
 }
 
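
Note: the block deleted above read the first Get-Dictionary header, resolved it
against the request URL, and handed the result to SdchManager for fetching. The
real resolution step is GURL::Resolve(), which implements full RFC 3986 relative
resolution; the helper below is a standalone illustration covering only two
common cases, and its name and signature are assumptions, not Chromium code:

    #include <string>

    // Resolve a Get-Dictionary header value against the request URL.
    // Hypothetical helper; production code uses GURL::Resolve().
    std::string ResolveDictionaryUrl(const std::string& request_url,
                                     const std::string& value) {
      if (value.compare(0, 7, "http://") == 0)
        return value;  // Already an absolute URL.
      if (!value.empty() && value[0] == '/') {
        // Absolute path: keep scheme://host, replace everything after it.
        std::string::size_type host_end =
            request_url.find('/', request_url.find("//") + 2);
        return request_url.substr(0, host_end) + value;
      }
      return std::string();  // Other relative forms are out of scope here.
    }

    // Example: ResolveDictionaryUrl("http://a.com/page", "/dict/v1")
    // yields "http://a.com/dict/v1".
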
@@ -501,32 +476,8 @@ void URLRequestHttpJob::AddExtraHeaders() {
         context->accept_charset() + "\r\n";
   }
 
-  if (!SdchManager::Global()->IsInSupportedDomain(request_->url())) {
-    // Tell the server what compression formats we support (other than SDCH).
-    request_info_.extra_headers += "Accept-Encoding: gzip,deflate,bzip2\r\n";
-    return;
-  }
-
-  // Supply SDCH related headers, as well as accepting that encoding.
-
-  // TODO(jar): See if it is worth optimizing away these bytes when the URL is
-  // probably an img or such. (and SDCH encoding is not likely).
-  std::string avail_dictionaries;
-  SdchManager::Global()->GetAvailDictionaryList(request_->url(),
-                                                &avail_dictionaries);
-  if (!avail_dictionaries.empty())
-    request_info_.extra_headers += "Avail-Dictionary: "
-        + avail_dictionaries + "\r\n";
-
-  scoped_ptr<FileVersionInfo> file_version_info(
-      FileVersionInfo::CreateFileVersionInfoForCurrentModule());
-  request_info_.extra_headers += "X-SDCH: Chrome ";
-  request_info_.extra_headers +=
-      WideToASCII(file_version_info->product_version());
-  request_info_.extra_headers += "\r\n";
-
   // Tell the server what compression formats we support.
-  request_info_.extra_headers += "Accept-Encoding: gzip,deflate,bzip2,sdch\r\n";
+  request_info_.extra_headers += "Accept-Encoding: gzip,deflate,bzip2\r\n";
 }
 
 void URLRequestHttpJob::FetchResponseCookies() {
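
Note: after this rollback AddExtraHeaders() always advertises the same three
encodings, while the deleted branch also advertised sdch and sent the
Avail-Dictionary and X-SDCH headers for SDCH-supported domains. A standalone
sketch contrasting the two header shapes; the parameters stand in for the
SdchManager and FileVersionInfo queries and are assumptions, not code from this
change:

    #include <string>

    std::string BuildExtraHeaders(bool sdch_supported_domain,
                                  const std::string& avail_dictionaries,
                                  const std::string& product_version) {
      std::string headers;
      if (!sdch_supported_domain) {
        // Shape after the rollback: SDCH is never advertised.
        headers += "Accept-Encoding: gzip,deflate,bzip2\r\n";
        return headers;
      }
      // Shape removed by this change, shown for contrast.
      if (!avail_dictionaries.empty())
        headers += "Avail-Dictionary: " + avail_dictionaries + "\r\n";
      headers += "X-SDCH: Chrome " + product_version + "\r\n";
      headers += "Accept-Encoding: gzip,deflate,bzip2,sdch\r\n";
      return headers;
    }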