-rw-r--r--  base/build/base_unittests.vcproj | 4
-rw-r--r--  chrome/browser/automation/url_request_mock_http_job.cc | 11
-rw-r--r--  chrome/browser/automation/url_request_mock_http_job.h | 6
-rw-r--r--  chrome/browser/automation/url_request_slow_download_job.cc | 11
-rw-r--r--  chrome/browser/automation/url_request_slow_download_job.h | 7
-rw-r--r--  chrome/browser/dom_ui/chrome_url_data_manager.cc | 4
-rw-r--r--  chrome/common/net/url_request_intercept_job.cc | 2
-rw-r--r--  chrome/common/net/url_request_intercept_job.h | 5
-rw-r--r--  net/base/bzip2_filter.cc | 5
-rw-r--r--  net/base/bzip2_filter.h | 6
-rw-r--r--  net/base/bzip2_filter_unittest.cc | 33
-rw-r--r--  net/base/filter.cc | 64
-rw-r--r--  net/base/filter.h | 73
-rw-r--r--  net/base/filter_unittest.h | 66
-rw-r--r--  net/base/gzip_filter.cc | 5
-rw-r--r--  net/base/gzip_filter.h | 2
-rw-r--r--  net/base/gzip_filter_unittest.cc | 31
-rw-r--r--  net/base/sdch_filter.cc | 9
-rw-r--r--  net/base/sdch_filter.h | 3
-rw-r--r--  net/base/sdch_filter_unittest.cc | 104
-rw-r--r--  net/url_request/url_request_about_job.cc | 2
-rw-r--r--  net/url_request/url_request_about_job.h | 6
-rw-r--r--  net/url_request/url_request_file_dir_job.cc | 2
-rw-r--r--  net/url_request/url_request_file_dir_job.h | 4
-rw-r--r--  net/url_request/url_request_file_job.cc | 2
-rw-r--r--  net/url_request/url_request_file_job.h | 4
-rw-r--r--  net/url_request/url_request_ftp_job.cc | 2
-rw-r--r--  net/url_request/url_request_ftp_job.h | 20
-rw-r--r--  net/url_request/url_request_http_job.cc | 3
-rw-r--r--  net/url_request/url_request_http_job.h | 3
-rw-r--r--  net/url_request/url_request_job.cc | 35
-rw-r--r--  net/url_request/url_request_job.h | 20
-rw-r--r--  net/url_request/url_request_simple_job.cc | 2
-rw-r--r--  net/url_request/url_request_simple_job.h | 6
-rw-r--r--  net/url_request/url_request_test_job.cc | 2
-rw-r--r--  net/url_request/url_request_test_job.h | 6
36 files changed, 405 insertions(+), 165 deletions(-)
diff --git a/base/build/base_unittests.vcproj b/base/build/base_unittests.vcproj
index 466ebd2..c52e415 100644
--- a/base/build/base_unittests.vcproj
+++ b/base/build/base_unittests.vcproj
@@ -208,6 +208,10 @@
>
</File>
<File
+ RelativePath="..\..\net\base\filter_unittest.h"
+ >
+ </File>
+ <File
RelativePath="..\histogram_unittest.cc"
>
</File>
diff --git a/chrome/browser/automation/url_request_mock_http_job.cc b/chrome/browser/automation/url_request_mock_http_job.cc
index 9c0376f..9220ba7 100644
--- a/chrome/browser/automation/url_request_mock_http_job.cc
+++ b/chrome/browser/automation/url_request_mock_http_job.cc
@@ -61,7 +61,14 @@ URLRequestMockHTTPJob::URLRequestMockHTTPJob(URLRequest* request,
const FilePath& file_path)
: URLRequestFileJob(request, file_path) { }
+// Public virtual version.
void URLRequestMockHTTPJob::GetResponseInfo(net::HttpResponseInfo* info) {
+ // Forward to private const version.
+ GetResponseInfoConst(info);
+}
+
+// Private const version.
+void URLRequestMockHTTPJob::GetResponseInfoConst(net::HttpResponseInfo* info) const {
std::wstring header_file = file_path_.ToWStringHack() + kMockHeaderFileSuffix;
std::string raw_headers;
if (!file_util::ReadFileToString(header_file, &raw_headers))
@@ -72,9 +79,9 @@ void URLRequestMockHTTPJob::GetResponseInfo(net::HttpResponseInfo* info) {
info->headers = new net::HttpResponseHeaders(raw_headers);
}
-bool URLRequestMockHTTPJob::GetMimeType(std::string* mime_type) {
+bool URLRequestMockHTTPJob::GetMimeType(std::string* mime_type) const {
net::HttpResponseInfo info;
- GetResponseInfo(&info);
+ GetResponseInfoConst(&info);
return info.headers && info.headers->GetMimeType(mime_type);
}
diff --git a/chrome/browser/automation/url_request_mock_http_job.h b/chrome/browser/automation/url_request_mock_http_job.h
index 2772f1b..d344e7b 100644
--- a/chrome/browser/automation/url_request_mock_http_job.h
+++ b/chrome/browser/automation/url_request_mock_http_job.h
@@ -7,6 +7,8 @@
#ifndef CHROME_BROWSER_AUTOMATION_URL_REQUEST_MOCK_HTTP_JOB_H__
#define CHROME_BROWSER_AUTOMATION_URL_REQUEST_MOCK_HTTP_JOB_H__
+#include <string>
+
#include "net/url_request/url_request_file_job.h"
class URLRequestMockHTTPJob : public URLRequestFileJob {
@@ -14,7 +16,7 @@ class URLRequestMockHTTPJob : public URLRequestFileJob {
URLRequestMockHTTPJob(URLRequest* request, const FilePath& file_path);
virtual ~URLRequestMockHTTPJob() { }
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
virtual bool GetCharset(std::string* charset);
virtual void GetResponseInfo(net::HttpResponseInfo* info);
@@ -27,6 +29,8 @@ class URLRequestMockHTTPJob : public URLRequestFileJob {
static GURL GetMockUrl(const std::wstring& path);
private:
+ void GetResponseInfoConst(net::HttpResponseInfo* info) const;
+
// This is the file path leading to the root of the directory to use as the
// root of the http server.
static std::wstring base_path_;
diff --git a/chrome/browser/automation/url_request_slow_download_job.cc b/chrome/browser/automation/url_request_slow_download_job.cc
index e185204..7b4f6cb 100644
--- a/chrome/browser/automation/url_request_slow_download_job.cc
+++ b/chrome/browser/automation/url_request_slow_download_job.cc
@@ -131,7 +131,14 @@ void URLRequestSlowDownloadJob::CheckDoneStatus() {
}
}
+// Public virtual version.
void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) {
+ // Forward to private const version.
+ GetResponseInfoConst(info);
+}
+
+// Private const version.
+void URLRequestSlowDownloadJob::GetResponseInfoConst(net::HttpResponseInfo* info) const {
// Send back mock headers.
std::string raw_headers;
if (LowerCaseEqualsASCII(kFinishDownloadUrl,
@@ -156,9 +163,9 @@ void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) {
info->headers = new net::HttpResponseHeaders(raw_headers);
}
-bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) {
+bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const {
net::HttpResponseInfo info;
- GetResponseInfo(&info);
+ GetResponseInfoConst(&info);
return info.headers && info.headers->GetMimeType(mime_type);
}
diff --git a/chrome/browser/automation/url_request_slow_download_job.h b/chrome/browser/automation/url_request_slow_download_job.h
index 847121e..58c1749 100644
--- a/chrome/browser/automation/url_request_slow_download_job.h
+++ b/chrome/browser/automation/url_request_slow_download_job.h
@@ -8,13 +8,14 @@
#ifndef CHROME_BROWSER_AUTOMATION_URL_REQUEST_SLOW_DOWNLOAD_JOB_H__
#define CHROME_BROWSER_AUTOMATION_URL_REQUEST_SLOW_DOWNLOAD_JOB_H__
+#include <string>
#include <vector>
#include "net/url_request/url_request_job.h"
class URLRequestSlowDownloadJob : public URLRequestJob {
public:
- URLRequestSlowDownloadJob(URLRequest* request);
+ explicit URLRequestSlowDownloadJob(URLRequest* request);
virtual ~URLRequestSlowDownloadJob() { }
// Timer callback, used to check to see if we should finish our download and
@@ -23,7 +24,7 @@ class URLRequestSlowDownloadJob : public URLRequestJob {
// URLRequestJob methods
virtual void Start();
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
virtual void GetResponseInfo(net::HttpResponseInfo* info);
virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
@@ -39,6 +40,8 @@ class URLRequestSlowDownloadJob : public URLRequestJob {
static void AddUITestUrls();
private:
+ void GetResponseInfoConst(net::HttpResponseInfo* info) const;
+
// Mark all pending requests to be finished. We keep track of pending
// requests in |kPendingRequests|.
static void FinishPendingRequests();
diff --git a/chrome/browser/dom_ui/chrome_url_data_manager.cc b/chrome/browser/dom_ui/chrome_url_data_manager.cc
index d0e4193..a75af74 100644
--- a/chrome/browser/dom_ui/chrome_url_data_manager.cc
+++ b/chrome/browser/dom_ui/chrome_url_data_manager.cc
@@ -44,7 +44,7 @@ class URLRequestChromeJob : public URLRequestJob {
virtual void Start();
virtual void Kill();
virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
// Called by ChromeURLDataManager to notify us that the data blob is ready
// for us.
@@ -287,7 +287,7 @@ void URLRequestChromeJob::Kill() {
chrome_url_data_manager.RemoveRequest(this);
}
-bool URLRequestChromeJob::GetMimeType(std::string* mime_type) {
+bool URLRequestChromeJob::GetMimeType(std::string* mime_type) const {
*mime_type = mime_type_;
return !mime_type_.empty();
}
diff --git a/chrome/common/net/url_request_intercept_job.cc b/chrome/common/net/url_request_intercept_job.cc
index 81d1a78..733967e 100644
--- a/chrome/common/net/url_request_intercept_job.cc
+++ b/chrome/common/net/url_request_intercept_job.cc
@@ -96,7 +96,7 @@ bool URLRequestInterceptJob::ReadRawData(net::IOBuffer* dest, int dest_size,
return false;
}
-bool URLRequestInterceptJob::GetMimeType(std::string* mime_type) {
+bool URLRequestInterceptJob::GetMimeType(std::string* mime_type) const {
return request_->response_headers()->GetMimeType(mime_type);
}
diff --git a/chrome/common/net/url_request_intercept_job.h b/chrome/common/net/url_request_intercept_job.h
index bc422af..06a9cdd 100644
--- a/chrome/common/net/url_request_intercept_job.h
+++ b/chrome/common/net/url_request_intercept_job.h
@@ -5,6 +5,9 @@
#ifndef CHROME_COMMON_NET_URL_REQUEST_INTERCEPT_JOB_H_
#define CHROME_COMMON_NET_URL_REQUEST_INTERCEPT_JOB_H_
+#include <string>
+
+#include "base/scoped_ptr.h"
#include "net/url_request/url_request_job.h"
#include "chrome/browser/chrome_plugin_host.h"
#include "chrome/common/chrome_plugin_api.h"
@@ -34,7 +37,7 @@ class URLRequestInterceptJob
virtual void Start();
virtual void Kill();
virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int* bytes_read);
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
virtual bool GetCharset(std::string* charset);
virtual void GetResponseInfo(net::HttpResponseInfo* info);
virtual int GetResponseCode();
diff --git a/net/base/bzip2_filter.cc b/net/base/bzip2_filter.cc
index 86ac1ca..5ba9a9f 100644
--- a/net/base/bzip2_filter.cc
+++ b/net/base/bzip2_filter.cc
@@ -5,8 +5,9 @@
#include "base/logging.h"
#include "net/base/bzip2_filter.h"
-BZip2Filter::BZip2Filter()
- : decoding_status_(DECODING_UNINITIALIZED),
+BZip2Filter::BZip2Filter(const FilterContext& filter_context)
+ : Filter(filter_context),
+ decoding_status_(DECODING_UNINITIALIZED),
bzip2_data_stream_(NULL) {
}
diff --git a/net/base/bzip2_filter.h b/net/base/bzip2_filter.h
index 867f1fa..4a21bf4 100644
--- a/net/base/bzip2_filter.h
+++ b/net/base/bzip2_filter.h
@@ -14,8 +14,8 @@
//
// This BZip2Filter internally uses third_party/bzip2 library to do decoding.
//
-// BZip2Filter is also a subclass of Filter. See the latter's header file filter.h
-// for sample usage.
+// BZip2Filter is also a subclass of Filter. See the latter's header file
+// filter.h for sample usage.
#ifndef NET_BASE_BZIP2_FILTER_H_
#define NET_BASE_BZIP2_FILTER_H_
@@ -26,7 +26,7 @@
class BZip2Filter : public Filter {
public:
- BZip2Filter();
+ explicit BZip2Filter(const FilterContext& filter_context);
virtual ~BZip2Filter();
diff --git a/net/base/bzip2_filter_unittest.cc b/net/base/bzip2_filter_unittest.cc
index ae80305..fdc56e9 100644
--- a/net/base/bzip2_filter_unittest.cc
+++ b/net/base/bzip2_filter_unittest.cc
@@ -9,6 +9,7 @@
#include "base/path_service.h"
#include "base/scoped_ptr.h"
#include "net/base/bzip2_filter.h"
+#include "net/base/filter_unittest.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/platform_test.h"
#include "third_party/bzip2/bzlib.h"
@@ -184,7 +185,8 @@ TEST_F(BZip2FilterUnitTest, DecodeBZip2) {
// Decode the compressed data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
memcpy(filter->stream_buffer()->data(), bzip2_encode_buffer_,
bzip2_encode_len_);
@@ -207,7 +209,8 @@ TEST_F(BZip2FilterUnitTest, DecodeBZip2) {
TEST_F(BZip2FilterUnitTest, DecodeWithSmallInputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kSmallBufferSize));
+ MockFilterContext filter_context(kSmallBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
bzip2_encode_buffer_, bzip2_encode_len_,
@@ -218,7 +221,8 @@ TEST_F(BZip2FilterUnitTest, DecodeWithSmallInputBuffer) {
TEST_F(BZip2FilterUnitTest, DecodeWithSmallOutputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
bzip2_encode_buffer_, bzip2_encode_len_,
@@ -232,7 +236,8 @@ TEST_F(BZip2FilterUnitTest, DecodeWithSmallOutputBuffer) {
TEST_F(BZip2FilterUnitTest, DecodeWithOneByteInputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, 1));
+ MockFilterContext filter_context(1);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
bzip2_encode_buffer_, bzip2_encode_len_,
@@ -244,7 +249,8 @@ TEST_F(BZip2FilterUnitTest, DecodeWithOneByteInputBuffer) {
TEST_F(BZip2FilterUnitTest, DecodeWithOneByteInputAndOutputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, 1));
+ MockFilterContext filter_context(1);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
bzip2_encode_buffer_, bzip2_encode_len_, 1, false);
@@ -262,7 +268,8 @@ TEST_F(BZip2FilterUnitTest, DecodeCorruptedData) {
// Decode the correct data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter1(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter1(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter1.get());
Filter::FilterStatus code = DecodeAllWithFilter(filter1.get(),
@@ -275,7 +282,7 @@ TEST_F(BZip2FilterUnitTest, DecodeCorruptedData) {
EXPECT_TRUE(code == Filter::FILTER_DONE);
// Decode the corrupted data with filter
- scoped_ptr<Filter> filter2(Filter::Factory(filter_types, kDefaultBufferSize));
+ scoped_ptr<Filter> filter2(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter2.get());
int pos = corrupt_data_len / 2;
@@ -305,7 +312,8 @@ TEST_F(BZip2FilterUnitTest, DecodeMissingData) {
// Decode the corrupted data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
char corrupt_decode_buffer[kDefaultBufferSize];
int corrupt_decode_size = kDefaultBufferSize;
@@ -330,7 +338,8 @@ TEST_F(BZip2FilterUnitTest, DecodeCorruptedHeader) {
// Decode the corrupted data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
char corrupt_decode_buffer[kDefaultBufferSize];
int corrupt_decode_size = kDefaultBufferSize;
@@ -356,7 +365,8 @@ TEST_F(BZip2FilterUnitTest, DecodeWithExtraDataAndSmallOutputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(),
source_buffer(),
@@ -375,7 +385,8 @@ TEST_F(BZip2FilterUnitTest, DecodeWithExtraDataAndSmallInputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_BZIP2);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kSmallBufferSize));
+ MockFilterContext filter_context(kSmallBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(),
source_buffer(),
diff --git a/net/base/filter.cc b/net/base/filter.cc
index 730aef50..06da53a 100644
--- a/net/base/filter.cc
+++ b/net/base/filter.cc
@@ -37,17 +37,37 @@ const char kTextHtml[] = "text/html";
} // namespace
Filter* Filter::Factory(const std::vector<FilterType>& filter_types,
- int buffer_size) {
- if (filter_types.empty() || buffer_size < 0)
+ const FilterContext& filter_context) {
+ DCHECK(filter_context.GetInputStreambufferSize() > 0);
+ if (filter_types.empty() || filter_context.GetInputStreambufferSize() <= 0)
return NULL;
+
Filter* filter_list = NULL; // Linked list of filters.
for (size_t i = 0; i < filter_types.size(); i++) {
- filter_list = PrependNewFilter(filter_types[i], buffer_size, filter_list);
+ filter_list = PrependNewFilter(filter_types[i], filter_context,
+ filter_list);
if (!filter_list)
return NULL;
}
+ // TODO(jar): These settings should go into the derived classes, on an as-needed basis.
+ std::string mime_type;
+ bool success = filter_context.GetMimeType(&mime_type);
+ DCHECK(success);
+ GURL gurl;
+ success = filter_context.GetURL(&gurl);
+ DCHECK(success);
+ base::Time request_time = filter_context.GetRequestTime();
+ bool is_cached_content = filter_context.IsCachedContent();
+
+ filter_list->SetMimeType(mime_type);
+ filter_list->SetURL(gurl);
+ // Approximate connect time with request_time. If it is not cached, then
+ // this is a good approximation for when the first bytes went on the
+ // wire.
+ filter_list->SetConnectTime(request_time, is_cached_content);
+
return filter_list;
}
@@ -174,15 +194,16 @@ void Filter::FixupEncodingTypes(
}
// static
-Filter* Filter::PrependNewFilter(FilterType type_id, int buffer_size,
+Filter* Filter::PrependNewFilter(FilterType type_id,
+ const FilterContext& filter_context,
Filter* filter_list) {
Filter* first_filter = NULL; // Soon to be start of chain.
switch (type_id) {
case FILTER_TYPE_GZIP_HELPING_SDCH:
case FILTER_TYPE_DEFLATE:
case FILTER_TYPE_GZIP: {
- scoped_ptr<GZipFilter> gz_filter(new GZipFilter());
- if (gz_filter->InitBuffer(buffer_size)) {
+ scoped_ptr<GZipFilter> gz_filter(new GZipFilter(filter_context));
+ if (gz_filter->InitBuffer()) {
if (gz_filter->InitDecoding(type_id)) {
first_filter = gz_filter.release();
}
@@ -190,8 +211,8 @@ Filter* Filter::PrependNewFilter(FilterType type_id, int buffer_size,
break;
}
case FILTER_TYPE_BZIP2: {
- scoped_ptr<BZip2Filter> bzip2_filter(new BZip2Filter());
- if (bzip2_filter->InitBuffer(buffer_size)) {
+ scoped_ptr<BZip2Filter> bzip2_filter(new BZip2Filter(filter_context));
+ if (bzip2_filter->InitBuffer()) {
if (bzip2_filter->InitDecoding(false)) {
first_filter = bzip2_filter.release();
}
@@ -200,8 +221,8 @@ Filter* Filter::PrependNewFilter(FilterType type_id, int buffer_size,
}
case FILTER_TYPE_SDCH:
case FILTER_TYPE_SDCH_POSSIBLE: {
- scoped_ptr<SdchFilter> sdch_filter(new SdchFilter());
- if (sdch_filter->InitBuffer(buffer_size)) {
+ scoped_ptr<SdchFilter> sdch_filter(new SdchFilter(filter_context));
+ if (sdch_filter->InitBuffer()) {
if (sdch_filter->InitDecoding(type_id)) {
first_filter = sdch_filter.release();
}
@@ -213,17 +234,17 @@ Filter* Filter::PrependNewFilter(FilterType type_id, int buffer_size,
}
}
- if (first_filter) {
- first_filter->next_filter_.reset(filter_list);
- } else {
+ if (!first_filter) {
// Cleanup and exit, since we can't construct this filter list.
delete filter_list;
- filter_list = NULL;
+ return NULL;
}
+
+ first_filter->next_filter_.reset(filter_list);
return first_filter;
}
-Filter::Filter()
+Filter::Filter(const FilterContext& filter_context)
: stream_buffer_(NULL),
stream_buffer_size_(0),
next_stream_data_(NULL),
@@ -233,13 +254,16 @@ Filter::Filter()
was_cached_(false),
mime_type_(),
next_filter_(NULL),
- last_status_(FILTER_NEED_MORE_DATA) {
+ last_status_(FILTER_NEED_MORE_DATA),
+ filter_context_(filter_context) {
}
Filter::~Filter() {}
-bool Filter::InitBuffer(int buffer_size) {
- if (buffer_size < 0 || stream_buffer())
+bool Filter::InitBuffer() {
+ int buffer_size = filter_context_.GetInputStreambufferSize();
+ DCHECK(buffer_size > 0);
+ if (buffer_size <= 0 || stream_buffer())
return false;
stream_buffer_ = new net::IOBuffer(buffer_size);
@@ -328,10 +352,12 @@ void Filter::PushDataIntoNextFilter() {
bool Filter::FlushStreamBuffer(int stream_data_len) {
+ DCHECK(stream_data_len <= stream_buffer_size_);
if (stream_data_len <= 0 || stream_data_len > stream_buffer_size_)
return false;
- // bail out if there are more data in the stream buffer to be filtered.
+ DCHECK(stream_buffer());
+ // Bail out if there is more data in the stream buffer to be filtered.
if (!stream_buffer() || stream_data_len_)
return false;
diff --git a/net/base/filter.h b/net/base/filter.h
index 8d07e99..db8e9b2 100644
--- a/net/base/filter.h
+++ b/net/base/filter.h
@@ -39,6 +39,40 @@
#include "googleurl/src/gurl.h"
#include "testing/gtest/include/gtest/gtest_prod.h"
+//------------------------------------------------------------------------------
+// Define an interface class that allows access to contextual information
+// supplied by the owner of this filter.
+class FilterContext {
+ public:
+ FilterContext() {};
+ virtual ~FilterContext() {};
+
+ // What mime type was specified in the header for this data?
+ virtual bool GetMimeType(std::string* mime_type) const = 0;
+
+ // What URL was used to access this data?
+ // Return false if gurl is not present.
+ virtual bool GetURL(GURL* gurl) const = 0;
+
+ // When was this data requested from a server?
+ virtual base::Time GetRequestTime() const = 0;
+
+ // Is data supplied from cache, or fresh across the net?
+ virtual bool IsCachedContent() const = 0;
+
+ // TODO(jar): We could use flags, defined by callers, rather than naming a
+ // protocol here in the base class.
+ // Was this data flagged as a response to a request with an SDCH dictionary?
+ virtual bool IsSdchResponse() const = 0;
+
+ // What is the desirable input buffer size for these filters?
+ virtual int GetInputStreambufferSize() const = 0;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FilterContext);
+};
+
+//------------------------------------------------------------------------------
class Filter {
public:
// Return values of function ReadFilteredData.
@@ -68,25 +102,25 @@ class Filter {
FILTER_TYPE_UNSUPPORTED,
};
-
virtual ~Filter();
// Creates a Filter object.
- // Parameters: Filter_types specifies the type of filter created; Buffer_size
- // specifies the size (in number of chars) of the buffer the filter should
- // allocate to hold pre-filter data.
+ // Parameters: Filter_types specifies the type of filter created;
+ // filter_context allows filters to acquire additional details needed for
+ // construction and operation, such as a specification of requisite input
+ // buffer size.
// If success, the function returns the pointer to the Filter object created.
// If failed or a filter is not needed, the function returns NULL.
//
// Note: filter_types is an array of filter names (content encoding types as
- // provided in an HTTP header), which will be chained together serially do
+ // provided in an HTTP header), which will be chained together serially to do
// successive filtering of data. The names in the vector are ordered based on
// encoding order, and the filters are chained to operate in the reverse
// (decoding) order. For example, types[0] = "sdch", types[1] = "gzip" will
// cause data to first be gunzip filtered, and the resulting output from that
// filter will be sdch decoded.
static Filter* Factory(const std::vector<FilterType>& filter_types,
- int buffer_size);
+ const FilterContext& filter_context);
// External call to obtain data from this filter chain. If there is no
// next_filter_, then it obtains data from this specific filter.
@@ -101,7 +135,7 @@ class Filter {
// Returns the total number of chars remaining in stream_buffer_ to be
// filtered.
//
- // If the function returns 0 then all data have been filtered and the caller
+ // If the function returns 0 then all data has been filtered, and the caller
// is safe to copy new data into stream_buffer_.
int stream_data_len() const { return stream_data_len_; }
@@ -109,8 +143,8 @@ class Filter {
// stream_buffer_, the caller should call this function to notify Filter to
// start filtering. Then after this function is called, the caller can get
// post-filtered data using ReadFilteredData. The caller must not write to
- // stream_buffer_ and call this function again before stream_buffer_ is empty
- // out by ReadFilteredData.
+ // stream_buffer_ and call this function again before stream_buffer_ is
+ // emptied out by ReadFilteredData.
//
// The input stream_data_len is the length (in number of chars) of valid
// data in stream_buffer_. It can not be greater than stream_buffer_size_.
@@ -140,7 +174,7 @@ class Filter {
const std::string& mime_type,
std::vector<FilterType>* encoding_types);
protected:
- Filter();
+ explicit Filter(const FilterContext& filter_context);
FRIEND_TEST(SdchFilterTest, ContentTypeId);
// Filters the data stored in stream_buffer_ and writes the output into the
@@ -159,16 +193,18 @@ class Filter {
// Copy pre-filter data directly to destination buffer without decoding.
FilterStatus CopyOut(char* dest_buffer, int* dest_len);
- // Allocates and initializes stream_buffer_.
- // Buffer_size is the maximum size of stream_buffer_ in number of chars.
- bool InitBuffer(int buffer_size);
+ // Allocates and initializes stream_buffer_ based on filter_context_.
+ // Establishes a buffer large enough to handle the amount specified in
+ // filter_context_.GetInputStreambufferSize().
+ bool InitBuffer();
// A factory helper for creating filters for within a chain of potentially
// multiple encodings. If a chain of filters is created, then this may be
// called multiple times during the filter creation process. In most simple
// cases, this is only called once. Returns NULL and cleans up (deleting
// filter_list) if a new filter can't be constructed.
- static Filter* PrependNewFilter(FilterType type_id, int buffer_size,
+ static Filter* PrependNewFilter(FilterType type_id,
+ const FilterContext& filter_context,
Filter* filter_list);
FilterStatus last_status() const { return last_status_; }
@@ -177,7 +213,7 @@ class Filter {
bool was_cached() const { return was_cached_; }
- // Buffer to hold the data to be filtered.
+ // Buffer to hold the data to be filtered (the input queue).
scoped_refptr<net::IOBuffer> stream_buffer_;
// Maximum size of stream_buffer_ in number of chars.
@@ -189,6 +225,7 @@ class Filter {
// Total number of remaining chars in stream_buffer_ to be filtered.
int stream_data_len_;
+ private: // TODO(jar): Make more data private by moving this up higher.
// The URL that is currently being filtered.
// This is used by SDCH filters which need to restrict use of a dictionary to
// a specific URL or path.
@@ -200,7 +237,6 @@ class Filter {
base::Time connect_time_;
bool was_cached_;
- private: // TODO(jar): Make more data private by moving this up higher.
// Helper function to empty our output into the next filter's input.
void PushDataIntoNextFilter();
@@ -215,6 +251,11 @@ class Filter {
// chained filters.
FilterStatus last_status_;
+ // Context data from the owner of this filter. Some filters need additional
+ // context information (mime type, etc.) to properly function, and they access
+ // this data via this reference member.
+ const FilterContext& filter_context_;
+
DISALLOW_COPY_AND_ASSIGN(Filter);
};
diff --git a/net/base/filter_unittest.h b/net/base/filter_unittest.h
new file mode 100644
index 0000000..aae9b39
--- /dev/null
+++ b/net/base/filter_unittest.h
@@ -0,0 +1,66 @@
+// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef NET_BASE_FILTER_UNITTEST_H_
+#define NET_BASE_FILTER_UNITTEST_H_
+
+#include <string>
+
+#include "googleurl/src/gurl.h"
+#include "net/base/filter.h"
+
+//------------------------------------------------------------------------------
+class MockFilterContext : public FilterContext {
+ public:
+ explicit MockFilterContext(int buffer_size) : buffer_size_(buffer_size) {}
+
+ void SetBufferSize(int buffer_size) { buffer_size_ = buffer_size; }
+ void SetMimeType(const std::string& mime_type) { mime_type_ = mime_type; }
+ void SetURL(const GURL& gurl) { gurl_ = gurl; }
+ void SetRequestTime(const base::Time time) { request_time_ = time; }
+ void SetCached(bool is_cached) { is_cached_content_ = is_cached; }
+ void SetSdchResponse(bool is_sdch_response) {
+ is_sdch_response_ = is_sdch_response;
+ }
+
+ virtual bool GetMimeType(std::string* mime_type) const {
+ *mime_type = mime_type_;
+ return true;
+ }
+
+ // What URL was used to access this data?
+ // Return false if gurl is not present.
+ virtual bool GetURL(GURL* gurl) const {
+ *gurl = gurl_;
+ return true;
+ }
+
+ // When was this data requested from a server?
+ virtual base::Time GetRequestTime() const {
+ return request_time_;
+ }
+
+ // Is data supplied from cache, or fresh across the net?
+ virtual bool IsCachedContent() const { return is_cached_content_; }
+
+ // Was this data flagged as a response to a request with an SDCH dictionary?
+ virtual bool IsSdchResponse() const { return is_sdch_response_; }
+
+ // What is the desirable input buffer size for these filters?
+ virtual int GetInputStreambufferSize() const { return buffer_size_; }
+
+
+ private:
+ int buffer_size_;
+ std::string mime_type_;
+ GURL gurl_;
+ base::Time request_time_;
+ bool is_cached_content_;
+ bool is_sdch_response_;
+
+ DISALLOW_COPY_AND_ASSIGN(MockFilterContext);
+};
+
+#endif // NET_BASE_FILTER_UNITTEST_H_
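For reference, the updated unit tests exercise this mock roughly as in the sketch below; the buffer size, filter type, and URL are illustrative stand-ins for the values the individual tests pass in.

  // Supply the contextual settings the filter chain needs via the mock.
  MockFilterContext filter_context(kDefaultBufferSize);
  filter_context.SetURL(GURL("http://www.example.com/path"));

  // Build the decoding chain; Filter::Factory() now obtains the input buffer
  // size, URL, mime type, and cache status from the FilterContext instead of
  // taking a raw buffer size argument.
  std::vector<Filter::FilterType> filter_types;
  filter_types.push_back(Filter::FILTER_TYPE_GZIP);
  scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
  ASSERT_TRUE(filter.get());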
diff --git a/net/base/gzip_filter.cc b/net/base/gzip_filter.cc
index 79395a2..e0021b4 100644
--- a/net/base/gzip_filter.cc
+++ b/net/base/gzip_filter.cc
@@ -8,8 +8,9 @@
#include "net/base/gzip_header.h"
#include "third_party/zlib/zlib.h"
-GZipFilter::GZipFilter()
- : decoding_status_(DECODING_UNINITIALIZED),
+GZipFilter::GZipFilter(const FilterContext& filter_context)
+ : Filter(filter_context),
+ decoding_status_(DECODING_UNINITIALIZED),
decoding_mode_(DECODE_MODE_UNKNOWN),
gzip_header_status_(GZIP_CHECK_HEADER_IN_PROGRESS),
zlib_header_added_(false),
diff --git a/net/base/gzip_filter.h b/net/base/gzip_filter.h
index f0fc3f2..8043075 100644
--- a/net/base/gzip_filter.h
+++ b/net/base/gzip_filter.h
@@ -23,7 +23,7 @@ typedef struct z_stream_s z_stream;
class GZipFilter : public Filter {
public:
- GZipFilter();
+ explicit GZipFilter(const FilterContext& filter_context);
virtual ~GZipFilter();
diff --git a/net/base/gzip_filter_unittest.cc b/net/base/gzip_filter_unittest.cc
index 654ef40..c656450 100644
--- a/net/base/gzip_filter_unittest.cc
+++ b/net/base/gzip_filter_unittest.cc
@@ -9,6 +9,7 @@
#include "base/path_service.h"
#include "base/scoped_ptr.h"
#include "net/base/gzip_filter.h"
+#include "net/base/filter_unittest.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/platform_test.h"
#include "third_party/zlib/zlib.h"
@@ -230,7 +231,8 @@ TEST_F(GZipUnitTest, DecodeDeflate) {
// Decode the compressed data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_DEFLATE);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
memcpy(filter->stream_buffer()->data(), deflate_encode_buffer_,
deflate_encode_len_);
@@ -250,7 +252,8 @@ TEST_F(GZipUnitTest, DecodeGZip) {
// Decode the compressed data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_GZIP);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
memcpy(filter->stream_buffer()->data(), gzip_encode_buffer_,
gzip_encode_len_);
@@ -275,7 +278,8 @@ TEST_F(GZipUnitTest, DecodeGZipWithMistakenSdch) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
filter_types.push_back(Filter::FILTER_TYPE_GZIP);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
memcpy(filter->stream_buffer()->data(), gzip_encode_buffer_,
gzip_encode_len_);
@@ -296,7 +300,8 @@ TEST_F(GZipUnitTest, DecodeGZipWithMistakenSdch) {
TEST_F(GZipUnitTest, DecodeWithSmallBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_DEFLATE);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kSmallBufferSize));
+ MockFilterContext filter_context(kSmallBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
deflate_encode_buffer_, deflate_encode_len_,
@@ -310,7 +315,8 @@ TEST_F(GZipUnitTest, DecodeWithSmallBuffer) {
TEST_F(GZipUnitTest, DecodeWithOneByteBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_GZIP);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, 1));
+ MockFilterContext filter_context(1);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
gzip_encode_buffer_, gzip_encode_len_,
@@ -321,7 +327,8 @@ TEST_F(GZipUnitTest, DecodeWithOneByteBuffer) {
TEST_F(GZipUnitTest, DecodeWithSmallOutputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_DEFLATE);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
deflate_encode_buffer_, deflate_encode_len_,
@@ -333,7 +340,8 @@ TEST_F(GZipUnitTest, DecodeWithSmallOutputBuffer) {
TEST_F(GZipUnitTest, DecodeWithOneByteInputAndOutputBuffer) {
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_GZIP);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, 1));
+ MockFilterContext filter_context(1);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
DecodeAndCompareWithFilter(filter.get(), source_buffer(), source_len(),
gzip_encode_buffer_, gzip_encode_len_, 1);
@@ -351,7 +359,8 @@ TEST_F(GZipUnitTest, DecodeCorruptedData) {
// Decode the corrupted data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_DEFLATE);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
char corrupt_decode_buffer[kDefaultBufferSize];
int corrupt_decode_size = kDefaultBufferSize;
@@ -377,7 +386,8 @@ TEST_F(GZipUnitTest, DecodeMissingData) {
// Decode the corrupted data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_DEFLATE);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
char corrupt_decode_buffer[kDefaultBufferSize];
int corrupt_decode_size = kDefaultBufferSize;
@@ -400,7 +410,8 @@ TEST_F(GZipUnitTest, DecodeCorruptedHeader) {
// Decode the corrupted data with filter
std::vector<Filter::FilterType> filter_types;
filter_types.push_back(Filter::FILTER_TYPE_GZIP);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kDefaultBufferSize));
+ MockFilterContext filter_context(kDefaultBufferSize);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
ASSERT_TRUE(filter.get());
char corrupt_decode_buffer[kDefaultBufferSize];
int corrupt_decode_size = kDefaultBufferSize;
diff --git a/net/base/sdch_filter.cc b/net/base/sdch_filter.cc
index 2d44421..5040395 100644
--- a/net/base/sdch_filter.cc
+++ b/net/base/sdch_filter.cc
@@ -14,8 +14,9 @@
#include "sdch/open-vcdiff/src/google/vcdecoder.h"
-SdchFilter::SdchFilter()
- : decoding_status_(DECODING_UNINITIALIZED),
+SdchFilter::SdchFilter(const FilterContext& filter_context)
+ : Filter(filter_context),
+ decoding_status_(DECODING_UNINITIALIZED),
vcdiff_streaming_decoder_(NULL),
dictionary_hash_(),
dictionary_hash_is_plausible_(false),
@@ -182,7 +183,7 @@ Filter::FilterStatus SdchFilter::ReadFilteredData(char* dest_buffer,
// that is sufficiently unlikely that we ignore it.
if (std::string::npos == mime_type().find_first_of("text/html")) {
SdchManager::BlacklistDomainForever(url());
- if (was_cached_)
+ if (was_cached())
SdchManager::SdchErrorRecovery(
SdchManager::CACHED_META_REFRESH_UNSUPPORTED);
else
@@ -192,7 +193,7 @@ Filter::FilterStatus SdchFilter::ReadFilteredData(char* dest_buffer,
}
// HTML content means we can issue a meta-refresh, and get the content
// again, perhaps without SDCH (to be safe).
- if (was_cached_) {
+ if (was_cached()) {
// Cached content is probably a startup tab, so we'll just get fresh
// content and try again, without disabling sdch.
SdchManager::SdchErrorRecovery(
diff --git a/net/base/sdch_filter.h b/net/base/sdch_filter.h
index d8ed354..c8cf7f4 100644
--- a/net/base/sdch_filter.h
+++ b/net/base/sdch_filter.h
@@ -15,6 +15,7 @@
#define NET_BASE_SDCH_FILTER_H_
#include <string>
+#include <vector>
#include "base/scoped_ptr.h"
#include "net/base/filter.h"
@@ -28,7 +29,7 @@ namespace open_vcdiff {
class SdchFilter : public Filter {
public:
- SdchFilter();
+ explicit SdchFilter(const FilterContext& filter_context);
virtual ~SdchFilter();
diff --git a/net/base/sdch_filter_unittest.cc b/net/base/sdch_filter_unittest.cc
index b3d704e..1b9b5b6 100644
--- a/net/base/sdch_filter_unittest.cc
+++ b/net/base/sdch_filter_unittest.cc
@@ -11,6 +11,7 @@
#include "base/logging.h"
#include "base/scoped_ptr.h"
#include "net/base/filter.h"
+#include "net/base/filter_unittest.h"
#include "net/base/sdch_filter.h"
#include "net/url_request/url_request_http_job.cc"
#include "testing/gtest/include/gtest/gtest.h"
@@ -151,9 +152,10 @@ TEST_F(SdchFilterTest, BasicBadDictionary) {
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
const int kInputBufferSize(30);
char output_buffer[20];
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+ MockFilterContext filter_context(kInputBufferSize);
std::string url_string("http://ignore.com");
- filter->SetURL(GURL(url_string));
+ filter_context.SetURL(GURL(url_string));
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
// With no input data, try to read output.
@@ -246,8 +248,10 @@ TEST_F(SdchFilterTest, BasicDictionary) {
// Decode with a large buffer (larger than test input, or compressed data).
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
- filter->SetURL(url);
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(url);
+
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
size_t feed_block_size = 100;
size_t output_block_size = 100;
@@ -257,8 +261,7 @@ TEST_F(SdchFilterTest, BasicDictionary) {
EXPECT_EQ(output, expanded_);
// Decode with really small buffers (size 1) to check for edge effects.
- filter.reset((Filter::Factory(filter_types, kInputBufferSize)));
- filter->SetURL(url);
+ filter.reset((Filter::Factory(filter_types, filter_context)));
feed_block_size = 1;
output_block_size = 1;
@@ -284,12 +287,14 @@ TEST_F(SdchFilterTest, NoDecodeHttps) {
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL("https://" + kSampleDomain));
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
+
const size_t feed_block_size(100);
const size_t output_block_size(100);
std::string output;
- filter->SetURL(GURL("https://" + kSampleDomain));
EXPECT_FALSE(FilterTestData(compressed, feed_block_size, output_block_size,
filter.get(), &output));
}
@@ -314,12 +319,14 @@ TEST_F(SdchFilterTest, NoDecodeFtp) {
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL("ftp://" + kSampleDomain));
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
+
const size_t feed_block_size(100);
const size_t output_block_size(100);
std::string output;
- filter->SetURL(GURL("ftp://" + kSampleDomain));
EXPECT_FALSE(FilterTestData(compressed, feed_block_size, output_block_size,
filter.get(), &output));
}
@@ -340,12 +347,14 @@ TEST_F(SdchFilterTest, NoDecodeFileColon) {
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL("file://" + kSampleDomain));
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
+
const size_t feed_block_size(100);
const size_t output_block_size(100);
std::string output;
- filter->SetURL(GURL("file://" + kSampleDomain));
EXPECT_FALSE(FilterTestData(compressed, feed_block_size, output_block_size,
filter.get(), &output));
}
@@ -366,12 +375,14 @@ TEST_F(SdchFilterTest, NoDecodeAboutColon) {
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL("about://" + kSampleDomain));
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
+
const size_t feed_block_size(100);
const size_t output_block_size(100);
std::string output;
- filter->SetURL(GURL("about://" + kSampleDomain));
EXPECT_FALSE(FilterTestData(compressed, feed_block_size, output_block_size,
filter.get(), &output));
}
@@ -392,12 +403,14 @@ TEST_F(SdchFilterTest, NoDecodeJavaScript) {
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL("javascript://" + kSampleDomain));
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
+
const size_t feed_block_size(100);
const size_t output_block_size(100);
std::string output;
- filter->SetURL(GURL("javascript://" + kSampleDomain));
EXPECT_FALSE(FilterTestData(compressed, feed_block_size, output_block_size,
filter.get(), &output));
}
@@ -418,12 +431,14 @@ TEST_F(SdchFilterTest, CanStillDecodeHttp) {
filter_types.push_back(Filter::FILTER_TYPE_SDCH);
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL("http://" + kSampleDomain));
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
+
const size_t feed_block_size(100);
const size_t output_block_size(100);
std::string output;
- filter->SetURL(GURL("http://" + kSampleDomain));
EXPECT_TRUE(FilterTestData(compressed, feed_block_size, output_block_size,
filter.get(), &output));
}
@@ -445,10 +460,11 @@ TEST_F(SdchFilterTest, CrossDomainDictionaryUse) {
const int kInputBufferSize(100);
// Decode with content arriving from the "wrong" domain.
- // This tests CanSet() in the sdch_manager_->
- scoped_ptr<Filter> filter((Filter::Factory(filter_types, kInputBufferSize)));
+ // This tests SdchManager::CanSet().
+ MockFilterContext filter_context(kInputBufferSize);
GURL wrong_domain_url("http://www.wrongdomain.com");
- filter->SetURL(wrong_domain_url);
+ filter_context.SetURL(wrong_domain_url);
+ scoped_ptr<Filter> filter((Filter::Factory(filter_types, filter_context)));
size_t feed_block_size = 100;
size_t output_block_size = 100;
@@ -486,8 +502,9 @@ TEST_F(SdchFilterTest, DictionaryPathValidation) {
const int kInputBufferSize(100);
// Test decode the path data, arriving from a valid path.
- scoped_ptr<Filter> filter((Filter::Factory(filter_types, kInputBufferSize)));
- filter->SetURL(GURL(url_string + path));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL(url_string + path));
+ scoped_ptr<Filter> filter((Filter::Factory(filter_types, filter_context)));
size_t feed_block_size = 100;
size_t output_block_size = 100;
@@ -498,8 +515,8 @@ TEST_F(SdchFilterTest, DictionaryPathValidation) {
EXPECT_EQ(output, expanded_);
// Test decode the path data, arriving from a invalid path.
- filter.reset((Filter::Factory(filter_types, kInputBufferSize)));
- filter->SetURL(GURL(url_string));
+ filter_context.SetURL(GURL(url_string));
+ filter.reset((Filter::Factory(filter_types, filter_context)));
feed_block_size = 100;
output_block_size = 100;
@@ -539,8 +556,9 @@ TEST_F(SdchFilterTest, DictionaryPortValidation) {
const int kInputBufferSize(100);
// Test decode the port data, arriving from a valid port.
- scoped_ptr<Filter> filter((Filter::Factory(filter_types, kInputBufferSize)));
- filter->SetURL(GURL(url_string + ":" + port));
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(GURL(url_string + ":" + port));
+ scoped_ptr<Filter> filter((Filter::Factory(filter_types, filter_context)));
size_t feed_block_size = 100;
size_t output_block_size = 100;
@@ -550,8 +568,8 @@ TEST_F(SdchFilterTest, DictionaryPortValidation) {
EXPECT_EQ(output, expanded_);
// Test decode the port data, arriving from a valid (default) port.
- filter.reset((Filter::Factory(filter_types, kInputBufferSize)));
- filter->SetURL(GURL(url_string)); // Default port.
+ filter_context.SetURL(GURL(url_string)); // Default port.
+ filter.reset((Filter::Factory(filter_types, filter_context)));
feed_block_size = 100;
output_block_size = 100;
@@ -561,8 +579,8 @@ TEST_F(SdchFilterTest, DictionaryPortValidation) {
EXPECT_EQ(output, expanded_);
// Test decode the port data, arriving from a invalid port.
- filter.reset((Filter::Factory(filter_types, kInputBufferSize)));
- filter->SetURL(GURL(url_string + ":" + port + "1"));
+ filter_context.SetURL(GURL(url_string + ":" + port + "1"));
+ filter.reset((Filter::Factory(filter_types, filter_context)));
feed_block_size = 100;
output_block_size = 100;
@@ -661,9 +679,9 @@ TEST_F(SdchFilterTest, FilterChaining) {
CHECK(kLargeInputBufferSize > gzip_compressed_sdch.size());
CHECK(kLargeInputBufferSize > sdch_compressed.size());
CHECK(kLargeInputBufferSize > expanded_.size());
- scoped_ptr<Filter> filter(Filter::Factory(filter_types,
- kLargeInputBufferSize));
- filter->SetURL(url);
+ MockFilterContext filter_context(kLargeInputBufferSize);
+ filter_context.SetURL(url);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
// Verify that chained filter is waiting for data.
char tiny_output_buffer[10];
@@ -687,8 +705,9 @@ TEST_F(SdchFilterTest, FilterChaining) {
// We'll go even further, and force the chain to flush the buffer between the
// two filters more than once (that is why we multiply by 2).
CHECK(kMidSizedInputBufferSize * 2 < sdch_compressed.size());
- filter.reset(Filter::Factory(filter_types, kMidSizedInputBufferSize));
- filter->SetURL(url);
+ filter_context.SetBufferSize(kMidSizedInputBufferSize);
+ filter_context.SetURL(url);
+ filter.reset(Filter::Factory(filter_types, filter_context));
feed_block_size = kMidSizedInputBufferSize;
output_block_size = kMidSizedInputBufferSize;
@@ -698,8 +717,8 @@ TEST_F(SdchFilterTest, FilterChaining) {
EXPECT_EQ(output, expanded_);
// Next try with a tiny input and output buffer to cover edge effects.
- filter.reset(Filter::Factory(filter_types, kLargeInputBufferSize));
- filter->SetURL(url);
+ filter_context.SetBufferSize(kLargeInputBufferSize);
+ filter.reset(Filter::Factory(filter_types, filter_context));
feed_block_size = 1;
output_block_size = 1;
@@ -732,8 +751,10 @@ TEST_F(SdchFilterTest, DefaultGzipIfSdch) {
// First try with a large buffer (larger than test input, or compressed data).
const int kInputBufferSize(100);
- scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
- filter->SetURL(url);
+ MockFilterContext filter_context(kInputBufferSize);
+ filter_context.SetURL(url);
+ scoped_ptr<Filter> filter(Filter::Factory(filter_types, filter_context));
+
// Verify that chained filter is waiting for data.
char tiny_output_buffer[10];
@@ -749,8 +770,7 @@ TEST_F(SdchFilterTest, DefaultGzipIfSdch) {
EXPECT_EQ(output, expanded_);
// Next try with a tiny buffer to cover edge effects.
- filter.reset(Filter::Factory(filter_types, kInputBufferSize));
- filter->SetURL(url);
+ filter.reset(Filter::Factory(filter_types, filter_context));
feed_block_size = 1;
output_block_size = 1;
diff --git a/net/url_request/url_request_about_job.cc b/net/url_request/url_request_about_job.cc
index a9afd67..9c4bd30 100644
--- a/net/url_request/url_request_about_job.cc
+++ b/net/url_request/url_request_about_job.cc
@@ -27,7 +27,7 @@ void URLRequestAboutJob::Start() {
this, &URLRequestAboutJob::StartAsync));
}
-bool URLRequestAboutJob::GetMimeType(std::string* mime_type) {
+bool URLRequestAboutJob::GetMimeType(std::string* mime_type) const {
*mime_type = "text/html";
return true;
}
diff --git a/net/url_request/url_request_about_job.h b/net/url_request/url_request_about_job.h
index f64b1f3e..bcddd6a 100644
--- a/net/url_request/url_request_about_job.h
+++ b/net/url_request/url_request_about_job.h
@@ -5,15 +5,17 @@
#ifndef NET_URL_REQUEST_URL_REQUEST_ABOUT_JOB_H_
#define NET_URL_REQUEST_URL_REQUEST_ABOUT_JOB_H_
+#include <string>
+
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_job.h"
class URLRequestAboutJob : public URLRequestJob {
public:
- URLRequestAboutJob(URLRequest* request);
+ explicit URLRequestAboutJob(URLRequest* request);
virtual void Start();
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
static URLRequest::ProtocolFactory Factory;
diff --git a/net/url_request/url_request_file_dir_job.cc b/net/url_request/url_request_file_dir_job.cc
index 1608684..4a21f98 100644
--- a/net/url_request/url_request_file_dir_job.cc
+++ b/net/url_request/url_request_file_dir_job.cc
@@ -86,7 +86,7 @@ bool URLRequestFileDirJob::ReadRawData(net::IOBuffer* buf, int buf_size,
return false;
}
-bool URLRequestFileDirJob::GetMimeType(string* mime_type) {
+bool URLRequestFileDirJob::GetMimeType(string* mime_type) const {
*mime_type = "text/html";
return true;
}
diff --git a/net/url_request/url_request_file_dir_job.h b/net/url_request/url_request_file_dir_job.h
index c3881dc..6dea9d7 100644
--- a/net/url_request/url_request_file_dir_job.h
+++ b/net/url_request/url_request_file_dir_job.h
@@ -5,6 +5,8 @@
#ifndef NET_URL_REQUEST_URL_REQUEST_FILE_DIR_JOB_H__
#define NET_URL_REQUEST_URL_REQUEST_FILE_DIR_JOB_H__
+#include <string>
+
#include "base/file_path.h"
#include "base/file_util.h"
#include "net/base/directory_lister.h"
@@ -22,7 +24,7 @@ class URLRequestFileDirJob
virtual void StartAsync();
virtual void Kill();
virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
virtual bool GetCharset(std::string* charset);
virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
diff --git a/net/url_request/url_request_file_job.cc b/net/url_request/url_request_file_job.cc
index e120aa6..bc7a50d 100644
--- a/net/url_request/url_request_file_job.cc
+++ b/net/url_request/url_request_file_job.cc
@@ -150,7 +150,7 @@ bool URLRequestFileJob::ReadRawData(net::IOBuffer* dest, int dest_size,
return false;
}
-bool URLRequestFileJob::GetMimeType(std::string* mime_type) {
+bool URLRequestFileJob::GetMimeType(std::string* mime_type) const {
DCHECK(request_);
return net::GetMimeTypeFromFile(file_path_, mime_type);
}
diff --git a/net/url_request/url_request_file_job.h b/net/url_request/url_request_file_job.h
index 3aff74b..1558c61 100644
--- a/net/url_request/url_request_file_job.h
+++ b/net/url_request/url_request_file_job.h
@@ -5,6 +5,8 @@
#ifndef NET_URL_REQUEST_URL_REQUEST_FILE_JOB_H_
#define NET_URL_REQUEST_URL_REQUEST_FILE_JOB_H_
+#include <string>
+
#include "base/file_path.h"
#include "base/file_util.h"
#include "net/base/completion_callback.h"
@@ -22,7 +24,7 @@ class URLRequestFileJob : public URLRequestJob {
virtual void Kill();
virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int* bytes_read);
virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
static URLRequest::ProtocolFactory Factory;
diff --git a/net/url_request/url_request_ftp_job.cc b/net/url_request/url_request_ftp_job.cc
index cf41f11..dee4fbb 100644
--- a/net/url_request/url_request_ftp_job.cc
+++ b/net/url_request/url_request_ftp_job.cc
@@ -80,7 +80,7 @@ void URLRequestFtpJob::Start() {
SendRequest();
}
-bool URLRequestFtpJob::GetMimeType(std::string* mime_type) {
+bool URLRequestFtpJob::GetMimeType(std::string* mime_type) const {
if (!is_directory_)
return false;
diff --git a/net/url_request/url_request_ftp_job.h b/net/url_request/url_request_ftp_job.h
index 44e845c..9dcd7e4 100644
--- a/net/url_request/url_request_ftp_job.h
+++ b/net/url_request/url_request_ftp_job.h
@@ -5,6 +5,8 @@
#ifndef NET_URL_REQUEST_URL_REQUEST_FTP_JOB_H_
#define NET_URL_REQUEST_URL_REQUEST_FTP_JOB_H_
+#include <string>
+
#include "net/url_request/url_request_inet_job.h"
// A basic FTP job that handles downloading files and showing directory listings.
@@ -16,13 +18,13 @@ class URLRequestFtpJob : public URLRequestInetJob {
// URLRequestJob methods:
virtual void Start();
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
// URLRequestInetJob methods:
virtual void OnIOComplete(const AsyncResult& result);
protected:
- URLRequestFtpJob(URLRequest* request);
+ explicit URLRequestFtpJob(URLRequest* request);
// Starts the WinInet request.
virtual void SendRequest();
@@ -65,17 +67,19 @@ class URLRequestFtpJob : public URLRequestInetJob {
// Continuation function for calling OnIOComplete through the message loop.
virtual void ContinueIOComplete(int bytes_written);
- // Continuation function for calling NotifyHeadersComplete through
- //the message loop
+ // Continuation function for calling NotifyHeadersComplete through the message
+ // loop.
virtual void ContinueNotifyHeadersComplete();
typedef enum {
START = 0x200, // initial state of the ftp job
CONNECTING, // opening the url
- SETTING_CUR_DIRECTORY, // attempting to change current dir to match request
- FINDING_FIRST_FILE, // retrieving first file information in cur dir (by FtpFindFirstFile)
+ SETTING_CUR_DIRECTORY, // attempting to change current dir to match request
+ FINDING_FIRST_FILE, // retrieving first file information in cur dir (by
+ // FtpFindFirstFile)
GETTING_DIRECTORY, // retrieving the directory listing (if directory)
- GETTING_FILE_HANDLE, // initiate access to file by call to FtpOpenFile (if file)
+ GETTING_FILE_HANDLE, // initiate access to file by call to FtpOpenFile
+ // (if file).
GETTING_FILE, // retrieving the file (if file)
DONE // URLRequestInetJob is reading the response now
} FtpJobState;
@@ -91,7 +95,7 @@ class URLRequestFtpJob : public URLRequestInetJob {
bool is_directory_; // does the url point to a file or directory
WIN32_FIND_DATAA find_data_;
- std::string directory_html_; // if url is directory holds html
+ std::string directory_html_; // if url is directory holds html
// When building a directory listing, we need to temporarily hold on to the
// buffer in between the time a Read() call comes in and we get the file
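
The FtpJobState enum above doubles as documentation of the job's state machine. A rough, hypothetical walk-through of the file-download path; the standalone enum copy and the transition helper below are invented for illustration and ignore the directory branch through GETTING_DIRECTORY:

// Illustration only: a standalone copy of the states plus a hypothetical
// transition helper for the file-download path.
enum FtpJobStateSketch {
  START = 0x200,          // initial state of the ftp job
  CONNECTING,             // opening the url
  SETTING_CUR_DIRECTORY,  // changing the current dir to match the request
  FINDING_FIRST_FILE,     // retrieving first file info via FtpFindFirstFile
  GETTING_DIRECTORY,      // retrieving the directory listing (if directory)
  GETTING_FILE_HANDLE,    // opening the file via FtpOpenFile (if file)
  GETTING_FILE,           // retrieving the file (if file)
  DONE                    // URLRequestInetJob is reading the response now
};

FtpJobStateSketch NextStateForFile(FtpJobStateSketch state) {
  switch (state) {
    case START:                 return CONNECTING;
    case CONNECTING:            return SETTING_CUR_DIRECTORY;
    case SETTING_CUR_DIRECTORY: return FINDING_FIRST_FILE;
    case FINDING_FIRST_FILE:    return GETTING_FILE_HANDLE;  // directories go
                                                             // to GETTING_DIRECTORY
    case GETTING_FILE_HANDLE:   return GETTING_FILE;
    case GETTING_FILE:          return DONE;
    default:                    return DONE;
  }
}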
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 32c6ed2..62ffb42 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -139,7 +139,7 @@ uint64 URLRequestHttpJob::GetUploadProgress() const {
return transaction_.get() ? transaction_->GetUploadProgress() : 0;
}
-bool URLRequestHttpJob::GetMimeType(std::string* mime_type) {
+bool URLRequestHttpJob::GetMimeType(std::string* mime_type) const {
DCHECK(transaction_.get());
if (!response_info_)
@@ -206,6 +206,7 @@ bool URLRequestHttpJob::GetContentEncodings(
if (!encoding_types->empty()) {
std::string mime_type;
GetMimeType(&mime_type);
+ // TODO(jar): Need to change this call to use the FilterContext interfaces.
Filter::FixupEncodingTypes(IsSdchResponse(), mime_type, encoding_types);
}
return !encoding_types->empty();
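
The TODO(jar) above is the same FilterContext migration showing up at a call site: today the job still computes the mime type itself before calling Filter::FixupEncodingTypes(). A hypothetical sketch of that follow-up; the helper and its signature are invented for illustration, and the SDCH flag still has to come from the job since nothing in this patch exposes it through the context:

#include <string>
#include <vector>

#include "net/base/filter.h"  // Assumed to declare Filter and FilterContext.

// Invented helper, not part of this change.
static void FixupEncodingTypesFromContext(
    const FilterContext& filter_context,
    bool is_sdch_response,  // still supplied by the job for now
    std::vector<Filter::FilterType>* encoding_types) {
  std::string mime_type;
  filter_context.GetMimeType(&mime_type);
  // Same heuristics as the existing call above, but the mime type comes
  // from the context instead of a GetMimeType() call on the job.
  Filter::FixupEncodingTypes(is_sdch_response, mime_type, encoding_types);
}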
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index 53d46f4..b165d2e 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -5,6 +5,7 @@
#ifndef NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H_
#define NET_URL_REQUEST_URL_REQUEST_HTTP_JOB_H_
+#include <string>
#include <vector>
#include "base/scoped_ptr.h"
@@ -37,7 +38,7 @@ class URLRequestHttpJob : public URLRequestJob {
virtual void Kill();
virtual net::LoadState GetLoadState() const;
virtual uint64 GetUploadProgress() const;
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
virtual bool GetCharset(std::string* charset);
virtual void GetResponseInfo(net::HttpResponseInfo* info);
virtual bool GetResponseCookies(std::vector<std::string>* cookies);
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index a82f879..2873ae2 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -18,7 +18,8 @@ using base::Time;
using base::TimeTicks;
// Buffer size allocated when de-compressing data.
-static const int kFilterBufSize = 32 * 1024;
+// static
+const int URLRequestJob::kFilterBufSize = 32 * 1024;
URLRequestJob::URLRequestJob(URLRequest* request)
: request_(request),
@@ -54,18 +55,7 @@ void URLRequestJob::DetachRequest() {
void URLRequestJob::SetupFilter() {
std::vector<Filter::FilterType> encoding_types;
if (GetContentEncodings(&encoding_types)) {
- filter_.reset(Filter::Factory(encoding_types, kFilterBufSize));
- if (filter_.get()) {
- std::string mime_type;
- GetMimeType(&mime_type);
- filter_->SetURL(request_->url());
- filter_->SetMimeType(mime_type);
- // Approximate connect time with request_time. If it is not cached, then
- // this is a good approximation for when the first bytes went on the
- // wire.
- filter_->SetConnectTime(request_->response_info_.request_time,
- request_->response_info_.was_cached);
- }
+ filter_.reset(Filter::Factory(encoding_types, *this));
}
}
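
The removed block shows exactly what the job used to push into the filter by hand: the URL, the mime type, and the request time as an approximation of connect time. With Filter::Factory(encoding_types, *this), the factory can pull the same values through the FilterContext. A sketch of the equivalent wiring, assuming the filter keeps its existing setters; the helper below is invented for illustration:

#include <string>

#include "base/time.h"
#include "googleurl/src/gurl.h"
#include "net/base/filter.h"  // Assumed to declare Filter and FilterContext.

// Invented helper: the same setup the removed lines performed, driven by
// the context rather than by URLRequestJob internals.
static void InitFilterFromContext(Filter* filter,
                                  const FilterContext& context) {
  GURL url;
  if (context.GetURL(&url))
    filter->SetURL(url);
  std::string mime_type;
  if (context.GetMimeType(&mime_type))
    filter->SetMimeType(mime_type);
  // Approximate connect time with request time; for uncached responses
  // that is a good approximation for when the first bytes went on the
  // wire (same rationale as the removed comment).
  filter->SetConnectTime(context.GetRequestTime(), context.IsCachedContent());
}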
@@ -96,6 +86,25 @@ void URLRequestJob::ContinueDespiteLastError() {
NOTREACHED();
}
+bool URLRequestJob::GetURL(GURL* gurl) const {
+ if (!request_)
+ return false;
+ *gurl = request_->url();
+ return true;
+}
+
+base::Time URLRequestJob::GetRequestTime() const {
+ if (!request_)
+ return base::Time();
+ return request_->request_time();
+}
+
+bool URLRequestJob::IsCachedContent() const {
+ if (!request_)
+ return false;
+ return request_->was_cached();
+}
+
// This function calls ReadData to get stream data. If a filter exists, it
// passes the data through the attached filter and returns the filter's
// output to the caller.
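
Each of the new accessors above checks request_ before touching it, so a filter holding the FilterContext keeps working, with degraded answers, after DetachRequest(). A minimal consumer sketch; the logging helper is invented and the include paths are assumptions:

#include <string>

#include "base/logging.h"
#include "googleurl/src/gurl.h"
#include "net/base/filter.h"  // Assumed to declare FilterContext.

// Invented helper for illustration only.
void LogFilterContext(const FilterContext& context) {
  GURL url;
  if (!context.GetURL(&url)) {
    // The job's request was detached; the accessors return false or
    // default values instead of dereferencing a NULL request_.
    return;
  }
  std::string mime_type;
  bool have_mime = context.GetMimeType(&mime_type);
  LOG(INFO) << "url=" << url.spec()
            << " mime=" << (have_mime ? mime_type : std::string("unknown"))
            << " cached=" << context.IsCachedContent();
}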
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index aac3768..121519e 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -9,6 +9,7 @@
#include <vector>
#include "base/ref_counted.h"
+#include "base/scoped_ptr.h"
#include "net/base/filter.h"
#include "net/base/load_states.h"
@@ -27,7 +28,8 @@ class URLRequestJobMetrics;
// The URLRequestJob is using RefCountedThreadSafe because some subclasses
// can be destroyed on multiple threads. This is the case for
// URLRequestFileJob.
-class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
+class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
+ public FilterContext {
public:
explicit URLRequestJob(URLRequest* request);
virtual ~URLRequestJob();
@@ -86,11 +88,6 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
// Called to get the upload progress in bytes.
virtual uint64 GetUploadProgress() const { return 0; }
- // Called to fetch the mime_type for this request. Only makes sense for some
- // types of requests. Returns true on success. Calling this on a type that
- // doesn't have a mime type will return false.
- virtual bool GetMimeType(std::string* mime_type) { return false; }
-
// Called to fetch the charset for this request. Only makes sense for some
// types of requests. Returns true on success. Calling this on a type that
// doesn't have a charset will return false.
@@ -195,6 +192,14 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
// Whether we have processed the response for that request yet.
bool has_response_started() const { return has_handled_response_; }
+ // FilterContext methods:
+ // These methods are not applicable to all connections.
+ virtual bool GetMimeType(std::string* mime_type) const { return false; }
+ virtual bool GetURL(GURL* gurl) const;
+ virtual base::Time GetRequestTime() const;
+ virtual bool IsCachedContent() const;
+ virtual int GetInputStreambufferSize() const { return kFilterBufSize; }
+
protected:
// Notifies the job that headers have been received.
void NotifyHeadersComplete();
@@ -262,6 +267,9 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
scoped_ptr<URLRequestJobMetrics> metrics_;
private:
+ // Size of filter input buffers used by this class.
+ static const int kFilterBufSize;
+
// When data filtering is enabled, this function is used to read data
// for the filter. Returns true if raw data was read. Returns false if
// an error occurred (or we are waiting for IO to complete).
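
For readers following the FilterContext changes without net/base/filter.h in front of them, the overrides above imply an interface of roughly this shape. This is a reconstruction, not the actual declaration; which methods are pure virtual, and whether there are more of them, is not visible in this section:

#include <string>

#include "base/time.h"

class GURL;

// Reconstructed sketch of FilterContext, based only on the overrides above.
class FilterContext {
 public:
  virtual ~FilterContext() {}

  // Mime type from the response headers, if any.
  virtual bool GetMimeType(std::string* mime_type) const = 0;

  // URL used to fetch this data, if a request is still attached.
  virtual bool GetURL(GURL* gurl) const = 0;

  // Time at which the underlying request was made.
  virtual base::Time GetRequestTime() const = 0;

  // Whether the data was served from cache rather than the network.
  virtual bool IsCachedContent() const = 0;

  // Preferred size for the filter's input buffer.
  virtual int GetInputStreambufferSize() const = 0;
};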
diff --git a/net/url_request/url_request_simple_job.cc b/net/url_request/url_request_simple_job.cc
index 8f684ab..38ee61b 100644
--- a/net/url_request/url_request_simple_job.cc
+++ b/net/url_request/url_request_simple_job.cc
@@ -20,7 +20,7 @@ void URLRequestSimpleJob::Start() {
this, &URLRequestSimpleJob::StartAsync));
}
-bool URLRequestSimpleJob::GetMimeType(std::string* mime_type) {
+bool URLRequestSimpleJob::GetMimeType(std::string* mime_type) const {
*mime_type = mime_type_;
return true;
}
diff --git a/net/url_request/url_request_simple_job.h b/net/url_request/url_request_simple_job.h
index 87dbcb1..0879948 100644
--- a/net/url_request/url_request_simple_job.h
+++ b/net/url_request/url_request_simple_job.h
@@ -5,17 +5,19 @@
#ifndef NET_URL_REQUEST_URL_REQUEST_SIMPLE_JOB_H_
#define NET_URL_REQUEST_URL_REQUEST_SIMPLE_JOB_H_
+#include <string>
+
#include "net/url_request/url_request_job.h"
class URLRequest;
class URLRequestSimpleJob : public URLRequestJob {
public:
- URLRequestSimpleJob(URLRequest* request);
+ explicit URLRequestSimpleJob(URLRequest* request);
virtual void Start();
virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
virtual bool GetCharset(std::string* charset);
protected:
diff --git a/net/url_request/url_request_test_job.cc b/net/url_request/url_request_test_job.cc
index fce29fd..0843c98 100644
--- a/net/url_request/url_request_test_job.cc
+++ b/net/url_request/url_request_test_job.cc
@@ -57,7 +57,7 @@ URLRequestTestJob::URLRequestTestJob(URLRequest* request)
}
// Force the response to set a reasonable MIME type
-bool URLRequestTestJob::GetMimeType(std::string* mime_type) {
+bool URLRequestTestJob::GetMimeType(std::string* mime_type) const {
DCHECK(mime_type);
*mime_type = "text/html";
return true;
diff --git a/net/url_request/url_request_test_job.h b/net/url_request/url_request_test_job.h
index 4cbf37e..b7e0566 100644
--- a/net/url_request/url_request_test_job.h
+++ b/net/url_request/url_request_test_job.h
@@ -5,6 +5,8 @@
#ifndef NET_URL_REQUEST_URL_REQUEST_TEST_JOB_H_
#define NET_URL_REQUEST_URL_REQUEST_TEST_JOB_H_
+#include <string>
+
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_job.h"
@@ -26,7 +28,7 @@
// end of the queue.
class URLRequestTestJob : public URLRequestJob {
public:
- URLRequestTestJob(URLRequest* request);
+ explicit URLRequestTestJob(URLRequest* request);
virtual ~URLRequestTestJob() {}
// the three URLs this handler will respond to
@@ -53,7 +55,7 @@ class URLRequestTestJob : public URLRequestJob {
virtual void Start();
virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
virtual void Kill();
- virtual bool GetMimeType(std::string* mime_type);
+ virtual bool GetMimeType(std::string* mime_type) const;
virtual void GetResponseInfo(net::HttpResponseInfo* info);
protected: