author     jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2009-01-30 01:38:10 +0000
committer  jar@chromium.org <jar@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2009-01-30 01:38:10 +0000
commit     462e522c983de882463619a11430a073db6b04af (patch)
tree       1a6fe9201b0282d693fe8d8ce036a3503490f1eb /net
parent     8674cdb422a69d1ab2b61ff88e61479a8430bec1 (diff)
Correct handling of filter chaining of SDCH and gzip compression
When a first filter (gzip) has buffered internal data to process, and has already pushed a full 32K buffer to a second filter (such as SDCH), but the second filter declined to output anything (instead asserting that it needed more data), the filter chain was returning FILTER_OK. That status usually means the filter should be called again, but the calling code decided that a return of no bytes without a FILTER_NEED_MORE_DATA status must mean the filter was done.

Rather than trying to change the calling infrastructure (which could impact existing filters that are not part of a chain), this patch ensures that the chained sequence never returns FILTER_OK without providing some output data. This matches the semantics of individual filters, which only return FILTER_OK when they have pushed some data into the output buffer.

bug=1609306
r=huanr,openvcdiff

Review URL: http://codereview.chromium.org/19459

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@8944 0039d316-1c4b-4281-b951-d872f2087c98
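For context, the caller-side convention that makes this invariant necessary can be sketched as follows. This is a minimal illustration of the read loop described above, not the actual calling code; kDestBufferSize, Consume(), and OnReadError() are hypothetical names.

    // Hypothetical driver loop over a (possibly chained) Filter. A return of
    // FILTER_OK with zero bytes written is treated as end of stream, which is
    // why a chain that is still buffering data internally must never return
    // FILTER_OK without producing output.
    const int kDestBufferSize = 32 * 1024;  // Hypothetical output capacity.
    char dest[kDestBufferSize];
    Filter::FilterStatus status;
    do {
      int dest_len = kDestBufferSize;  // In: capacity. Out: bytes written.
      status = filter->ReadData(dest, &dest_len);
      if (status == Filter::FILTER_ERROR)
        return OnReadError();  // Hypothetical error handler.
      if (dest_len == 0 && status != Filter::FILTER_NEED_MORE_DATA)
        break;  // No bytes, no NEED_MORE_DATA: caller assumes filter is done.
      Consume(dest, dest_len);  // Hypothetical sink for the filtered bytes.
    } while (status == Filter::FILTER_OK);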
Diffstat (limited to 'net')
-rw-r--r--  net/base/filter.cc                57
-rw-r--r--  net/base/filter.h                  4
-rw-r--r--  net/base/sdch_filter_unittest.cc  59
3 files changed, 89 insertions(+), 31 deletions(-)
diff --git a/net/base/filter.cc b/net/base/filter.cc
index 4dfdba9..19b7876 100644
--- a/net/base/filter.cc
+++ b/net/base/filter.cc
@@ -267,37 +267,52 @@ Filter::FilterStatus Filter::ReadFilteredData(char* dest_buffer,
}

Filter::FilterStatus Filter::ReadData(char* dest_buffer, int* dest_len) {
+  const int dest_buffer_capacity = *dest_len;
  if (last_status_ == FILTER_ERROR)
    return last_status_;
  if (!next_filter_.get())
    return last_status_ = ReadFilteredData(dest_buffer, dest_len);
  if (last_status_ == FILTER_NEED_MORE_DATA && !stream_data_len())
    return next_filter_->ReadData(dest_buffer, dest_len);
-  if (next_filter_->last_status() == FILTER_NEED_MORE_DATA) {
-    // Push data into next filter's input.
-    net::IOBuffer* next_buffer = next_filter_->stream_buffer();
-    int next_size = next_filter_->stream_buffer_size();
-    last_status_ = ReadFilteredData(next_buffer->data(), &next_size);
-    next_filter_->FlushStreamBuffer(next_size);
-    switch (last_status_) {
-      case FILTER_ERROR:
-        return last_status_;
-
-      case FILTER_NEED_MORE_DATA:
-        return next_filter_->ReadData(dest_buffer, dest_len);
-      case FILTER_OK:
-      case FILTER_DONE:
-        break;
+  do {
+    if (next_filter_->last_status() == FILTER_NEED_MORE_DATA) {
+      PushDataIntoNextFilter();
+      if (FILTER_ERROR == last_status_)
+        return FILTER_ERROR;
    }
-  }
-  FilterStatus status = next_filter_->ReadData(dest_buffer, dest_len);
-  // We could loop to fill next_filter_ if it needs data, but we have to be
-  // careful about output buffer. Simpler is to just wait until we are called
-  // again, and return FILTER_OK.
-  return (status == FILTER_ERROR) ? FILTER_ERROR : FILTER_OK;
+    *dest_len = dest_buffer_capacity;  // Reset the input/output parameter.
+    next_filter_->ReadData(dest_buffer, dest_len);
+    if (FILTER_NEED_MORE_DATA == last_status_)
+      return next_filter_->last_status();
+
+    // In the case where this filter has data internally, and indicates as
+    // much with a last_status_ of FILTER_OK, while the next filter in the
+    // chain reports FILTER_NEED_MORE_DATA, we have to be careful not to
+    // confuse the caller. The API confusion would appear if we returned
+    // FILTER_OK (suggesting we have more data in aggregate) without
+    // populating our output buffer. When that is the case, we alternately
+    // call this filter and the next_filter_ element until we get out of this
+    // state (by pumping data into the next filter until it either outputs
+    // data, or runs out of data and reports FILTER_NEED_MORE_DATA).
+  } while (FILTER_OK == last_status_ &&
+           FILTER_NEED_MORE_DATA == next_filter_->last_status() &&
+           0 == *dest_len);
+
+  if (next_filter_->last_status() == FILTER_ERROR)
+    return FILTER_ERROR;
+  return FILTER_OK;
}
+void Filter::PushDataIntoNextFilter() {
+  net::IOBuffer* next_buffer = next_filter_->stream_buffer();
+  int next_size = next_filter_->stream_buffer_size();
+  last_status_ = ReadFilteredData(next_buffer->data(), &next_size);
+  if (FILTER_ERROR != last_status_)
+    next_filter_->FlushStreamBuffer(next_size);
+}
+
+
bool Filter::FlushStreamBuffer(int stream_data_len) {
  if (stream_data_len <= 0 || stream_data_len > stream_buffer_size_)
    return false;
diff --git a/net/base/filter.h b/net/base/filter.h
index 87ef6fd..8d07e99 100644
--- a/net/base/filter.h
+++ b/net/base/filter.h
@@ -200,6 +200,10 @@ class Filter {
  base::Time connect_time_;
  bool was_cached_;
+ private:  // TODO(jar): Make more data private by moving this up higher.
+  // Helper function to empty our output into the next filter's input.
+  void PushDataIntoNextFilter();
+
  // To facilitate error recovery in SDCH filters, allow filter to know if
  // content is text/html by checking within this mime type (SDCH filter may
  // do a meta-refresh via html).
diff --git a/net/base/sdch_filter_unittest.cc b/net/base/sdch_filter_unittest.cc
index b5c31c2..df34b22 100644
--- a/net/base/sdch_filter_unittest.cc
+++ b/net/base/sdch_filter_unittest.cc
@@ -21,12 +21,25 @@
// Note an SDCH dictionary has extra meta-data before the VCDIFF dictionary.
static const char kTestVcdiffDictionary[] = "DictionaryFor"
    "SdchCompression1SdchCompression2SdchCompression3SdchCompression\n";
-// Pre-compression test data.
-static const char kTestData[] = "TestData "
-    "SdchCompression1SdchCompression2SdchCompression3SdchCompression\n";
+// Pre-compression test data. Note that we pad with a lot of highly gzip
+// compressible content to help exercise the chaining pipeline. That is why
+// there is a PILE of zeros at the start and end.
+// This ensures that the gzip compressed data can be fed to the chain in one
+// gulp, but (with careful selection of intermediate buffers) that it takes
+// several sdch buffers' worth of data to satisfy the sdch filter. See the
+// detailed CHECK() calls in the FilterChaining test for specifics.
+static const char kTestData[] = "0000000000000000000000000000000000000000000000"
+    "0000000000000000000000000000TestData "
+    "SdchCompression1SdchCompression2SdchCompression3SdchCompression"
+    "00000000000000000000000000000000000000000000000000000000000000000000000000"
+    "000000000000000000000000000000000000000\n";
+
// Note SDCH compressed data will include a reference to the SDCH dictionary.
static const char kCompressedTestData[] =
-    "\326\303\304\0\0\001M\0\022I\0\t\003\001TestData \n\023\100\r";
+    "\326\303\304\0\0\001M\0\201S\202\004\0\201E\006\001"
+    "00000000000000000000000000000000000000000000000000000000000000000000000000"
+    "TestData 00000000000000000000000000000000000000000000000000000000000000000"
+    "000000000000000000000000000000000000000000000000\n\001S\023\077\001r\r";
//------------------------------------------------------------------------------
@@ -111,6 +124,9 @@ static bool FilterTestData(const std::string& source,
    output->append(output_buffer.get(), buffer_length);
    if (status == Filter::FILTER_ERROR)
      return false;
+    // Callers assume that FILTER_OK with no output data means FILTER_DONE.
+    if (Filter::FILTER_OK == status && 0 == buffer_length)
+      return true;
    if (copy_amount == 0 && buffer_length == 0)
      return true;
  } while (1);
@@ -641,8 +657,12 @@ TEST_F(SdchFilterTest, FilterChaining) {
  filter_types.push_back(Filter::FILTER_TYPE_GZIP);

  // First try with a large buffer (larger than test input, or compressed data).
-  const int kInputBufferSize(100);
-  scoped_ptr<Filter> filter(Filter::Factory(filter_types, kInputBufferSize));
+  const size_t kLargeInputBufferSize(1000);  // Used internally in filters.
+  CHECK(kLargeInputBufferSize > gzip_compressed_sdch.size());
+  CHECK(kLargeInputBufferSize > sdch_compressed.size());
+  CHECK(kLargeInputBufferSize > expanded_.size());
+  scoped_ptr<Filter> filter(Filter::Factory(filter_types,
+                                            kLargeInputBufferSize));
  filter->SetURL(url);

  // Verify that chained filter is waiting for data.
@@ -651,15 +671,34 @@ TEST_F(SdchFilterTest, FilterChaining) {
  EXPECT_EQ(Filter::FILTER_NEED_MORE_DATA,
            filter->ReadData(tiny_output_buffer, &tiny_output_size));
-  size_t feed_block_size = 100;
-  size_t output_block_size = 100;
+  // Make chain process all data.
+  size_t feed_block_size = kLargeInputBufferSize;
+  size_t output_block_size = kLargeInputBufferSize;
  std::string output;
  EXPECT_TRUE(FilterTestData(gzip_compressed_sdch, feed_block_size,
                             output_block_size, filter.get(), &output));
  EXPECT_EQ(output, expanded_);

-  // Next try with a tiny buffer to cover edge effects.
-  filter.reset(Filter::Factory(filter_types, kInputBufferSize));
+  // Next try with a mid-sized internal buffer size.
+  const size_t kMidSizedInputBufferSize(100);
+  // Buffer should be big enough to swallow whole gzip content.
+  CHECK(kMidSizedInputBufferSize > gzip_compressed_sdch.size());
+  // Buffer should be small enough that entire SDCH content can't fit.
+  // We'll go even further, and force the chain to flush the buffer between the
+  // two filters more than once (that is why we multiply by 2).
+  CHECK(kMidSizedInputBufferSize * 2 < sdch_compressed.size());
+  filter.reset(Filter::Factory(filter_types, kMidSizedInputBufferSize));
+  filter->SetURL(url);
+
+  feed_block_size = kMidSizedInputBufferSize;
+  output_block_size = kMidSizedInputBufferSize;
+  output.clear();
+  EXPECT_TRUE(FilterTestData(gzip_compressed_sdch, feed_block_size,
+                             output_block_size, filter.get(), &output));
+  EXPECT_EQ(output, expanded_);
+
+  // Next try with a tiny input and output buffer to cover edge effects.
+  filter.reset(Filter::Factory(filter_types, kLargeInputBufferSize));
  filter->SetURL(url);

  feed_block_size = 1;