author     satorux@chromium.org <satorux@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2012-01-24 01:07:49 +0000
committer  satorux@chromium.org <satorux@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2012-01-24 01:07:49 +0000
commit     7a1fcff0928e06e0178aea86641d5279509866d4 (patch)
tree       da1cc35d9f5c3fcc21337cd8cc6cfa27693f11b4 /net
parent     3523bbd474063b34f25250c6ccbbd806fd5d75ed (diff)
net: Don't merge HTTP headers and body if the body is not in memory.
UploadDataStream::MarkConsumedAndFillBuffer() may touch the file
system if the body data comes from a file.
BUG=72001,107966
TEST=added tests.
Review URL: http://codereview.chromium.org/9270030
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@118769 0039d316-1c4b-4281-b951-d872f2087c98
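
For orientation, here is a standalone C++ sketch of the decision this change introduces (the patch implements it as HttpStreamParser::ShouldMerge() in the diff below). The BodyInfo struct and the kMaxMergedSize constant are placeholders for this illustration only, not Chromium types; the real code consults UploadDataStream::IsInMemory() and compares against kMaxMergedHeaderAndBodySize.

#include <cstddef>
#include <string>

// Placeholder for the body properties that matter to the merge decision.
struct BodyInfo {
  bool in_memory;  // true only if the body is not chunked and all elements are bytes
  size_t size;     // total body size in bytes
};

// Assumed threshold for the illustration; the patch uses kMaxMergedHeaderAndBodySize.
const size_t kMaxMergedSize = 1400;

bool ShouldMergeHeadersAndBody(const std::string& headers, const BodyInfo* body) {
  // Never merge when there is no body, the body is empty, or the body is not
  // entirely in memory (file- or blob-backed, or chunked); filling the merged
  // buffer from such a body could touch the file system.
  if (body == NULL || !body->in_memory || body->size == 0)
    return false;
  // Merge only when the combined headers-plus-body write stays small.
  return headers.size() + body->size <= kMaxMergedSize;
}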
Diffstat (limited to 'net')
-rw-r--r--  net/base/upload_data.cc                  | 12
-rw-r--r--  net/base/upload_data.h                   |  4
-rw-r--r--  net/base/upload_data_stream.cc           |  4
-rw-r--r--  net/base/upload_data_stream.h            |  3
-rw-r--r--  net/base/upload_data_unittest.cc         | 61
-rw-r--r--  net/http/http_stream_parser.cc           | 64
-rw-r--r--  net/http/http_stream_parser.h            |  5
-rw-r--r--  net/http/http_stream_parser_unittest.cc  | 70
-rw-r--r--  net/net.gyp                              |  1
9 files changed, 197 insertions, 27 deletions
diff --git a/net/base/upload_data.cc b/net/base/upload_data.cc
index 913c172..019254e 100644
--- a/net/base/upload_data.cc
+++ b/net/base/upload_data.cc
@@ -177,6 +177,18 @@ uint64 UploadData::GetContentLength() {
   return len;
 }
 
+bool UploadData::IsInMemory() const {
+  // Chunks are provided as a stream, hence it's not in memory.
+  if (is_chunked_)
+    return false;
+
+  for (size_t i = 0; i < elements_.size(); ++i) {
+    if (elements_[i].type() != TYPE_BYTES)
+      return false;
+  }
+  return true;
+}
+
 void UploadData::SetElements(const std::vector<Element>& elements) {
   elements_ = elements;
 }
diff --git a/net/base/upload_data.h b/net/base/upload_data.h
index 18136e7..31c1958 100644
--- a/net/base/upload_data.h
+++ b/net/base/upload_data.h
@@ -166,6 +166,10 @@ class NET_EXPORT UploadData : public base::RefCounted<UploadData> {
   // Returns the total size in bytes of the data to upload.
   uint64 GetContentLength();
 
+  // Returns true if the upload data is entirely in memory (i.e. the
+  // upload data is not chunked, and all elemnts are of TYPE_BYTES).
+  bool IsInMemory() const;
+
   std::vector<Element>* elements() {
     return &elements_;
   }
diff --git a/net/base/upload_data_stream.cc b/net/base/upload_data_stream.cc
index 7f36b7f..50e604d 100644
--- a/net/base/upload_data_stream.cc
+++ b/net/base/upload_data_stream.cc
@@ -167,4 +167,8 @@ bool UploadDataStream::IsOnLastChunk() const {
            elements.back().is_last_chunk()));
 }
 
+bool UploadDataStream::IsInMemory() const {
+  return data_->IsInMemory();
+}
+
 }  // namespace net
diff --git a/net/base/upload_data_stream.h b/net/base/upload_data_stream.h
index a30850a..e71cc68 100644
--- a/net/base/upload_data_stream.h
+++ b/net/base/upload_data_stream.h
@@ -66,6 +66,9 @@ class NET_EXPORT UploadDataStream {
   // returns true only after the data in buf() has been consumed.
   bool IsOnLastChunk() const;
 
+  // Returns true if the upload data in the stream is entirely in memory.
+  bool IsInMemory() const;
+
   // This method is provided only to be used by unit tests.
   static void set_merge_chunks(bool merge) { merge_chunks_ = merge; }
 
diff --git a/net/base/upload_data_unittest.cc b/net/base/upload_data_unittest.cc
new file mode 100644
index 0000000..5d56523
--- /dev/null
+++ b/net/base/upload_data_unittest.cc
@@ -0,0 +1,61 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "net/base/upload_data.h"
+
+#include "base/file_path.h"
+#include "base/memory/ref_counted.h"
+#include "base/time.h"
+#include "googleurl/src/gurl.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace net {
+
+TEST(UploadDataTest, IsInMemory_Empty) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  ASSERT_TRUE(upload_data->IsInMemory());
+}
+
+TEST(UploadDataTest, IsInMemory_Bytes) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  upload_data->AppendBytes("123", 3);
+  ASSERT_TRUE(upload_data->IsInMemory());
+}
+
+TEST(UploadDataTest, IsInMemory_File) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  upload_data->AppendFileRange(
+      FilePath(FILE_PATH_LITERAL("random_file_name.txt")),
+      0, 0, base::Time());
+  ASSERT_FALSE(upload_data->IsInMemory());
+}
+
+TEST(UploadDataTest, IsInMemory_Blob) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  upload_data->AppendBlob(GURL("blog:internal:12345"));
+  // Until it's resolved, we don't know what blob contains.
+  ASSERT_FALSE(upload_data->IsInMemory());
+}
+
+TEST(UploadDataTest, IsInMemory_Chunk) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  upload_data->set_is_chunked(true);
+  ASSERT_FALSE(upload_data->IsInMemory());
+}
+
+TEST(UploadDataTest, IsInMemory_Mixed) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  ASSERT_TRUE(upload_data->IsInMemory());
+
+  upload_data->AppendBytes("123", 3);
+  upload_data->AppendBytes("abc", 3);
+  ASSERT_TRUE(upload_data->IsInMemory());
+
+  upload_data->AppendFileRange(
+      FilePath(FILE_PATH_LITERAL("random_file_name.txt")),
+      0, 0, base::Time());
+  ASSERT_FALSE(upload_data->IsInMemory());
+}
+
+}  // namespace net
diff --git a/net/http/http_stream_parser.cc b/net/http/http_stream_parser.cc
index a6e3caf..7966889 100644
--- a/net/http/http_stream_parser.cc
+++ b/net/http/http_stream_parser.cc
@@ -139,34 +139,30 @@ int HttpStreamParser::SendRequest(const std::string& request_line,
   // If we have a small request body, then we'll merge with the headers into a
   // single write.
   bool did_merge = false;
-  if (request_body_ != NULL &&
-      !request_body_->is_chunked() &&
-      request_body_->size() > 0) {
-    size_t merged_size = request.size() + request_body_->size();
-    if (merged_size <= kMaxMergedHeaderAndBodySize) {
-      scoped_refptr<IOBuffer> merged_request_headers_and_body(
-          new IOBuffer(merged_size));
-      // We'll repurpose |request_headers_| to store the merged headers and
-      // body.
-      request_headers_ = new DrainableIOBuffer(
-          merged_request_headers_and_body, merged_size);
-
-      char *buf = request_headers_->data();
-      memcpy(buf, request.data(), request.size());
-      buf += request.size();
-
-      size_t todo = request_body_->size();
-      while (todo) {
-        size_t buf_len = request_body_->buf_len();
-        memcpy(buf, request_body_->buf()->data(), buf_len);
-        todo -= buf_len;
-        buf += buf_len;
-        request_body_->MarkConsumedAndFillBuffer(buf_len);
-      }
-      DCHECK(request_body_->eof());
-
-      did_merge = true;
+  if (ShouldMerge(request, request_body_.get())) {
+    size_t merged_size = request.size() + request_body->size();
+    scoped_refptr<IOBuffer> merged_request_headers_and_body(
+        new IOBuffer(merged_size));
+    // We'll repurpose |request_headers_| to store the merged headers and
+    // body.
+    request_headers_ = new DrainableIOBuffer(
+        merged_request_headers_and_body, merged_size);
+
+    char *buf = request_headers_->data();
+    memcpy(buf, request.data(), request.size());
+    buf += request.size();
+
+    size_t todo = request_body_->size();
+    while (todo) {
+      size_t buf_len = request_body_->buf_len();
+      memcpy(buf, request_body_->buf()->data(), buf_len);
+      todo -= buf_len;
+      buf += buf_len;
+      request_body_->MarkConsumedAndFillBuffer(buf_len);
     }
+    DCHECK(request_body_->eof());
+
+    did_merge = true;
   }
 
   if (!did_merge) {
@@ -814,4 +810,18 @@ int HttpStreamParser::EncodeChunk(const base::StringPiece& payload,
   return cursor - output;
 }
 
+// static
+bool HttpStreamParser::ShouldMerge(const std::string& request,
+                                   const UploadDataStream* request_body) {
+  if (request_body != NULL &&
+      // IsInMemory() ensures that the request body is not chunked.
+      request_body->IsInMemory() &&
+      request_body->size() > 0) {
+    size_t merged_size = request.size() + request_body->size();
+    if (merged_size <= kMaxMergedHeaderAndBodySize)
+      return true;
+  }
+  return false;
+}
+
 }  // namespace net
diff --git a/net/http/http_stream_parser.h b/net/http/http_stream_parser.h
index 34ec522..b87beab 100644
--- a/net/http/http_stream_parser.h
+++ b/net/http/http_stream_parser.h
@@ -91,6 +91,11 @@ class NET_EXPORT_PRIVATE HttpStreamParser : public ChunkCallback {
                          char* output,
                          size_t output_size);
 
+  // Returns true if request and body should be merged (i.e. the sum is
+  // small enough and the body is in memory, and not chunked).
+  static bool ShouldMerge(const std::string& request,
+                          const UploadDataStream* request_body);
+
   // The number of extra bytes required to encode a chunk.
   static const size_t kChunkHeaderFooterSize;
 
diff --git a/net/http/http_stream_parser_unittest.cc b/net/http/http_stream_parser_unittest.cc
index d40eac6..1e96038 100644
--- a/net/http/http_stream_parser_unittest.cc
+++ b/net/http/http_stream_parser_unittest.cc
@@ -4,9 +4,14 @@
 
 #include "net/http/http_stream_parser.h"
 
+#include "base/file_path.h"
+#include "base/file_util.h"
+#include "base/scoped_temp_dir.h"
 #include "base/string_piece.h"
 #include "base/stringprintf.h"
 #include "net/base/net_errors.h"
+#include "net/base/upload_data.h"
+#include "net/base/upload_data_stream.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace net {
@@ -74,4 +79,69 @@ TEST(HttpStreamParser, EncodeChunk_TooLargePayload) {
   ASSERT_EQ(ERR_INVALID_ARGUMENT, num_bytes_written);
 }
 
+TEST(HttpStreamParser, ShouldMerge_NoBody) {
+  // Shouldn't be merged if upload data is non-existent.
+  ASSERT_FALSE(HttpStreamParser::ShouldMerge("some header", NULL));
+}
+
+TEST(HttpStreamParser, ShouldMerge_EmptyBody) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  scoped_ptr<UploadDataStream> body(
+      UploadDataStream::Create(upload_data.get(), NULL));
+  // Shouldn't be merged if upload data is empty.
+  ASSERT_FALSE(HttpStreamParser::ShouldMerge("some header", body.get()));
+}
+
+TEST(HttpStreamParser, ShouldMerge_ChunkedBody) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  upload_data->set_is_chunked(true);
+  const std::string payload = "123";
+  upload_data->AppendChunk(payload.data(), payload.size(), true);
+
+  scoped_ptr<UploadDataStream> body(
+      UploadDataStream::Create(upload_data.get(), NULL));
+  // Shouldn't be merged if upload data carries chunked data.
+  ASSERT_FALSE(HttpStreamParser::ShouldMerge("some header", body.get()));
+}
+
+TEST(HttpStreamParser, ShouldMerge_FileBody) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+
+  // Create an empty temporary file.
+  ScopedTempDir temp_dir;
+  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+  FilePath temp_file_path;
+  ASSERT_TRUE(file_util::CreateTemporaryFileInDir(temp_dir.path(),
+                                                  &temp_file_path));
+
+  upload_data->AppendFileRange(temp_file_path, 0, 0, base::Time());
+
+  scoped_ptr<UploadDataStream> body(
+      UploadDataStream::Create(upload_data.get(), NULL));
+  // Shouldn't be merged if upload data carries a file, as it's not in-memory.
+  ASSERT_FALSE(HttpStreamParser::ShouldMerge("some header", body.get()));
+}
+
+TEST(HttpStreamParser, ShouldMerge_SmallBodyInMemory) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  const std::string payload = "123";
+  upload_data->AppendBytes(payload.data(), payload.size());
+
+  scoped_ptr<UploadDataStream> body(
+      UploadDataStream::Create(upload_data.get(), NULL));
+  // Yes, should be merged if the in-memory body is small here.
+  ASSERT_TRUE(HttpStreamParser::ShouldMerge("some header", body.get()));
+}
+
+TEST(HttpStreamParser, ShouldMerge_LargeBodyInMemory) {
+  scoped_refptr<UploadData> upload_data = new UploadData;
+  const std::string payload(10000, 'a');  // 'a' x 10000.
+  upload_data->AppendBytes(payload.data(), payload.size());
+
+  scoped_ptr<UploadDataStream> body(
+      UploadDataStream::Create(upload_data.get(), NULL));
+  // Shouldn't be merged if the in-memory body is large here.
+  ASSERT_FALSE(HttpStreamParser::ShouldMerge("some header", body.get()));
+}
+
 }  // namespace net
diff --git a/net/net.gyp b/net/net.gyp
index cef6185..4688564 100644
--- a/net/net.gyp
+++ b/net/net.gyp
@@ -1050,6 +1050,7 @@
         'base/test_certificate_data.h',
         'base/test_completion_callback_unittest.cc',
         'base/transport_security_state_unittest.cc',
+        'base/upload_data_unittest.cc',
        'base/upload_data_stream_unittest.cc',
         'base/x509_certificate_unittest.cc',
         'base/x509_cert_types_mac_unittest.cc',