author     rvargas@google.com <rvargas@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2009-01-24 02:39:54 +0000
committer  rvargas@google.com <rvargas@google.com@0039d316-1c4b-4281-b951-d872f2087c98>  2009-01-24 02:39:54 +0000
commit     e993abfe81feaa374d476828a44942d296bdcc78 (patch)
tree       201fffef0ba93ce41afaf2bd6e61e05b61c04028 /net/url_request
parent     0afe80d755b899c188313629ea3f45f0fe5be981 (diff)
revert r8603
Review URL: http://codereview.chromium.org/18576

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@8605 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request')
-rw-r--r--  net/url_request/mime_sniffer_proxy.cc       | 16
-rw-r--r--  net/url_request/mime_sniffer_proxy.h        |  9
-rw-r--r--  net/url_request/url_request.cc              |  2
-rw-r--r--  net/url_request/url_request.h               | 21
-rw-r--r--  net/url_request/url_request_file_dir_job.cc |  8
-rw-r--r--  net/url_request/url_request_file_dir_job.h  |  4
-rw-r--r--  net/url_request/url_request_file_job.cc     |  6
-rw-r--r--  net/url_request/url_request_file_job.h      |  2
-rw-r--r--  net/url_request/url_request_http_job.cc     |  3
-rw-r--r--  net/url_request/url_request_http_job.h      |  2
-rw-r--r--  net/url_request/url_request_inet_job.cc     |  4
-rw-r--r--  net/url_request/url_request_inet_job.h      |  2
-rw-r--r--  net/url_request/url_request_job.cc          | 10
-rw-r--r--  net/url_request/url_request_job.h           |  7
-rw-r--r--  net/url_request/url_request_simple_job.cc   |  4
-rw-r--r--  net/url_request/url_request_simple_job.h    |  2
-rw-r--r--  net/url_request/url_request_test_job.cc     |  5
-rw-r--r--  net/url_request/url_request_test_job.h      |  4
-rw-r--r--  net/url_request/url_request_unittest.h      | 15
19 files changed, 54 insertions, 72 deletions
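
Before the file-by-file diff, a short editor's gloss of what this revert does at the API level. The signatures are taken from the hunks below; the surrounding comments are explanatory notes, not part of the patch.

// What r8603 had introduced (and this revert removes):
//   bool URLRequest::Read(net::IOBuffer* buf, int max_bytes, int* bytes_read);
//   buf is a reference-counted net::IOBuffer, so the implementation can hold
//   a reference to it until an asynchronous read completes.
//
// What this revert restores:
//   bool URLRequest::Read(char* buf, int max_bytes, int* bytes_read);
//   buf is a raw pointer; the caller must keep the buffer available until
//   OnReadCompleted() is delivered.
//
// The same substitution runs through URLRequestJob::Read, every ReadRawData()
// override in this directory, and the helpers that go back to plain char
// arrays (char buf_[1024] in MimeSnifferProxy, char buf_[4096] in the test
// delegate).
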
diff --git a/net/url_request/mime_sniffer_proxy.cc b/net/url_request/mime_sniffer_proxy.cc
index 3a8d9a9..24b1fbe 100644
--- a/net/url_request/mime_sniffer_proxy.cc
+++ b/net/url_request/mime_sniffer_proxy.cc
@@ -6,13 +6,10 @@
#include "net/base/mime_sniffer.h"
-static const int kBufferSize = 1024;
-
MimeSnifferProxy::MimeSnifferProxy(URLRequest* request,
URLRequest::Delegate* delegate)
: request_(request), delegate_(delegate),
- sniff_content_(false), error_(false),
- buf_(new net::IOBuffer(kBufferSize)) {
+ sniff_content_(false), error_(false) {
request->set_delegate(this);
}
@@ -23,7 +20,7 @@ void MimeSnifferProxy::OnResponseStarted(URLRequest* request) {
// We need to read content before we know the mime type,
// so we don't call OnResponseStarted.
sniff_content_ = true;
- if (request_->Read(buf_, kBufferSize, &bytes_read_) && bytes_read_) {
+ if (request_->Read(buf_, sizeof(buf_), &bytes_read_) && bytes_read_) {
OnReadCompleted(request, bytes_read_);
} else if (!request_->status().is_io_pending()) {
error_ = true;
@@ -35,8 +32,7 @@ void MimeSnifferProxy::OnResponseStarted(URLRequest* request) {
delegate_->OnResponseStarted(request);
}
-bool MimeSnifferProxy::Read(net::IOBuffer* buf, int max_bytes,
- int *bytes_read) {
+bool MimeSnifferProxy::Read(char* buf, int max_bytes, int *bytes_read) {
if (sniff_content_) {
// This is the first call to Read() after we've sniffed content.
// Return our local buffer or the error we ran into.
@@ -47,7 +43,7 @@ bool MimeSnifferProxy::Read(net::IOBuffer* buf, int max_bytes,
return false;
}
- memcpy(buf->data(), buf_->data(), bytes_read_);
+ memcpy(buf, buf_, bytes_read_);
*bytes_read = bytes_read_;
return true;
}
@@ -61,8 +57,8 @@ void MimeSnifferProxy::OnReadCompleted(URLRequest* request, int bytes_read) {
std::string type_hint;
request_->GetMimeType(&type_hint);
bytes_read_ = bytes_read;
- net::SniffMimeType(buf_->data(), bytes_read_, request_->url(),
- type_hint, &mime_type_);
+ net::SniffMimeType(
+ buf_, bytes_read_, request_->url(), type_hint, &mime_type_);
} else {
error_ = true;
}
diff --git a/net/url_request/mime_sniffer_proxy.h b/net/url_request/mime_sniffer_proxy.h
index 898ea60..0029a80 100644
--- a/net/url_request/mime_sniffer_proxy.h
+++ b/net/url_request/mime_sniffer_proxy.h
@@ -19,10 +19,6 @@
// 2) ms_->mime_type() -- returns the sniffed mime type of the data;
// valid after OnResponseStarted() is called.
-#ifndef NET_URL_REQUEST_MIME_SNIFFER_PROXY_H_
-#define NET_URL_REQUEST_MIME_SNIFFER_PROXY_H_
-
-#include "net/base/io_buffer.h"
#include "net/url_request/url_request.h"
class MimeSnifferProxy : public URLRequest::Delegate {
@@ -52,7 +48,7 @@ class MimeSnifferProxy : public URLRequest::Delegate {
}
// Wrapper around URLRequest::Read.
- bool Read(net::IOBuffer* buf, int max_bytes, int *bytes_read);
+ bool Read(char* buf, int max_bytes, int *bytes_read);
// Return the sniffed mime type of the request. Valid after
// OnResponseStarted() has been called on the delegate.
@@ -73,9 +69,8 @@ class MimeSnifferProxy : public URLRequest::Delegate {
bool error_;
// A buffer for the first bit of the request.
- scoped_refptr<net::IOBuffer> buf_;
+ char buf_[1024];
// The number of bytes we've read into the buffer.
int bytes_read_;
};
-#endif // NET_URL_REQUEST_MIME_SNIFFER_PROXY_H_
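
The usage comment at the top of mime_sniffer_proxy.h (partially visible in the hunk above) says the proxy re-points the request's delegate at itself, pre-reads the first chunk of the response into its own buffer, sniffs it, and only then forwards OnResponseStarted(). A minimal, hypothetical sketch of a consumer under the reverted char*-based interface; every name except MimeSnifferProxy, URLRequest and their members shown in this diff is invented for illustration:

#include <string>

#include "net/url_request/mime_sniffer_proxy.h"
#include "net/url_request/url_request.h"

// Hypothetical consumer: the proxy sits between the URLRequest and this
// delegate, so the sniffed mime type is already known by the time
// OnResponseStarted() reaches this class.
class SniffingConsumer : public URLRequest::Delegate {
 public:
  explicit SniffingConsumer(URLRequest* request)
      : ms_(new MimeSnifferProxy(request, this)) {}

  virtual void OnResponseStarted(URLRequest* request) {
    mime_type_ = ms_->mime_type();  // valid here, per the header comment
    int bytes_read = 0;
    // Reads go through the proxy so the bytes it read for sniffing are
    // handed back before any further data from the request.
    if (ms_->Read(buf_, sizeof(buf_), &bytes_read))
      OnReadCompleted(request, bytes_read);
  }

  virtual void OnReadCompleted(URLRequest* request, int bytes_read) {
    if (bytes_read > 0)
      data_.append(buf_, bytes_read);
  }

  // Other URLRequest::Delegate methods omitted for brevity.

 private:
  MimeSnifferProxy* ms_;  // ownership/cleanup left out of this sketch
  std::string mime_type_;
  std::string data_;
  char buf_[4096];
};
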
diff --git a/net/url_request/url_request.cc b/net/url_request/url_request.cc
index 19c9810..87facba 100644
--- a/net/url_request/url_request.cc
+++ b/net/url_request/url_request.cc
@@ -252,7 +252,7 @@ void URLRequest::CancelWithError(int os_error) {
// about being called recursively.
}
-bool URLRequest::Read(net::IOBuffer* dest, int dest_size, int *bytes_read) {
+bool URLRequest::Read(char* dest, int dest_size, int *bytes_read) {
DCHECK(job_);
DCHECK(bytes_read);
DCHECK(!job_->is_done());
diff --git a/net/url_request/url_request.h b/net/url_request/url_request.h
index f697362..5dfc711 100644
--- a/net/url_request/url_request.h
+++ b/net/url_request/url_request.h
@@ -21,9 +21,6 @@
#include "net/url_request/url_request_context.h"
#include "net/url_request/url_request_status.h"
-namespace net {
-class IOBuffer;
-}
class URLRequestJob;
// This stores the values of the Set-Cookie headers received during the request.
@@ -370,14 +367,16 @@ class URLRequest {
// successful status.
// If data is available, Read will return true, and the data and length will
// be returned immediately. If data is not available, Read returns false,
- // and an asynchronous Read is initiated. The Read is finished when
- // the caller receives the OnReadComplete callback. OnReadComplete will be
- // always be called, even if there was a failure.
+ // and an asynchronous Read is initiated. The caller guarantees the
+ // buffer provided will be available until the Read is finished. The
+ // Read is finished when the caller receives the OnReadComplete
+ // callback. OnReadComplete will be always be called, even if there
+ // was a failure.
//
- // The buf parameter is a buffer to receive the data. If the operation
- // completes asynchronously, the implementation will reference the buffer
- // until OnReadComplete is called. The buffer must be at least max_bytes in
- // length.
+ // The buf parameter is a buffer to receive the data. Once the read is
+ // initiated, the caller guarantees availability of this buffer until
+ // the OnReadComplete is received. The buffer must be at least
+ // max_bytes in length.
//
// The max_bytes parameter is the maximum number of bytes to read.
//
@@ -387,7 +386,7 @@ class URLRequest {
//
// If a read error occurs, Read returns false and the request->status
// will be set to an error.
- bool Read(net::IOBuffer* buf, int max_bytes, int *bytes_read);
+ bool Read(char* buf, int max_bytes, int *bytes_read);
// One of the following two methods should be called in response to an
// OnAuthRequired() callback (and only then).
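
The rewritten comment in the hunk above shifts the buffer-lifetime guarantee onto the caller: with a raw char* there is no reference count keeping the buffer alive, so it must stay valid until OnReadCompleted() fires. A hedged sketch of a caller honoring that contract, mirroring the TestDelegate changes further down; the class and member names are illustrative, not from the patch:

#include <string>

#include "net/url_request/url_request.h"

// Illustrative delegate reading under the restored char*-based Read().
class ExampleDelegate : public URLRequest::Delegate {
 public:
  virtual void OnResponseStarted(URLRequest* request) {
    int bytes_read = 0;
    if (request->Read(buf_, sizeof(buf_), &bytes_read)) {
      OnReadCompleted(request, bytes_read);  // completed synchronously
    } else if (!request->status().is_io_pending()) {
      // Neither synchronous data nor a pending read: treat as an error.
    }
    // Otherwise the read is pending and buf_ must not be freed or reused
    // until OnReadCompleted() is called.
  }

  virtual void OnReadCompleted(URLRequest* request, int bytes_read) {
    if (bytes_read > 0)
      data_.append(buf_, bytes_read);
    // A real delegate would keep issuing Read() calls here until it gets
    // zero bytes or a failure status.
  }

  // Other URLRequest::Delegate methods omitted for brevity.

 private:
  char buf_[4096];  // must outlive any read pending on it
  std::string data_;
};
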
diff --git a/net/url_request/url_request_file_dir_job.cc b/net/url_request/url_request_file_dir_job.cc
index 1608684..df24eab 100644
--- a/net/url_request/url_request_file_dir_job.cc
+++ b/net/url_request/url_request_file_dir_job.cc
@@ -26,6 +26,7 @@ URLRequestFileDirJob::URLRequestFileDirJob(URLRequest* request,
list_complete_(false),
wrote_header_(false),
read_pending_(false),
+ read_buffer_(NULL),
read_buffer_length_(0) {
}
@@ -67,7 +68,7 @@ void URLRequestFileDirJob::Kill() {
lister_->Cancel();
}
-bool URLRequestFileDirJob::ReadRawData(net::IOBuffer* buf, int buf_size,
+bool URLRequestFileDirJob::ReadRawData(char* buf, int buf_size,
int *bytes_read) {
DCHECK(bytes_read);
*bytes_read = 0;
@@ -75,7 +76,7 @@ bool URLRequestFileDirJob::ReadRawData(net::IOBuffer* buf, int buf_size,
if (is_done())
return true;
- if (FillReadBuffer(buf->data(), buf_size, bytes_read))
+ if (FillReadBuffer(buf, buf_size, bytes_read))
return true;
// We are waiting for more data
@@ -182,8 +183,7 @@ bool URLRequestFileDirJob::FillReadBuffer(char *buf, int buf_size,
void URLRequestFileDirJob::CompleteRead() {
if (read_pending_) {
int bytes_read;
- if (FillReadBuffer(read_buffer_->data(), read_buffer_length_,
- &bytes_read)) {
+ if (FillReadBuffer(read_buffer_, read_buffer_length_, &bytes_read)) {
// We completed the read, so reset the read buffer.
read_pending_ = false;
read_buffer_ = NULL;
diff --git a/net/url_request/url_request_file_dir_job.h b/net/url_request/url_request_file_dir_job.h
index c3881dc..882f967 100644
--- a/net/url_request/url_request_file_dir_job.h
+++ b/net/url_request/url_request_file_dir_job.h
@@ -21,7 +21,7 @@ class URLRequestFileDirJob
virtual void Start();
virtual void StartAsync();
virtual void Kill();
- virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
virtual bool GetMimeType(std::string* mime_type);
virtual bool GetCharset(std::string* charset);
virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
@@ -55,7 +55,7 @@ class URLRequestFileDirJob
// we wait for IO to complete. When done, we fill the buffer
// manually.
bool read_pending_;
- scoped_refptr<net::IOBuffer> read_buffer_;
+ char *read_buffer_;
int read_buffer_length_;
DISALLOW_EVIL_CONSTRUCTORS(URLRequestFileDirJob);
diff --git a/net/url_request/url_request_file_job.cc b/net/url_request/url_request_file_job.cc
index 92e7f87..994a58c 100644
--- a/net/url_request/url_request_file_job.cc
+++ b/net/url_request/url_request_file_job.cc
@@ -128,12 +128,12 @@ void URLRequestFileJob::Kill() {
URLRequestJob::Kill();
}
-bool URLRequestFileJob::ReadRawData(net::IOBuffer* dest, int dest_size,
- int *bytes_read) {
+bool URLRequestFileJob::ReadRawData(
+ char* dest, int dest_size, int *bytes_read) {
DCHECK_NE(dest_size, 0);
DCHECK(bytes_read);
- int rv = stream_.Read(dest->data(), dest_size, &io_callback_);
+ int rv = stream_.Read(dest, dest_size, &io_callback_);
if (rv >= 0) {
// Data is immediately available.
*bytes_read = rv;
diff --git a/net/url_request/url_request_file_job.h b/net/url_request/url_request_file_job.h
index 0ccaa5a..a00e439 100644
--- a/net/url_request/url_request_file_job.h
+++ b/net/url_request/url_request_file_job.h
@@ -20,7 +20,7 @@ class URLRequestFileJob : public URLRequestJob {
virtual void Start();
virtual void Kill();
- virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
virtual bool GetMimeType(std::string* mime_type);
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 7a74500..7728f6c 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -348,8 +348,7 @@ bool URLRequestHttpJob::GetMoreData() {
return transaction_.get() && !read_in_progress_;
}
-bool URLRequestHttpJob::ReadRawData(net::IOBuffer* buf, int buf_size,
- int *bytes_read) {
+bool URLRequestHttpJob::ReadRawData(char* buf, int buf_size, int *bytes_read) {
DCHECK_NE(buf_size, 0);
DCHECK(bytes_read);
DCHECK(!read_in_progress_);
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index e53db48..eda4b4b 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -53,7 +53,7 @@ class URLRequestHttpJob : public URLRequestJob {
virtual void CancelAuth();
virtual void ContinueDespiteLastError();
virtual bool GetMoreData();
- virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
// Shadows URLRequestJob's version of this method so we can grab cookies.
void NotifyHeadersComplete();
diff --git a/net/url_request/url_request_inet_job.cc b/net/url_request/url_request_inet_job.cc
index 09abfa2..2d45526 100644
--- a/net/url_request/url_request_inet_job.cc
+++ b/net/url_request/url_request_inet_job.cc
@@ -185,7 +185,7 @@ void URLRequestInetJob::OnIOComplete(const AsyncResult& result) {
}
}
-bool URLRequestInetJob::ReadRawData(net::IOBuffer* dest, int dest_size,
+bool URLRequestInetJob::ReadRawData(char* dest, int dest_size,
int *bytes_read) {
if (is_done())
return 0;
@@ -196,7 +196,7 @@ bool URLRequestInetJob::ReadRawData(net::IOBuffer* dest, int dest_size,
*bytes_read = 0;
- int result = CallInternetRead(dest->data(), dest_size, bytes_read);
+ int result = CallInternetRead(dest, dest_size, bytes_read);
if (result == ERROR_SUCCESS) {
DLOG(INFO) << "read " << *bytes_read << " bytes";
if (*bytes_read == 0)
diff --git a/net/url_request/url_request_inet_job.h b/net/url_request/url_request_inet_job.h
index bef0c4f..6341105 100644
--- a/net/url_request/url_request_inet_job.h
+++ b/net/url_request/url_request_inet_job.h
@@ -29,7 +29,7 @@ class URLRequestInetJob : public URLRequestJob {
}
virtual void Kill();
- virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
// URLRequestJob Authentication methods
virtual void SetAuth(const std::wstring& username,
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 659156b..6e6a1df 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -8,7 +8,6 @@
#include "base/string_util.h"
#include "googleurl/src/gurl.h"
#include "net/base/auth.h"
-#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_job_metrics.h"
@@ -98,7 +97,7 @@ void URLRequestJob::ContinueDespiteLastError() {
// This function calls ReadData to get stream data. If a filter exists, passes
// the data to the attached filter. Then returns the output from filter back to
// the caller.
-bool URLRequestJob::Read(net::IOBuffer* buf, int buf_size, int *bytes_read) {
+bool URLRequestJob::Read(char* buf, int buf_size, int *bytes_read) {
bool rv = false;
DCHECK_LT(buf_size, 1000000); // sanity check
@@ -141,7 +140,7 @@ bool URLRequestJob::ReadRawDataForFilter(int *bytes_read) {
// TODO(mbelshe): is it possible that the filter needs *MORE* data
// when there is some data already in the buffer?
if (!filter_->stream_data_len() && !is_done()) {
- net::IOBuffer* stream_buffer = filter_->stream_buffer();
+ char* stream_buffer = filter_->stream_buffer();
int stream_buffer_size = filter_->stream_buffer_size();
rv = ReadRawData(stream_buffer, stream_buffer_size, bytes_read);
if (rv && *bytes_read > 0)
@@ -187,7 +186,7 @@ bool URLRequestJob::ReadFilteredData(int *bytes_read) {
// Get filtered data
int filtered_data_len = read_buffer_len_;
Filter::FilterStatus status;
- status = filter_->ReadData(read_buffer_->data(), &filtered_data_len);
+ status = filter_->ReadData(read_buffer_, &filtered_data_len);
switch (status) {
case Filter::FILTER_DONE: {
*bytes_read = filtered_data_len;
@@ -243,8 +242,7 @@ bool URLRequestJob::ReadFilteredData(int *bytes_read) {
return rv;
}
-bool URLRequestJob::ReadRawData(net::IOBuffer* buf, int buf_size,
- int *bytes_read) {
+bool URLRequestJob::ReadRawData(char* buf, int buf_size, int *bytes_read) {
DCHECK(bytes_read);
*bytes_read = 0;
NotifyDone(URLRequestStatus());
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index 0a5744b..43fa866 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -17,7 +17,6 @@
namespace net {
class HttpResponseInfo;
-class IOBuffer;
class UploadData;
}
@@ -79,7 +78,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
// bytes read, 0 when there is no more data, or -1 if there was an error.
// This is just the backend for URLRequest::Read, see that function for more
// info.
- bool Read(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ bool Read(char* buf, int buf_size, int *bytes_read);
// Called to fetch the current load state for the job.
virtual net::LoadState GetLoadState() const { return net::LOAD_STATE_IDLE; }
@@ -232,7 +231,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
// If async IO is pending, the status of the request will be
// URLRequestStatus::IO_PENDING, and buf must remain available until the
// operation is completed. See comments on URLRequest::Read for more info.
- virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
// Informs the filter that data has been read into its buffer
void FilteredDataRead(int bytes_read);
@@ -290,7 +289,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
// processing the filtered data, we return the data in the caller's buffer.
// While the async IO is in progress, we save the user buffer here, and
// when the IO completes, we fill this in.
- net::IOBuffer *read_buffer_;
+ char *read_buffer_;
int read_buffer_len_;
// Used by HandleResponseIfNecessary to track whether we've sent the
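
The ReadRawData() comment in this header describes the asynchronous half of the contract: if data is not ready, the job returns false, the request status becomes URLRequestStatus::IO_PENDING, and the caller's buffer must stay available until the job finishes the read. A hedged sketch of that pattern under the restored signature, modeled on how URLRequestFileDirJob stashes read_buffer_/read_buffer_length_ in this patch; the class, its data-source members, and the use of URLRequestJob's protected SetStatus() helper are assumptions for illustration, and the class declaration is omitted.

#include <string.h>

#include "base/logging.h"
#include "net/url_request/url_request_job.h"
#include "net/url_request/url_request_status.h"

// Illustrative job: stash the caller's raw buffer while waiting for data.
bool ExampleAsyncJob::ReadRawData(char* buf, int buf_size, int* bytes_read) {
  DCHECK(bytes_read);
  *bytes_read = 0;
  if (is_done())
    return true;
  if (!data_available_) {
    // Asynchronous path: remember the raw pointer and its size. The caller
    // guarantees the buffer stays valid until we later fill it and notify
    // the request (compare URLRequestFileDirJob::CompleteRead above).
    read_pending_ = true;
    read_buffer_ = buf;
    read_buffer_length_ = buf_size;
    SetStatus(URLRequestStatus(URLRequestStatus::IO_PENDING, 0));
    return false;
  }
  // Synchronous path: data is already available, copy it out directly.
  int n = buf_size < pending_bytes_ ? buf_size : pending_bytes_;
  memcpy(buf, pending_data_, n);
  *bytes_read = n;
  return true;
}
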
diff --git a/net/url_request/url_request_simple_job.cc b/net/url_request/url_request_simple_job.cc
index a4ef4e1..ae078b3 100644
--- a/net/url_request/url_request_simple_job.cc
+++ b/net/url_request/url_request_simple_job.cc
@@ -29,13 +29,13 @@ bool URLRequestSimpleJob::GetCharset(std::string* charset) {
return true;
}
-bool URLRequestSimpleJob::ReadRawData(net::IOBuffer* buf, int buf_size,
+bool URLRequestSimpleJob::ReadRawData(char* buf, int buf_size,
int* bytes_read) {
DCHECK(bytes_read);
int remaining = static_cast<int>(data_.size()) - data_offset_;
if (buf_size > remaining)
buf_size = remaining;
- memcpy(buf->data(), data_.data() + data_offset_, buf_size);
+ memcpy(buf, data_.data() + data_offset_, buf_size);
data_offset_ += buf_size;
*bytes_read = buf_size;
return true;
diff --git a/net/url_request/url_request_simple_job.h b/net/url_request/url_request_simple_job.h
index 183598a..4cb847c 100644
--- a/net/url_request/url_request_simple_job.h
+++ b/net/url_request/url_request_simple_job.h
@@ -13,7 +13,7 @@ class URLRequestSimpleJob : public URLRequestJob {
URLRequestSimpleJob(URLRequest* request);
virtual void Start();
- virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
virtual bool GetMimeType(std::string* mime_type);
virtual bool GetCharset(std::string* charset);
diff --git a/net/url_request/url_request_test_job.cc b/net/url_request/url_request_test_job.cc
index eda77a7..d544ce4 100644
--- a/net/url_request/url_request_test_job.cc
+++ b/net/url_request/url_request_test_job.cc
@@ -93,8 +93,7 @@ void URLRequestTestJob::StartAsync() {
this->NotifyHeadersComplete();
}
-bool URLRequestTestJob::ReadRawData(net::IOBuffer* buf, int buf_size,
- int *bytes_read) {
+bool URLRequestTestJob::ReadRawData(char* buf, int buf_size, int *bytes_read) {
if (stage_ == WAITING) {
async_buf_ = buf;
async_buf_size_ = buf_size;
@@ -113,7 +112,7 @@ bool URLRequestTestJob::ReadRawData(net::IOBuffer* buf, int buf_size,
if (to_read + offset_ > static_cast<int>(data_.length()))
to_read = static_cast<int>(data_.length()) - offset_;
- memcpy(buf->data(), &data_.c_str()[offset_], to_read);
+ memcpy(buf, &data_.c_str()[offset_], to_read);
offset_ += to_read;
*bytes_read = to_read;
diff --git a/net/url_request/url_request_test_job.h b/net/url_request/url_request_test_job.h
index 4cbf37e..ad69123 100644
--- a/net/url_request/url_request_test_job.h
+++ b/net/url_request/url_request_test_job.h
@@ -51,7 +51,7 @@ class URLRequestTestJob : public URLRequestJob {
// Job functions
virtual void Start();
- virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
+ virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
virtual void Kill();
virtual bool GetMimeType(std::string* mime_type);
virtual void GetResponseInfo(net::HttpResponseInfo* info);
@@ -78,7 +78,7 @@ class URLRequestTestJob : public URLRequestJob {
int offset_;
// Holds the buffer for an asynchronous ReadRawData call
- net::IOBuffer* async_buf_;
+ char* async_buf_;
int async_buf_size_;
};
diff --git a/net/url_request/url_request_unittest.h b/net/url_request/url_request_unittest.h
index 9389ce8..58a0218 100644
--- a/net/url_request/url_request_unittest.h
+++ b/net/url_request/url_request_unittest.h
@@ -21,7 +21,6 @@
#include "base/thread.h"
#include "base/time.h"
#include "base/waitable_event.h"
-#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/http/http_network_layer.h"
#include "net/url_request/url_request.h"
@@ -63,8 +62,7 @@ class TestDelegate : public URLRequest::Delegate {
received_bytes_count_(0),
received_redirect_count_(0),
received_data_before_response_(false),
- request_failed_(false),
- buf_(new net::IOBuffer(kBufferSize)) {
+ request_failed_(false) {
}
virtual void OnReceivedRedirect(URLRequest* request, const GURL& new_url) {
@@ -89,7 +87,7 @@ class TestDelegate : public URLRequest::Delegate {
} else {
// Initiate the first read.
int bytes_read = 0;
- if (request->Read(buf_, kBufferSize, &bytes_read))
+ if (request->Read(buf_, sizeof(buf_), &bytes_read))
OnReadCompleted(request, bytes_read);
else if (!request->status().is_io_pending())
OnResponseCompleted(request);
@@ -111,15 +109,15 @@ class TestDelegate : public URLRequest::Delegate {
received_bytes_count_ += bytes_read;
// consume the data
- data_received_.append(buf_->data(), bytes_read);
+ data_received_.append(buf_, bytes_read);
}
// If it was not end of stream, request to read more.
if (request->status().is_success() && bytes_read > 0) {
bytes_read = 0;
- while (request->Read(buf_, kBufferSize, &bytes_read)) {
+ while (request->Read(buf_, sizeof(buf_), &bytes_read)) {
if (bytes_read > 0) {
- data_received_.append(buf_->data(), bytes_read);
+ data_received_.append(buf_, bytes_read);
received_bytes_count_ += bytes_read;
} else {
break;
@@ -175,7 +173,6 @@ class TestDelegate : public URLRequest::Delegate {
bool request_failed() const { return request_failed_; }
private:
- static const int kBufferSize = 4096;
// options for controlling behavior
bool cancel_in_rr_;
bool cancel_in_rs_;
@@ -195,7 +192,7 @@ class TestDelegate : public URLRequest::Delegate {
std::string data_received_;
// our read buffer
- scoped_refptr<net::IOBuffer> buf_;
+ char buf_[4096];
};
// This object bounds the lifetime of an external python-based HTTP/FTP server