Diffstat (limited to 'net/url_request')
-rw-r--r--  net/url_request/mime_sniffer_proxy.cc       | 16
-rw-r--r--  net/url_request/mime_sniffer_proxy.h        |  9
-rw-r--r--  net/url_request/url_request.cc              |  2
-rw-r--r--  net/url_request/url_request.h               | 21
-rw-r--r--  net/url_request/url_request_file_dir_job.cc |  8
-rw-r--r--  net/url_request/url_request_file_dir_job.h  |  4
-rw-r--r--  net/url_request/url_request_file_job.cc     |  6
-rw-r--r--  net/url_request/url_request_file_job.h      |  2
-rw-r--r--  net/url_request/url_request_http_job.cc     |  3
-rw-r--r--  net/url_request/url_request_http_job.h      |  2
-rw-r--r--  net/url_request/url_request_inet_job.cc     |  4
-rw-r--r--  net/url_request/url_request_inet_job.h      |  2
-rw-r--r--  net/url_request/url_request_job.cc          | 10
-rw-r--r--  net/url_request/url_request_job.h           |  7
-rw-r--r--  net/url_request/url_request_simple_job.cc   |  4
-rw-r--r--  net/url_request/url_request_simple_job.h    |  2
-rw-r--r--  net/url_request/url_request_test_job.cc     |  5
-rw-r--r--  net/url_request/url_request_test_job.h      |  4
-rw-r--r--  net/url_request/url_request_unittest.h      | 15
19 files changed, 72 insertions, 54 deletions
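
The diffs below convert URLRequest, URLRequestJob, and the individual job implementations from raw char* read buffers to reference-counted net::IOBuffer objects, so a pending asynchronous read keeps its own reference to the buffer instead of relying on the caller to keep it alive. For orientation, here is a minimal caller-side sketch of the new pattern, modeled on the TestDelegate changes in url_request_unittest.h below; MyDelegate, kReadSize, and data_ are invented names for the example, and the sketch assumes the Chromium headers of this revision.

#include <string>

#include "net/base/io_buffer.h"
#include "net/url_request/url_request.h"

class MyDelegate : public URLRequest::Delegate {
 public:
  MyDelegate() : buf_(new net::IOBuffer(kReadSize)) {}

  // Only the two read-path callbacks are shown; a complete delegate also
  // implements the other URLRequest::Delegate methods.
  virtual void OnResponseStarted(URLRequest* request) {
    if (!request->status().is_success())
      return;
    int bytes_read = 0;
    // Read() now takes an IOBuffer*; while an asynchronous read is pending,
    // the request holds a reference to the buffer.
    if (request->Read(buf_, kReadSize, &bytes_read))
      OnReadCompleted(request, bytes_read);
  }

  virtual void OnReadCompleted(URLRequest* request, int bytes_read) {
    while (bytes_read > 0) {
      data_.append(buf_->data(), bytes_read);
      bytes_read = 0;
      if (!request->Read(buf_, kReadSize, &bytes_read))
        break;  // Either an error or an asynchronous read is now pending.
    }
  }

 private:
  static const int kReadSize = 4096;
  scoped_refptr<net::IOBuffer> buf_;
  std::string data_;
};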
diff --git a/net/url_request/mime_sniffer_proxy.cc b/net/url_request/mime_sniffer_proxy.cc
index 24b1fbe..3a8d9a9 100644
--- a/net/url_request/mime_sniffer_proxy.cc
+++ b/net/url_request/mime_sniffer_proxy.cc
@@ -6,10 +6,13 @@
 
 #include "net/base/mime_sniffer.h"
 
+static const int kBufferSize = 1024;
+
 MimeSnifferProxy::MimeSnifferProxy(URLRequest* request,
                                    URLRequest::Delegate* delegate)
     : request_(request), delegate_(delegate),
-      sniff_content_(false), error_(false) {
+      sniff_content_(false), error_(false),
+      buf_(new net::IOBuffer(kBufferSize)) {
   request->set_delegate(this);
 }
 
@@ -20,7 +23,7 @@ void MimeSnifferProxy::OnResponseStarted(URLRequest* request) {
     // We need to read content before we know the mime type,
     // so we don't call OnResponseStarted.
     sniff_content_ = true;
-    if (request_->Read(buf_, sizeof(buf_), &bytes_read_) && bytes_read_) {
+    if (request_->Read(buf_, kBufferSize, &bytes_read_) && bytes_read_) {
      OnReadCompleted(request, bytes_read_);
     } else if (!request_->status().is_io_pending()) {
       error_ = true;
@@ -32,7 +35,8 @@ void MimeSnifferProxy::OnResponseStarted(URLRequest* request) {
   delegate_->OnResponseStarted(request);
 }
 
-bool MimeSnifferProxy::Read(char* buf, int max_bytes, int *bytes_read) {
+bool MimeSnifferProxy::Read(net::IOBuffer* buf, int max_bytes,
+                            int *bytes_read) {
   if (sniff_content_) {
     // This is the first call to Read() after we've sniffed content.
     // Return our local buffer or the error we ran into.
@@ -43,7 +47,7 @@ bool MimeSnifferProxy::Read(char* buf, int max_bytes, int *bytes_read) {
       return false;
     }
 
-    memcpy(buf, buf_, bytes_read_);
+    memcpy(buf->data(), buf_->data(), bytes_read_);
     *bytes_read = bytes_read_;
     return true;
   }
@@ -57,8 +61,8 @@ void MimeSnifferProxy::OnReadCompleted(URLRequest* request, int bytes_read) {
     std::string type_hint;
     request_->GetMimeType(&type_hint);
     bytes_read_ = bytes_read;
-    net::SniffMimeType(
-        buf_, bytes_read_, request_->url(), type_hint, &mime_type_);
+    net::SniffMimeType(buf_->data(), bytes_read_, request_->url(),
+                       type_hint, &mime_type_);
   } else {
     error_ = true;
   }
diff --git a/net/url_request/mime_sniffer_proxy.h b/net/url_request/mime_sniffer_proxy.h
index 0029a80..898ea60 100644
--- a/net/url_request/mime_sniffer_proxy.h
+++ b/net/url_request/mime_sniffer_proxy.h
@@ -19,6 +19,10 @@
 // 2) ms_->mime_type() -- returns the sniffed mime type of the data;
 //    valid after OnResponseStarted() is called.
 
+#ifndef NET_URL_REQUEST_MIME_SNIFFER_PROXY_H_
+#define NET_URL_REQUEST_MIME_SNIFFER_PROXY_H_
+
+#include "net/base/io_buffer.h"
 #include "net/url_request/url_request.h"
 
 class MimeSnifferProxy : public URLRequest::Delegate {
@@ -48,7 +52,7 @@ class MimeSnifferProxy : public URLRequest::Delegate {
   }
 
   // Wrapper around URLRequest::Read.
-  bool Read(char* buf, int max_bytes, int *bytes_read);
+  bool Read(net::IOBuffer* buf, int max_bytes, int *bytes_read);
 
   // Return the sniffed mime type of the request.  Valid after
   // OnResponseStarted() has been called on the delegate.
@@ -69,8 +73,9 @@ class MimeSnifferProxy : public URLRequest::Delegate {
   bool error_;
 
   // A buffer for the first bit of the request.
-  char buf_[1024];
+  scoped_refptr<net::IOBuffer> buf_;
   // The number of bytes we've read into the buffer.
   int bytes_read_;
 };
 
+#endif  // NET_URL_REQUEST_MIME_SNIFFER_PROXY_H_
diff --git a/net/url_request/url_request.cc b/net/url_request/url_request.cc
index 87facba..19c9810 100644
--- a/net/url_request/url_request.cc
+++ b/net/url_request/url_request.cc
@@ -252,7 +252,7 @@ void URLRequest::CancelWithError(int os_error) {
   // about being called recursively.
 }
 
-bool URLRequest::Read(char* dest, int dest_size, int *bytes_read) {
+bool URLRequest::Read(net::IOBuffer* dest, int dest_size, int *bytes_read) {
   DCHECK(job_);
   DCHECK(bytes_read);
   DCHECK(!job_->is_done());
diff --git a/net/url_request/url_request.h b/net/url_request/url_request.h
index 5dfc711..f697362 100644
--- a/net/url_request/url_request.h
+++ b/net/url_request/url_request.h
@@ -21,6 +21,9 @@
 #include "net/url_request/url_request_context.h"
 #include "net/url_request/url_request_status.h"
 
+namespace net {
+class IOBuffer;
+}
 class URLRequestJob;
 
 // This stores the values of the Set-Cookie headers received during the request.
@@ -367,16 +370,14 @@ class URLRequest {
   // successful status.
   // If data is available, Read will return true, and the data and length will
   // be returned immediately.  If data is not available, Read returns false,
-  // and an asynchronous Read is initiated.  The caller guarantees the
-  // buffer provided will be available until the Read is finished.  The
-  // Read is finished when the caller receives the OnReadComplete
-  // callback.  OnReadComplete will be always be called, even if there
-  // was a failure.
+  // and an asynchronous Read is initiated.  The Read is finished when
+  // the caller receives the OnReadComplete callback.  OnReadComplete will be
+  // always be called, even if there was a failure.
   //
-  // The buf parameter is a buffer to receive the data.  Once the read is
-  // initiated, the caller guarantees availability of this buffer until
-  // the OnReadComplete is received.  The buffer must be at least
-  // max_bytes in length.
+  // The buf parameter is a buffer to receive the data.  If the operation
+  // completes asynchronously, the implementation will reference the buffer
+  // until OnReadComplete is called.  The buffer must be at least max_bytes in
+  // length.
   //
   // The max_bytes parameter is the maximum number of bytes to read.
   //
@@ -386,7 +387,7 @@ class URLRequest {
   //
   // If a read error occurs, Read returns false and the request->status
   // will be set to an error.
-  bool Read(char* buf, int max_bytes, int *bytes_read);
+  bool Read(net::IOBuffer* buf, int max_bytes, int *bytes_read);
 
   // One of the following two methods should be called in response to an
   // OnAuthRequired() callback (and only then).
diff --git a/net/url_request/url_request_file_dir_job.cc b/net/url_request/url_request_file_dir_job.cc
index df24eab..1608684 100644
--- a/net/url_request/url_request_file_dir_job.cc
+++ b/net/url_request/url_request_file_dir_job.cc
@@ -26,7 +26,6 @@ URLRequestFileDirJob::URLRequestFileDirJob(URLRequest* request,
       list_complete_(false),
       wrote_header_(false),
       read_pending_(false),
-      read_buffer_(NULL),
       read_buffer_length_(0) {
 }
 
@@ -68,7 +67,7 @@ void URLRequestFileDirJob::Kill() {
     lister_->Cancel();
 }
 
-bool URLRequestFileDirJob::ReadRawData(char* buf, int buf_size,
+bool URLRequestFileDirJob::ReadRawData(net::IOBuffer* buf, int buf_size,
                                        int *bytes_read) {
   DCHECK(bytes_read);
   *bytes_read = 0;
@@ -76,7 +75,7 @@ bool URLRequestFileDirJob::ReadRawData(char* buf, int buf_size,
   if (is_done())
     return true;
 
-  if (FillReadBuffer(buf, buf_size, bytes_read))
+  if (FillReadBuffer(buf->data(), buf_size, bytes_read))
     return true;
 
   // We are waiting for more data
@@ -183,7 +182,8 @@ bool URLRequestFileDirJob::FillReadBuffer(char *buf, int buf_size,
 void URLRequestFileDirJob::CompleteRead() {
   if (read_pending_) {
     int bytes_read;
-    if (FillReadBuffer(read_buffer_, read_buffer_length_, &bytes_read)) {
+    if (FillReadBuffer(read_buffer_->data(), read_buffer_length_,
+                       &bytes_read)) {
       // We completed the read, so reset the read buffer.
       read_pending_ = false;
       read_buffer_ = NULL;
diff --git a/net/url_request/url_request_file_dir_job.h b/net/url_request/url_request_file_dir_job.h
index 882f967..c3881dc 100644
--- a/net/url_request/url_request_file_dir_job.h
+++ b/net/url_request/url_request_file_dir_job.h
@@ -21,7 +21,7 @@ class URLRequestFileDirJob
   virtual void Start();
   virtual void StartAsync();
   virtual void Kill();
-  virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
+  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
   virtual bool GetMimeType(std::string* mime_type);
   virtual bool GetCharset(std::string* charset);
   virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
@@ -55,7 +55,7 @@ class URLRequestFileDirJob
   // we wait for IO to complete.  When done, we fill the buffer
   // manually.
   bool read_pending_;
-  char *read_buffer_;
+  scoped_refptr<net::IOBuffer> read_buffer_;
   int read_buffer_length_;
 
   DISALLOW_EVIL_CONSTRUCTORS(URLRequestFileDirJob);
diff --git a/net/url_request/url_request_file_job.cc b/net/url_request/url_request_file_job.cc
index 994a58c..92e7f87 100644
--- a/net/url_request/url_request_file_job.cc
+++ b/net/url_request/url_request_file_job.cc
@@ -128,12 +128,12 @@ void URLRequestFileJob::Kill() {
   URLRequestJob::Kill();
 }
 
-bool URLRequestFileJob::ReadRawData(
-    char* dest, int dest_size, int *bytes_read) {
+bool URLRequestFileJob::ReadRawData(net::IOBuffer* dest, int dest_size,
+                                    int *bytes_read) {
   DCHECK_NE(dest_size, 0);
   DCHECK(bytes_read);
 
-  int rv = stream_.Read(dest, dest_size, &io_callback_);
+  int rv = stream_.Read(dest->data(), dest_size, &io_callback_);
   if (rv >= 0) {
     // Data is immediately available.
     *bytes_read = rv;
diff --git a/net/url_request/url_request_file_job.h b/net/url_request/url_request_file_job.h
index a00e439..0ccaa5a 100644
--- a/net/url_request/url_request_file_job.h
+++ b/net/url_request/url_request_file_job.h
@@ -20,7 +20,7 @@ class URLRequestFileJob : public URLRequestJob {
 
   virtual void Start();
   virtual void Kill();
-  virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
+  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
   virtual bool IsRedirectResponse(GURL* location, int* http_status_code);
   virtual bool GetMimeType(std::string* mime_type);
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 7728f6c..7a74500 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -348,7 +348,8 @@ bool URLRequestHttpJob::GetMoreData() {
   return transaction_.get() && !read_in_progress_;
 }
 
-bool URLRequestHttpJob::ReadRawData(char* buf, int buf_size, int *bytes_read) {
+bool URLRequestHttpJob::ReadRawData(net::IOBuffer* buf, int buf_size,
+                                    int *bytes_read) {
   DCHECK_NE(buf_size, 0);
   DCHECK(bytes_read);
   DCHECK(!read_in_progress_);
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index eda4b4b..e53db48 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -53,7 +53,7 @@ class URLRequestHttpJob : public URLRequestJob {
   virtual void CancelAuth();
   virtual void ContinueDespiteLastError();
   virtual bool GetMoreData();
-  virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
+  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
 
   // Shadows URLRequestJob's version of this method so we can grab cookies.
   void NotifyHeadersComplete();
diff --git a/net/url_request/url_request_inet_job.cc b/net/url_request/url_request_inet_job.cc
index 2d45526..09abfa2 100644
--- a/net/url_request/url_request_inet_job.cc
+++ b/net/url_request/url_request_inet_job.cc
@@ -185,7 +185,7 @@ void URLRequestInetJob::OnIOComplete(const AsyncResult& result) {
   }
 }
 
-bool URLRequestInetJob::ReadRawData(char* dest, int dest_size,
+bool URLRequestInetJob::ReadRawData(net::IOBuffer* dest, int dest_size,
                                     int *bytes_read) {
   if (is_done())
     return 0;
@@ -196,7 +196,7 @@ bool URLRequestInetJob::ReadRawData(char* dest, int dest_size,
 
   *bytes_read = 0;
 
-  int result = CallInternetRead(dest, dest_size, bytes_read);
+  int result = CallInternetRead(dest->data(), dest_size, bytes_read);
   if (result == ERROR_SUCCESS) {
     DLOG(INFO) << "read " << *bytes_read << " bytes";
     if (*bytes_read == 0)
diff --git a/net/url_request/url_request_inet_job.h b/net/url_request/url_request_inet_job.h
index 6341105..bef0c4f 100644
--- a/net/url_request/url_request_inet_job.h
+++ b/net/url_request/url_request_inet_job.h
@@ -29,7 +29,7 @@ class URLRequestInetJob : public URLRequestJob {
   }
 
   virtual void Kill();
-  virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
+  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
 
   // URLRequestJob Authentication methods
   virtual void SetAuth(const std::wstring& username,
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 6e6a1df..659156b 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -8,6 +8,7 @@
 #include "base/string_util.h"
 #include "googleurl/src/gurl.h"
 #include "net/base/auth.h"
+#include "net/base/io_buffer.h"
 #include "net/base/net_errors.h"
 #include "net/url_request/url_request.h"
 #include "net/url_request/url_request_job_metrics.h"
@@ -97,7 +98,7 @@ void URLRequestJob::ContinueDespiteLastError() {
 // This function calls ReadData to get stream data. If a filter exists, passes
 // the data to the attached filter. Then returns the output from filter back to
 // the caller.
-bool URLRequestJob::Read(char* buf, int buf_size, int *bytes_read) {
+bool URLRequestJob::Read(net::IOBuffer* buf, int buf_size, int *bytes_read) {
   bool rv = false;
 
   DCHECK_LT(buf_size, 1000000);  // sanity check
@@ -140,7 +141,7 @@ bool URLRequestJob::ReadRawDataForFilter(int *bytes_read) {
     // TODO(mbelshe): is it possible that the filter needs *MORE* data
     //    when there is some data already in the buffer?
     if (!filter_->stream_data_len() && !is_done()) {
-      char* stream_buffer = filter_->stream_buffer();
+      net::IOBuffer* stream_buffer = filter_->stream_buffer();
       int stream_buffer_size = filter_->stream_buffer_size();
       rv = ReadRawData(stream_buffer, stream_buffer_size, bytes_read);
       if (rv && *bytes_read > 0)
@@ -186,7 +187,7 @@ bool URLRequestJob::ReadFilteredData(int *bytes_read) {
   // Get filtered data
   int filtered_data_len = read_buffer_len_;
   Filter::FilterStatus status;
-  status = filter_->ReadData(read_buffer_, &filtered_data_len);
+  status = filter_->ReadData(read_buffer_->data(), &filtered_data_len);
   switch (status) {
     case Filter::FILTER_DONE: {
       *bytes_read = filtered_data_len;
@@ -242,7 +243,8 @@ bool URLRequestJob::ReadFilteredData(int *bytes_read) {
   return rv;
 }
 
-bool URLRequestJob::ReadRawData(char* buf, int buf_size, int *bytes_read) {
+bool URLRequestJob::ReadRawData(net::IOBuffer* buf, int buf_size,
+                                int *bytes_read) {
   DCHECK(bytes_read);
   *bytes_read = 0;
   NotifyDone(URLRequestStatus());
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index 43fa866..0a5744b 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -17,6 +17,7 @@
 
 namespace net {
 class HttpResponseInfo;
+class IOBuffer;
 class UploadData;
 }
 
@@ -78,7 +79,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
   // bytes read, 0 when there is no more data, or -1 if there was an error.
   // This is just the backend for URLRequest::Read, see that function for more
   // info.
-  bool Read(char* buf, int buf_size, int *bytes_read);
+  bool Read(net::IOBuffer* buf, int buf_size, int *bytes_read);
 
   // Called to fetch the current load state for the job.
   virtual net::LoadState GetLoadState() const { return net::LOAD_STATE_IDLE; }
@@ -231,7 +232,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
   // If async IO is pending, the status of the request will be
   // URLRequestStatus::IO_PENDING, and buf must remain available until the
   // operation is completed.  See comments on URLRequest::Read for more info.
-  virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
+  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
 
   // Informs the filter that data has been read into its buffer
   void FilteredDataRead(int bytes_read);
@@ -289,7 +290,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob> {
   // processing the filtered data, we return the data in the caller's buffer.
   // While the async IO is in progress, we save the user buffer here, and
   // when the IO completes, we fill this in.
-  char *read_buffer_;
+  net::IOBuffer *read_buffer_;
   int read_buffer_len_;
 
   // Used by HandleResponseIfNecessary to track whether we've sent the
diff --git a/net/url_request/url_request_simple_job.cc b/net/url_request/url_request_simple_job.cc
index ae078b3..a4ef4e1 100644
--- a/net/url_request/url_request_simple_job.cc
+++ b/net/url_request/url_request_simple_job.cc
@@ -29,13 +29,13 @@ bool URLRequestSimpleJob::GetCharset(std::string* charset) {
   return true;
 }
 
-bool URLRequestSimpleJob::ReadRawData(char* buf, int buf_size,
+bool URLRequestSimpleJob::ReadRawData(net::IOBuffer* buf, int buf_size,
                                       int* bytes_read) {
   DCHECK(bytes_read);
   int remaining = static_cast<int>(data_.size()) - data_offset_;
   if (buf_size > remaining)
     buf_size = remaining;
-  memcpy(buf, data_.data() + data_offset_, buf_size);
+  memcpy(buf->data(), data_.data() + data_offset_, buf_size);
   data_offset_ += buf_size;
   *bytes_read = buf_size;
   return true;
diff --git a/net/url_request/url_request_simple_job.h b/net/url_request/url_request_simple_job.h
index 4cb847c..183598a 100644
--- a/net/url_request/url_request_simple_job.h
+++ b/net/url_request/url_request_simple_job.h
@@ -13,7 +13,7 @@ class URLRequestSimpleJob : public URLRequestJob {
   URLRequestSimpleJob(URLRequest* request);
 
   virtual void Start();
-  virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
+  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
   virtual bool GetMimeType(std::string* mime_type);
   virtual bool GetCharset(std::string* charset);
 
diff --git a/net/url_request/url_request_test_job.cc b/net/url_request/url_request_test_job.cc
index d544ce4..eda77a7 100644
--- a/net/url_request/url_request_test_job.cc
+++ b/net/url_request/url_request_test_job.cc
@@ -93,7 +93,8 @@ void URLRequestTestJob::StartAsync() {
   this->NotifyHeadersComplete();
 }
 
-bool URLRequestTestJob::ReadRawData(char* buf, int buf_size, int *bytes_read) {
+bool URLRequestTestJob::ReadRawData(net::IOBuffer* buf, int buf_size,
+                                    int *bytes_read) {
   if (stage_ == WAITING) {
     async_buf_ = buf;
     async_buf_size_ = buf_size;
@@ -112,7 +113,7 @@ bool URLRequestTestJob::ReadRawData(char* buf, int buf_size, int *bytes_read) {
   if (to_read + offset_ > static_cast<int>(data_.length()))
     to_read = static_cast<int>(data_.length()) - offset_;
 
-  memcpy(buf, &data_.c_str()[offset_], to_read);
+  memcpy(buf->data(), &data_.c_str()[offset_], to_read);
   offset_ += to_read;
 
   *bytes_read = to_read;
diff --git a/net/url_request/url_request_test_job.h b/net/url_request/url_request_test_job.h
index ad69123..4cbf37e 100644
--- a/net/url_request/url_request_test_job.h
+++ b/net/url_request/url_request_test_job.h
@@ -51,7 +51,7 @@ class URLRequestTestJob : public URLRequestJob {
 
   // Job functions
   virtual void Start();
-  virtual bool ReadRawData(char* buf, int buf_size, int *bytes_read);
+  virtual bool ReadRawData(net::IOBuffer* buf, int buf_size, int *bytes_read);
   virtual void Kill();
   virtual bool GetMimeType(std::string* mime_type);
   virtual void GetResponseInfo(net::HttpResponseInfo* info);
@@ -78,7 +78,7 @@ class URLRequestTestJob : public URLRequestJob {
   int offset_;
 
   // Holds the buffer for an asynchronous ReadRawData call
-  char* async_buf_;
+  net::IOBuffer* async_buf_;
   int async_buf_size_;
 };
 
diff --git a/net/url_request/url_request_unittest.h b/net/url_request/url_request_unittest.h
index 58a0218..9389ce8 100644
--- a/net/url_request/url_request_unittest.h
+++ b/net/url_request/url_request_unittest.h
@@ -21,6 +21,7 @@
 #include "base/thread.h"
 #include "base/time.h"
 #include "base/waitable_event.h"
+#include "net/base/io_buffer.h"
 #include "net/base/net_errors.h"
 #include "net/http/http_network_layer.h"
 #include "net/url_request/url_request.h"
@@ -62,7 +63,8 @@ class TestDelegate : public URLRequest::Delegate {
         received_bytes_count_(0),
         received_redirect_count_(0),
         received_data_before_response_(false),
-        request_failed_(false) {
+        request_failed_(false),
+        buf_(new net::IOBuffer(kBufferSize)) {
   }
 
   virtual void OnReceivedRedirect(URLRequest* request, const GURL& new_url) {
@@ -87,7 +89,7 @@ class TestDelegate : public URLRequest::Delegate {
     } else {
       // Initiate the first read.
       int bytes_read = 0;
-      if (request->Read(buf_, sizeof(buf_), &bytes_read))
+      if (request->Read(buf_, kBufferSize, &bytes_read))
         OnReadCompleted(request, bytes_read);
       else if (!request->status().is_io_pending())
         OnResponseCompleted(request);
@@ -109,15 +111,15 @@ class TestDelegate : public URLRequest::Delegate {
       received_bytes_count_ += bytes_read;
 
       // consume the data
-      data_received_.append(buf_, bytes_read);
+      data_received_.append(buf_->data(), bytes_read);
     }
 
     // If it was not end of stream, request to read more.
     if (request->status().is_success() && bytes_read > 0) {
       bytes_read = 0;
-      while (request->Read(buf_, sizeof(buf_), &bytes_read)) {
+      while (request->Read(buf_, kBufferSize, &bytes_read)) {
         if (bytes_read > 0) {
-          data_received_.append(buf_, bytes_read);
+          data_received_.append(buf_->data(), bytes_read);
           received_bytes_count_ += bytes_read;
         } else {
           break;
@@ -173,6 +175,7 @@ class TestDelegate : public URLRequest::Delegate {
   bool request_failed() const { return request_failed_; }
 
  private:
+  static const int kBufferSize = 4096;
   // options for controlling behavior
   bool cancel_in_rr_;
   bool cancel_in_rs_;
@@ -192,7 +195,7 @@ class TestDelegate : public URLRequest::Delegate {
   std::string data_received_;
 
   // our read buffer
-  char buf_[4096];
+  scoped_refptr<net::IOBuffer> buf_;
 };
 
 // This object bounds the lifetime of an external python-based HTTP/FTP server