summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--net/url_request/url_request.cc2
-rw-r--r--net/url_request/url_request.h4
-rw-r--r--net/url_request/url_request_job.cc8
-rw-r--r--net/url_request/url_request_job.h2
4 files changed, 7 insertions, 9 deletions
diff --git a/net/url_request/url_request.cc b/net/url_request/url_request.cc
index c2a8d28..3198e82 100644
--- a/net/url_request/url_request.cc
+++ b/net/url_request/url_request.cc
@@ -339,7 +339,7 @@ void URLRequest::DoCancel(int os_error, const net::SSLInfo& ssl_info) {
// about being called recursively.
}
-bool URLRequest::Read(net::IOBuffer* dest, int dest_size, int *bytes_read) {
+bool URLRequest::Read(net::IOBuffer* dest, int dest_size, int* bytes_read) {
DCHECK(job_);
DCHECK(bytes_read);
DCHECK(!job_->is_done());
diff --git a/net/url_request/url_request.h b/net/url_request/url_request.h
index 23e2f6f..68b822c 100644
--- a/net/url_request/url_request.h
+++ b/net/url_request/url_request.h
@@ -487,7 +487,7 @@ class URLRequest {
//
// If a read error occurs, Read returns false and the request->status
// will be set to an error.
- bool Read(net::IOBuffer* buf, int max_bytes, int *bytes_read);
+ bool Read(net::IOBuffer* buf, int max_bytes, int* bytes_read);
// If this request is being cached by the HTTP cache, stop subsequent caching.
// Note that this method has no effect on other (simultaneous or not) requests
@@ -581,7 +581,7 @@ class URLRequest {
// Restarting involves replacing the current job with a new one such as what
// happens when following a HTTP redirect.
- void RestartWithJob(URLRequestJob *job);
+ void RestartWithJob(URLRequestJob* job);
void PrepareToRestart();
// Detaches the job from this request in preparation for this object going
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 01f9a1c..486d8c0 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -31,7 +31,6 @@ URLRequestJob::URLRequestJob(URLRequest* request)
is_compressed_(false),
done_(false),
filter_needs_more_output_space_(false),
- read_buffer_(NULL),
read_buffer_len_(0),
has_handled_response_(false),
expected_content_size_(-1),
@@ -199,7 +198,7 @@ void URLRequestJob::StopCaching() {
// Nothing to do here.
}
-bool URLRequestJob::ReadRawDataForFilter(int *bytes_read) {
+bool URLRequestJob::ReadRawDataForFilter(int* bytes_read) {
bool rv = false;
DCHECK(bytes_read);
@@ -233,7 +232,7 @@ void URLRequestJob::FilteredDataRead(int bytes_read) {
filter_->FlushStreamBuffer(bytes_read);
}
-bool URLRequestJob::ReadFilteredData(int *bytes_read) {
+bool URLRequestJob::ReadFilteredData(int* bytes_read) {
DCHECK(filter_.get()); // don't add data if there is no filter
DCHECK(read_buffer_ != NULL); // we need to have a buffer to fill
DCHECK_GT(read_buffer_len_, 0); // sanity check
@@ -330,8 +329,7 @@ bool URLRequestJob::ReadFilteredData(int *bytes_read) {
if (rv) {
// When we successfully finished a read, we no longer need to
- // save the caller's buffers. For debugging purposes, we clear
- // them out.
+ // save the caller's buffers. Release our reference.
read_buffer_ = NULL;
read_buffer_len_ = 0;
}
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index 9f7eb5e..c0916e8 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -354,7 +354,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
// processing the filtered data, we return the data in the caller's buffer.
// While the async IO is in progress, we save the user buffer here, and
// when the IO completes, we fill this in.
- net::IOBuffer *read_buffer_;
+ scoped_refptr<net::IOBuffer> read_buffer_;
int read_buffer_len_;
// Used by HandleResponseIfNecessary to track whether we've sent the