author     adamk@chromium.org <adamk@chromium.org>    2011-03-16 01:36:44 +0000
committer  adamk@chromium.org <adamk@chromium.org>    2011-03-16 01:36:44 +0000
commit     0da15e586f92f3488658034bfef5ecbf42656791 (patch)
tree       08bfc880c0fcea7ca08252e20bea29ba6a6d25f6
parent     81ad7f4a9c5d78359e90ebfa0aa130791a010226 (diff)
Various small cleanups in URLRequestJob:
- Remove unused or unneeded methods.
- Make (almost) all data private.
R=eroman@chromium.org
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/6697035
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@78319 0039d316-1c4b-4281-b951-d872f2087c98
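For readers skimming the diff below: the cleanup follows a common C++ pattern, moving constructor-body assignments into the member initializer list and narrowing data members from protected to private behind const accessors. A minimal sketch of that pattern, using a hypothetical Job class rather than the actual Chromium code:

// Minimal sketch of the cleanup pattern (hypothetical Job class).
class Job {
 public:
  explicit Job(int load_flags, bool profiling)
      : done_(false),             // previously assigned in the ctor body
        load_flags_(load_flags),  // cached copy; the source may go away
        is_profiling_(profiling) {}

  // Read-only accessors replace direct access to the (now private) data.
  bool is_done() const { return done_; }
  int load_flags() const { return load_flags_; }

 private:  // previously protected: subclasses could mutate these directly
  bool done_;
  int load_flags_;
  bool is_profiling_;
};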
-rw-r--r--  net/url_request/url_request_job.cc | 21
-rw-r--r--  net/url_request/url_request_job.h  | 41
2 files changed, 23 insertions(+), 39 deletions(-)
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index ed01f0e..7208614 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -28,11 +28,13 @@ namespace net {
 
 URLRequestJob::URLRequestJob(URLRequest* request)
     : request_(request),
+      done_(false),
+      load_flags_(request_->load_flags()),
+      is_profiling_(request_->enable_profiling()),
       prefilter_bytes_read_(0),
       postfilter_bytes_read_(0),
       is_compressible_content_(false),
       is_compressed_(false),
-      done_(false),
       filter_needs_more_output_space_(false),
       filtered_read_buffer_len_(0),
       has_handled_response_(false),
@@ -43,9 +45,7 @@ URLRequestJob::URLRequestJob(URLRequest* request)
       bytes_observed_in_packets_(0),
       max_packets_timed_(0),
       observed_packet_count_(0) {
-  load_flags_ = request_->load_flags();
-  is_profiling_ = request->enable_profiling();
-  if (is_profiling()) {
+  if (is_profiling_) {
     metrics_.reset(new URLRequestJobMetrics());
     metrics_->start_time_ = TimeTicks::Now();
   }
@@ -215,13 +215,6 @@ void URLRequestJob::FollowDeferredRedirect() {
   FollowRedirect(redirect_url, redirect_status_code);
 }
 
-URLRequestJobMetrics* URLRequestJob::RetrieveMetrics() {
-  if (is_profiling())
-    return metrics_.release();
-  else
-    return NULL;
-}
-
 bool URLRequestJob::GetMimeType(std::string* mime_type) const {
   return false;
 }
@@ -562,7 +555,7 @@ void URLRequestJob::NotifyDone(const URLRequestStatus &status) {
 
   RecordCompressionHistograms();
 
-  if (is_profiling() && metrics_->total_bytes_read_ > 0) {
+  if (is_profiling_ && metrics_->total_bytes_read_ > 0) {
     // There are valid IO statistics. Fill in other fields of metrics for
     // profiling consumers to retrieve information.
     metrics_->original_url_.reset(new GURL(request_->original_url()));
@@ -830,7 +823,7 @@ void URLRequestJob::OnRawReadComplete(int bytes_read) {
 }
 
 void URLRequestJob::RecordBytesRead(int bytes_read) {
-  if (is_profiling()) {
+  if (is_profiling_) {
     ++(metrics_->number_of_read_IO_);
     metrics_->total_bytes_read_ += bytes_read;
   }
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index 3d29201..d47f734 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -18,7 +18,6 @@
 #include "net/base/host_port_pair.h"
 #include "net/base/load_states.h"
 
-
 namespace net {
 
 class AuthChallengeInfo;
@@ -180,14 +179,6 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
   // NotifyDone on the request.
   bool is_done() const { return done_; }
 
-  // Returns true if the job is doing performance profiling
-  bool is_profiling() const { return is_profiling_; }
-
-  // Retrieve the performance measurement of the job. The data is encapsulated
-  // with a URLRequestJobMetrics object. The caller owns this object from now
-  // on.
-  URLRequestJobMetrics* RetrieveMetrics();
-
   // Get/Set expected content size
   int64 expected_content_size() const { return expected_content_size_; }
   void set_expected_content_size(const int64& size) {
@@ -286,22 +277,6 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
   // request was released by DetachRequest().
   net::URLRequest* request_;
 
-  // Whether the job is doing performance profiling
-  bool is_profiling_;
-
-  // Contains IO performance measurement when profiling is enabled.
-  scoped_ptr<URLRequestJobMetrics> metrics_;
-
-  // The number of bytes read before passing to the filter.
-  int prefilter_bytes_read_;
-  // The number of bytes read after passing through the filter.
-  int postfilter_bytes_read_;
-  // True when (we believe) the content in this net::URLRequest was
-  // compressible.
-  bool is_compressible_content_;
-  // True when the content in this net::URLRequest was compressed.
-  bool is_compressed_;
-
  private:
   // When data filtering is enabled, this function is used to read data
   // for the filter.  Returns true if raw data was read.  Returns false if
@@ -344,6 +319,22 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
   // Cache the load flags from request_ because it might go away.
   int load_flags_;
 
+  // Whether the job is doing performance profiling
+  bool is_profiling_;
+
+  // Contains IO performance measurement when profiling is enabled.
+  scoped_ptr<URLRequestJobMetrics> metrics_;
+
+  // The number of bytes read before passing to the filter.
+  int prefilter_bytes_read_;
+  // The number of bytes read after passing through the filter.
+  int postfilter_bytes_read_;
+  // True when (we believe) the content in this net::URLRequest was
+  // compressible.
+  bool is_compressible_content_;
+  // True when the content in this net::URLRequest was compressed.
+  bool is_compressed_;
+
   // The data stream filter which is enabled on demand.
   scoped_ptr<Filter> filter_;
 
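A side note on the ordering in the .cc hunk: C++ initializes non-static members in declaration order, not in the order written in the initializer list, and GCC/Clang flag mismatches with -Wreorder; the new entries (done_, load_flags_, is_profiling_) are presumably placed to stay consistent with the new declaration order in the header. A toy example of the pitfall this discipline avoids, with hypothetical names rather than Chromium code:

struct Example {
  // The list below is written b_ first, but a_ is declared first, so a_ is
  // initialized first -- and reading b_ at that point is undefined behavior.
  // GCC/Clang warn about the mismatched order with -Wreorder.
  Example() : b_(1), a_(b_ + 1) {}
  int a_;
  int b_;
};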