author     adamk@chromium.org <adamk@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2011-03-16 20:17:13 +0000
committer  adamk@chromium.org <adamk@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2011-03-16 20:17:13 +0000
commit     ce8bac3bf86d9a038a189c6b37808572525e26a5
tree       9a64a699e43711c461c2cff65813842b37275693 /net
parent     72279de2022a9c5e862963baf000b54ec559d8ed
Remove URLRequestJobMetrics and related code.
It's apparently no longer used.
R=eroman@chromium.org
BUG=none
TEST=try bots
Review URL: http://codereview.chromium.org/6672036
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@78417 0039d316-1c4b-4281-b951-d872f2087c98
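
For context, here is a minimal sketch of what the removed metrics API looked like to a profiling consumer. It uses only the public fields and AppendText() method declared in the deleted url_request_job_metrics.h below; the DumpJobMetrics helper and the literal byte/read counts are invented for illustration and are not part of this change.

```cpp
// Illustration only -- not part of this change. Exercises the public fields
// and AppendText() of the URLRequestJobMetrics class deleted below.
#include <string>

#include "base/time.h"
#include "googleurl/src/gurl.h"
#include "net/url_request/url_request_job_metrics.h"

std::wstring DumpJobMetrics(const GURL& url) {
  net::URLRequestJobMetrics metrics;
  metrics.original_url_.reset(new GURL(url));
  metrics.start_time_ = base::TimeTicks::Now();

  // A URLRequestJob with profiling enabled used to bump these counters as it
  // read; the values here are made up for the example.
  metrics.total_bytes_read_ = 1024;
  metrics.number_of_read_IO_ = 2;
  metrics.end_time_ = base::TimeTicks::Now();
  metrics.success_ = true;

  std::wstring report;
  metrics.AppendText(&report);  // "job url = ...; total bytes read = 1024; ..."
  return report;
}
```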
Diffstat (limited to 'net')
-rw-r--r-- | net/net.gyp                                |  2
-rw-r--r-- | net/url_request/url_request.cc             |  1
-rw-r--r-- | net/url_request/url_request.h              |  9
-rw-r--r-- | net/url_request/url_request_job.cc         | 25
-rw-r--r-- | net/url_request/url_request_job.h          |  7
-rw-r--r-- | net/url_request/url_request_job_metrics.cc | 46
-rw-r--r-- | net/url_request/url_request_job_metrics.h  | 54
7 files changed, 0 insertions, 144 deletions
diff --git a/net/net.gyp b/net/net.gyp
index 730ad4a..5d3ca045 100644
--- a/net/net.gyp
+++ b/net/net.gyp
@@ -719,8 +719,6 @@
         'url_request/url_request_job.h',
         'url_request/url_request_job_manager.cc',
         'url_request/url_request_job_manager.h',
-        'url_request/url_request_job_metrics.cc',
-        'url_request/url_request_job_metrics.h',
         'url_request/url_request_job_tracker.cc',
         'url_request/url_request_job_tracker.h',
         'url_request/url_request_netlog_params.cc',
diff --git a/net/url_request/url_request.cc b/net/url_request/url_request.cc
index d4c1542..7b792a5 100644
--- a/net/url_request/url_request.cc
+++ b/net/url_request/url_request.cc
@@ -117,7 +117,6 @@ URLRequest::URLRequest(const GURL& url, Delegate* delegate)
       load_flags_(net::LOAD_NORMAL),
       delegate_(delegate),
       is_pending_(false),
-      enable_profiling_(false),
       redirect_limit_(kMaxRedirects),
       final_upload_progress_(0),
       priority_(net::LOWEST),
diff --git a/net/url_request/url_request.h b/net/url_request/url_request.h
index 2c58db1..9bccfb2 100644
--- a/net/url_request/url_request.h
+++ b/net/url_request/url_request.h
@@ -536,12 +536,6 @@ class URLRequest : public base::NonThreadSafe {
   // cancel the request instead, call Cancel().
   void ContinueDespiteLastError();
 
-  // Returns true if performance profiling should be enabled on the
-  // URLRequestJob serving this request.
-  bool enable_profiling() const { return enable_profiling_; }
-
-  void set_enable_profiling(bool profiling) { enable_profiling_ = profiling; }
-
   // Used to specify the context (cookie store, cache) for this request.
   URLRequestContext* context();
   void set_context(URLRequestContext* context);
@@ -649,9 +643,6 @@ class URLRequest : public base::NonThreadSafe {
   // Externally-defined data accessible by key
   UserDataMap user_data_;
 
-  // Whether to enable performance profiling on the job serving this request.
-  bool enable_profiling_;
-
   // Number of times we're willing to redirect. Used to guard against
   // infinite redirects.
   int redirect_limit_;
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 7208614..59e63b3 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -18,7 +18,6 @@
 #include "net/http/http_response_headers.h"
 #include "net/url_request/url_request.h"
 #include "net/url_request/url_request_context.h"
-#include "net/url_request/url_request_job_metrics.h"
 #include "net/url_request/url_request_job_tracker.h"
 
 using base::Time;
@@ -30,7 +29,6 @@ URLRequestJob::URLRequestJob(URLRequest* request)
     : request_(request),
       done_(false),
       load_flags_(request_->load_flags()),
-      is_profiling_(request_->enable_profiling()),
       prefilter_bytes_read_(0),
       postfilter_bytes_read_(0),
       is_compressible_content_(false),
@@ -45,10 +43,6 @@ URLRequestJob::URLRequestJob(URLRequest* request)
       bytes_observed_in_packets_(0),
       max_packets_timed_(0),
       observed_packet_count_(0) {
-  if (is_profiling_) {
-    metrics_.reset(new URLRequestJobMetrics());
-    metrics_->start_time_ = TimeTicks::Now();
-  }
   g_url_request_job_tracker.AddNewJob(this);
 }
 
@@ -555,21 +549,6 @@ void URLRequestJob::NotifyDone(const URLRequestStatus &status) {
 
   RecordCompressionHistograms();
 
-  if (is_profiling_ && metrics_->total_bytes_read_ > 0) {
-    // There are valid IO statistics. Fill in other fields of metrics for
-    // profiling consumers to retrieve information.
-    metrics_->original_url_.reset(new GURL(request_->original_url()));
-    metrics_->end_time_ = TimeTicks::Now();
-    metrics_->success_ = status.is_success();
-
-    if (!(request_->original_url() == request_->url())) {
-      metrics_->url_.reset(new GURL(request_->url()));
-    }
-  } else {
-    metrics_.reset();
-  }
-
   // Unless there was an error, we should have at least tried to handle
   // the response before getting here.
   DCHECK(has_handled_response_ || !status.is_success());
@@ -823,10 +802,6 @@ void URLRequestJob::OnRawReadComplete(int bytes_read) {
 }
 
 void URLRequestJob::RecordBytesRead(int bytes_read) {
-  if (is_profiling_) {
-    ++(metrics_->number_of_read_IO_);
-    metrics_->total_bytes_read_ += bytes_read;
-  }
   filter_input_byte_count_ += bytes_read;
   UpdatePacketReadTimes();  // Facilitate stats recording if it is active.
   g_url_request_job_tracker.OnBytesRead(this, raw_read_buffer_->data(),
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index d47f734..3b65f8b 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -25,7 +25,6 @@ class HttpRequestHeaders;
 class HttpResponseInfo;
 class IOBuffer;
 class URLRequest;
-class URLRequestJobMetrics;
 class UploadData;
 class URLRequestStatus;
 class X509Certificate;
@@ -319,12 +318,6 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
   // Cache the load flags from request_ because it might go away.
   int load_flags_;
 
-  // Whether the job is doing performance profiling
-  bool is_profiling_;
-
-  // Contains IO performance measurement when profiling is enabled.
-  scoped_ptr<URLRequestJobMetrics> metrics_;
-
   // The number of bytes read before passing to the filter.
   int prefilter_bytes_read_;
   // The number of bytes read after passing through the filter.
diff --git a/net/url_request/url_request_job_metrics.cc b/net/url_request/url_request_job_metrics.cc
deleted file mode 100644
index 87ef205..0000000
--- a/net/url_request/url_request_job_metrics.cc
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright (c) 2010 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "net/url_request/url_request_job_metrics.h"
-
-#include "base/basictypes.h"
-#include "base/string_util.h"
-#include "base/utf_string_conversions.h"
-
-namespace net {
-
-URLRequestJobMetrics::URLRequestJobMetrics()
-    : total_bytes_read_(0),
-      number_of_read_IO_(0),
-      success_(false) {
-}
-
-URLRequestJobMetrics::~URLRequestJobMetrics() {}
-
-void URLRequestJobMetrics::AppendText(std::wstring* text) {
-  if (!text)
-    return;
-
-  text->append(L"job url = ");
-  text->append(UTF8ToWide(original_url_->spec()));
-
-  if (url_.get()) {
-    text->append(L"; redirected url = ");
-    text->append(UTF8ToWide(url_->spec()));
-  }
-
-  base::TimeDelta elapsed = end_time_ - start_time_;
-  base::StringAppendF(text,
-      L"; total bytes read = %ld; read calls = %d; time = %lld ms;",
-      static_cast<long>(total_bytes_read_),
-      number_of_read_IO_, elapsed.InMilliseconds());
-
-  if (success_) {
-    text->append(L" success.");
-  } else {
-    text->append(L" fail.");
-  }
-}
-
-}  // namespace net
diff --git a/net/url_request/url_request_job_metrics.h b/net/url_request/url_request_job_metrics.h
deleted file mode 100644
index 6552026..0000000
--- a/net/url_request/url_request_job_metrics.h
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright (c) 2010 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Records IO statistics associated with a net::URLRequestJob.
-// See description in navigation_profiler.h for an overview of perf profiling.
-
-#ifndef NET_URL_REQUEST_URL_REQUEST_JOB_METRICS_H_
-#define NET_URL_REQUEST_URL_REQUEST_JOB_METRICS_H_
-#pragma once
-
-#include <string>
-
-#include "base/basictypes.h"
-#include "base/scoped_ptr.h"
-#include "base/time.h"
-#include "googleurl/src/gurl.h"
-
-namespace net {
-
-class URLRequestJobMetrics {
- public:
-  URLRequestJobMetrics();
-  ~URLRequestJobMetrics();
-
-  // Append the text report of the frame loading to the input string.
-  void AppendText(std::wstring* text);
-
-  // The original url the job has been created for.
-  scoped_ptr<GURL> original_url_;
-
-  // The actual url the job connects to. If the actual url is same as the
-  // original url, url_ is empty.
-  scoped_ptr<GURL> url_;
-
-  // Time when the job starts.
-  base::TimeTicks start_time_;
-
-  // Time when the job is done.
-  base::TimeTicks end_time_;
-
-  // Total number of bytes the job reads from underline IO.
-  int64 total_bytes_read_;
-
-  // Number of IO read operations the job issues.
-  int number_of_read_IO_;
-
-  // Final status of the job.
-  bool success_;
-};
-
-}  // namespace net
-
-#endif  // NET_URL_REQUEST_URL_REQUEST_JOB_METRICS_H_