author     adamk@chromium.org <adamk@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-03-17 19:06:01 +0000
committer  adamk@chromium.org <adamk@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-03-17 19:06:01 +0000
commit     fc01f237e02ab8b4b786031cf59c54aba31d3942 (patch)
tree       72d99fc1e5de6512fe63fef571646ef885246d47 /net/url_request
parent     910b3d75ffc13e80aa254cc6610c740ce1aa6090 (diff)
Stop subclassing FilterContext in URLRequestJob.
Create a wrapper for URLRequestHttpJob that subclasses FilterContext, and pass an instance of this facade to Filter::Factory(). Reduce the scope of URLRequestJob's interface as much as possible, moving methods into URLRequestHttpJob or its facade as appropriate, and making methods that remain in URLRequestJob non-virtual where possible.

BUG=none
TEST=net_unittests,try bots

Review URL: http://codereview.chromium.org/6677104

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@78576 0039d316-1c4b-4281-b951-d872f2087c98
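
For readers unfamiliar with the pattern, here is a minimal sketch of the facade approach the message describes. This is illustrative C++ only, not Chromium code: SimpleFilterContext stands in for net::FilterContext, and SimpleJob/JobFilterContext stand in for URLRequestHttpJob and its new inner HttpFilterContext. The point is that the job no longer inherits the filter-context interface; a small inner class implements it and forwards each call back to the job that owns it.

    // Illustrative sketch only -- names are hypothetical, not Chromium's.
    #include <string>

    // Stand-in for the net::FilterContext interface.
    class SimpleFilterContext {
     public:
      virtual ~SimpleFilterContext() {}
      virtual bool GetMimeType(std::string* mime_type) const = 0;
      virtual bool IsCachedContent() const = 0;
    };

    class SimpleJob {
     public:
      SimpleJob() : filter_context_(this) {}

      // Job-level accessors; they no longer need to be virtual once the job
      // stops inheriting the filter-context interface directly.
      bool GetMimeType(std::string* mime_type) const {
        *mime_type = "text/html";
        return true;
      }
      bool IsCachedContent() const { return false; }

      // Code that needs a filter context receives the facade, not the job.
      const SimpleFilterContext& filter_context() const {
        return filter_context_;
      }

     private:
      // Inner facade: implements the interface by delegating to the owning job.
      class JobFilterContext : public SimpleFilterContext {
       public:
        explicit JobFilterContext(SimpleJob* job) : job_(job) {}
        virtual bool GetMimeType(std::string* mime_type) const {
          return job_->GetMimeType(mime_type);
        }
        virtual bool IsCachedContent() const {
          return job_->IsCachedContent();
        }
       private:
        SimpleJob* job_;  // Not owned; the job owns this facade.
      };

      JobFilterContext filter_context_;
    };

In the actual change below, URLRequestHttpJob holds an HttpFilterContext member in exactly this way and hands it to Filter::FixupEncodingTypes() and Filter::Factory() in place of *this.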
Diffstat (limited to 'net/url_request')
-rw-r--r--  net/url_request/url_request_http_job.cc  62
-rw-r--r--  net/url_request/url_request_http_job.h    24
-rw-r--r--  net/url_request/url_request_job.cc        41
-rw-r--r--  net/url_request/url_request_job.h         24
4 files changed, 96 insertions(+), 55 deletions(-)
diff --git a/net/url_request/url_request_http_job.cc b/net/url_request/url_request_http_job.cc
index 480833a..ab3e947 100644
--- a/net/url_request/url_request_http_job.cc
+++ b/net/url_request/url_request_http_job.cc
@@ -80,6 +80,55 @@ class HTTPSProberDelegateImpl : public HTTPSProberDelegate {
} // namespace
+URLRequestHttpJob::HttpFilterContext::HttpFilterContext(URLRequestHttpJob* job)
+ : job_(job) {
+ DCHECK(job_);
+}
+
+URLRequestHttpJob::HttpFilterContext::~HttpFilterContext() {
+}
+
+bool URLRequestHttpJob::HttpFilterContext::GetMimeType(
+ std::string* mime_type) const {
+ return job_->GetMimeType(mime_type);
+}
+
+bool URLRequestHttpJob::HttpFilterContext::GetURL(GURL* gurl) const {
+ if (!job_->request())
+ return false;
+ *gurl = job_->request()->url();
+ return true;
+}
+
+base::Time URLRequestHttpJob::HttpFilterContext::GetRequestTime() const {
+ return job_->request() ? job_->request()->request_time() : base::Time();
+}
+
+bool URLRequestHttpJob::HttpFilterContext::IsCachedContent() const {
+ return job_->IsCachedContent();
+}
+
+bool URLRequestHttpJob::HttpFilterContext::IsDownload() const {
+ return (job_->request_info_.load_flags & LOAD_IS_DOWNLOAD) != 0;
+}
+
+bool URLRequestHttpJob::HttpFilterContext::IsSdchResponse() const {
+ return job_->sdch_dictionary_advertised_;
+}
+
+int64 URLRequestHttpJob::HttpFilterContext::GetByteReadCount() const {
+ return job_->filter_input_byte_count();
+}
+
+int URLRequestHttpJob::HttpFilterContext::GetResponseCode() const {
+ return job_->GetResponseCode();
+}
+
+void URLRequestHttpJob::HttpFilterContext::RecordPacketStats(
+ StatisticSelector statistic) const {
+ job_->RecordPacketStats(statistic);
+}
+
// TODO(darin): make sure the port blocking code is not lost
// static
URLRequestJob* URLRequestHttpJob::Factory(URLRequest* request,
@@ -143,6 +192,7 @@ URLRequestHttpJob::URLRequestHttpJob(URLRequest* request)
sdch_test_control_(false),
is_cached_content_(false),
request_creation_time_(),
+ ALLOW_THIS_IN_INITIALIZER_LIST(filter_context_(this)),
ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {
ResetTimer();
}
@@ -748,20 +798,16 @@ Filter* URLRequestHttpJob::SetupFilter() const {
// some decoding, as some proxies strip encoding completely. In such cases,
// we may need to add (for example) SDCH filtering (when the context suggests
// it is appropriate).
- Filter::FixupEncodingTypes(*this, &encoding_types);
+ Filter::FixupEncodingTypes(filter_context_, &encoding_types);
return !encoding_types.empty()
- ? Filter::Factory(encoding_types, *this) : NULL;
+ ? Filter::Factory(encoding_types, filter_context_) : NULL;
}
bool URLRequestHttpJob::IsCachedContent() const {
return is_cached_content_;
}
-bool URLRequestHttpJob::IsSdchResponse() const {
- return sdch_dictionary_advertised_;
-}
-
bool URLRequestHttpJob::IsSafeRedirect(const GURL& location) {
// We only allow redirects to certain "safe" protocols. This does not
// restrict redirects to externally handled protocols. Our consumer would
@@ -948,9 +994,9 @@ URLRequestHttpJob::~URLRequestHttpJob() {
DCHECK(!sdch_test_control_ || !sdch_test_activated_);
if (!IsCachedContent()) {
if (sdch_test_control_)
- RecordPacketStats(SDCH_EXPERIMENT_HOLDBACK);
+ RecordPacketStats(FilterContext::SDCH_EXPERIMENT_HOLDBACK);
if (sdch_test_activated_)
- RecordPacketStats(SDCH_EXPERIMENT_DECODE);
+ RecordPacketStats(FilterContext::SDCH_EXPERIMENT_DECODE);
}
// Make sure SDCH filters are told to emit histogram data while this class
// can still service the IsCachedContent() call.
diff --git a/net/url_request/url_request_http_job.h b/net/url_request/url_request_http_job.h
index 6f742ec..84bf038 100644
--- a/net/url_request/url_request_http_job.h
+++ b/net/url_request/url_request_http_job.h
@@ -74,7 +74,6 @@ class URLRequestHttpJob : public URLRequestJob {
virtual int GetResponseCode() const;
virtual Filter* SetupFilter() const;
virtual bool IsCachedContent() const;
- virtual bool IsSdchResponse() const;
virtual bool IsSafeRedirect(const GURL& location);
virtual bool NeedsAuth();
virtual void GetAuthChallengeInfo(scoped_refptr<AuthChallengeInfo>*);
@@ -135,12 +134,35 @@ class URLRequestHttpJob : public URLRequestJob {
bool is_cached_content_;
private:
+ class HttpFilterContext : public FilterContext {
+ public:
+ explicit HttpFilterContext(URLRequestHttpJob* job);
+ virtual ~HttpFilterContext();
+
+ // net::FilterContext implementation.
+ virtual bool GetMimeType(std::string* mime_type) const;
+ virtual bool GetURL(GURL* gurl) const;
+ virtual base::Time GetRequestTime() const;
+ virtual bool IsCachedContent() const;
+ virtual bool IsDownload() const;
+ virtual bool IsSdchResponse() const;
+ virtual int64 GetByteReadCount() const;
+ virtual int GetResponseCode() const;
+ virtual void RecordPacketStats(StatisticSelector statistic) const;
+
+ private:
+ URLRequestHttpJob* job_;
+
+ DISALLOW_COPY_AND_ASSIGN(HttpFilterContext);
+ };
+
virtual ~URLRequestHttpJob();
void RecordTimer();
void ResetTimer();
base::Time request_creation_time_;
+ HttpFilterContext filter_context_;
ScopedRunnableMethodFactory<URLRequestHttpJob> method_factory_;
DISALLOW_COPY_AND_ASSIGN(URLRequestHttpJob);
diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
index 67031ce..5986515 100644
--- a/net/url_request/url_request_job.cc
+++ b/net/url_request/url_request_job.cc
@@ -11,7 +11,7 @@
#include "net/base/auth.h"
#include "net/base/host_port_pair.h"
#include "net/base/io_buffer.h"
-#include "net/base/load_flags.h"
+#include "net/base/load_states.h"
#include "net/base/mime_util.h"
#include "net/base/net_errors.h"
#include "net/base/network_delegate.h"
@@ -28,7 +28,6 @@ namespace net {
URLRequestJob::URLRequestJob(URLRequest* request)
: request_(request),
done_(false),
- load_flags_(request_->load_flags()),
prefilter_bytes_read_(0),
postfilter_bytes_read_(0),
is_compressible_content_(false),
@@ -205,40 +204,16 @@ bool URLRequestJob::GetMimeType(std::string* mime_type) const {
return false;
}
-bool URLRequestJob::GetURL(GURL* gurl) const {
- if (!request_)
- return false;
- *gurl = request_->url();
- return true;
-}
-
-base::Time URLRequestJob::GetRequestTime() const {
- if (!request_)
- return base::Time();
- return request_->request_time();
-}
-
-bool URLRequestJob::IsDownload() const {
- return (load_flags_ & net::LOAD_IS_DOWNLOAD) != 0;
-}
-
-bool URLRequestJob::IsSdchResponse() const {
- return false;
-}
-
bool URLRequestJob::IsCachedContent() const {
return false;
}
-int64 URLRequestJob::GetByteReadCount() const {
- return filter_input_byte_count_;
-}
-
int URLRequestJob::GetResponseCode() const {
return -1;
}
-void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
+void URLRequestJob::RecordPacketStats(
+ FilterContext::StatisticSelector statistic) const {
if (!packet_timing_enabled_ || (final_packet_time_ == base::Time()))
return;
@@ -250,7 +225,7 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
base::TimeDelta duration = final_packet_time_ - request_time_snapshot_;
switch (statistic) {
- case SDCH_DECODE: {
+ case FilterContext::SDCH_DECODE: {
UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Decode_Latency_F_a", duration,
base::TimeDelta::FromMilliseconds(20),
base::TimeDelta::FromMinutes(10), 100);
@@ -287,7 +262,7 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
base::TimeDelta::FromSeconds(10), 100);
return;
}
- case SDCH_PASSTHROUGH: {
+ case FilterContext::SDCH_PASSTHROUGH: {
// Despite advertising a dictionary, we handled non-sdch compressed
// content.
UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Network_Pass-through_Latency_F_a",
@@ -325,7 +300,7 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
return;
}
- case SDCH_EXPERIMENT_DECODE: {
+ case FilterContext::SDCH_EXPERIMENT_DECODE: {
UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Decode",
duration,
base::TimeDelta::FromMilliseconds(20),
@@ -334,7 +309,7 @@ void URLRequestJob::RecordPacketStats(StatisticSelector statistic) const {
// case, so we don't need them here.
return;
}
- case SDCH_EXPERIMENT_HOLDBACK: {
+ case FilterContext::SDCH_EXPERIMENT_HOLDBACK: {
UMA_HISTOGRAM_CLIPPED_TIMES("Sdch3.Experiment_Holdback",
duration,
base::TimeDelta::FromMilliseconds(20),
@@ -814,7 +789,7 @@ void URLRequestJob::UpdatePacketReadTimes() {
}
if (!bytes_observed_in_packets_)
- request_time_snapshot_ = GetRequestTime();
+ request_time_snapshot_ = request_ ? request_->request_time() : base::Time();
final_packet_time_ = base::Time::Now();
const size_t kTypicalPacketSize = 1430;
diff --git a/net/url_request/url_request_job.h b/net/url_request/url_request_job.h
index 49b3e6c..9d330fd 100644
--- a/net/url_request/url_request_job.h
+++ b/net/url_request/url_request_job.h
@@ -29,8 +29,7 @@ class UploadData;
class URLRequestStatus;
class X509Certificate;
-class URLRequestJob : public base::RefCounted<URLRequestJob>,
- public FilterContext {
+class URLRequestJob : public base::RefCounted<URLRequestJob> {
public:
// When histogramming results related to SDCH and/or an SDCH latency test, the
// number of packets for which we need to record arrival times so as to
@@ -178,17 +177,9 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
// Whether we have processed the response for that request yet.
bool has_response_started() const { return has_handled_response_; }
- // FilterContext methods:
// These methods are not applicable to all connections.
virtual bool GetMimeType(std::string* mime_type) const;
- virtual bool GetURL(GURL* gurl) const;
- virtual base::Time GetRequestTime() const;
- virtual bool IsDownload() const;
- virtual bool IsSdchResponse() const;
- virtual bool IsCachedContent() const;
- virtual int64 GetByteReadCount() const;
virtual int GetResponseCode() const;
- virtual void RecordPacketStats(StatisticSelector statistic) const;
// Returns the socket address for the connection.
// See url_request.h for details.
@@ -263,6 +254,16 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
// Set the status of the job.
void SetStatus(const net::URLRequestStatus& status);
+ // TODO(adamk): Remove this method once it's no longer called from
+ // URLRequestJob.
+ virtual bool IsCachedContent() const;
+
+ // TODO(adamk): Move this method to url_request_http_job.cc by exposing
+ // the required stats to URLRequestJob children.
+ void RecordPacketStats(FilterContext::StatisticSelector statistic) const;
+
+ int64 filter_input_byte_count() const { return filter_input_byte_count_; }
+
// The request that initiated this job. This value MAY BE NULL if the
// request was released by DetachRequest().
net::URLRequest* request_;
@@ -306,9 +307,6 @@ class URLRequestJob : public base::RefCounted<URLRequestJob>,
// NotifyDone so that it is kept in sync with the request.
bool done_;
- // Cache the load flags from request_ because it might go away.
- int load_flags_;
-
// The number of bytes read before passing to the filter.
int prefilter_bytes_read_;
// The number of bytes read after passing through the filter.