author     ukai@chromium.org <ukai@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-11-13 02:30:38 +0000
committer  ukai@chromium.org <ukai@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-11-13 02:30:38 +0000
commit     c6f27df9b6d00653f079a03bf805aa7645ab3b22 (patch)
tree       290f394d8d4e9eae89abfece8d0529439cb25b61 /net/url_request
parent     6a37279aca0ed7e894894042de4b226f46af6bf6 (diff)
download   chromium_src-c6f27df9b6d00653f079a03bf805aa7645ab3b22.zip
           chromium_src-c6f27df9b6d00653f079a03bf805aa7645ab3b22.tar.gz
           chromium_src-c6f27df9b6d00653f079a03bf805aa7645ab3b22.tar.bz2
Plumb LoadLog into SocketStream
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/385003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@31881 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'net/url_request')
-rw-r--r--  net/url_request/request_tracker.h                                                                          | 124
-rw-r--r--  net/url_request/request_tracker_unittest.cc (renamed from net/url_request/url_request_tracker_unittest.cc) |  86
-rw-r--r--  net/url_request/url_request.cc                                                                             |  13
-rw-r--r--  net/url_request/url_request.h                                                                              |  11
-rw-r--r--  net/url_request/url_request_context.h                                                                      |  17
-rw-r--r--  net/url_request/url_request_tracker.cc                                                                     |  85
-rw-r--r--  net/url_request/url_request_tracker.h                                                                      |  76
-rw-r--r--  net/url_request/url_request_view_net_internals_job.cc                                                      |  61
8 files changed, 274 insertions(+), 199 deletions(-)
diff --git a/net/url_request/request_tracker.h b/net/url_request/request_tracker.h
new file mode 100644
index 0000000..f4e2425
--- /dev/null
+++ b/net/url_request/request_tracker.h
@@ -0,0 +1,124 @@
+// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef NET_URL_REQUEST_REQUEST_TRACKER_H_
+#define NET_URL_REQUEST_REQUEST_TRACKER_H_
+
+#include <vector>
+
+#include "base/ref_counted.h"
+#include "base/linked_list.h"
+#include "base/logging.h"
+#include "googleurl/src/gurl.h"
+#include "net/base/load_log.h"
+
+// Class to track all of the live instances of Request associated with a
+// particular URLRequestContext. It keeps a circular queue of the LoadLogs
+// for recently deceased requests.
+template<typename Request>
+class RequestTracker {
+ public:
+ struct RecentRequestInfo {
+ GURL original_url;
+ scoped_refptr<net::LoadLog> load_log;
+ };
+
+ // Helper class to make Request insertable into a base::LinkedList,
+ // without making the public interface expose base::LinkNode.
+ class Node : public base::LinkNode<Node> {
+ public:
+ Node(Request* request) : request_(request) {}
+ ~Node() {}
+
+ Request* request() const { return request_; }
+
+ private:
+ Request* request_;
+ };
+
+ typedef std::vector<RecentRequestInfo> RecentRequestInfoList;
+
+ // The maximum number of entries for |graveyard_|.
+ static const size_t kMaxGraveyardSize;
+
+ // The maximum size of URLs to stuff into RecentRequestInfo.
+ static const size_t kMaxGraveyardURLSize;
+
+ RequestTracker() : next_graveyard_index_(0) {}
+ ~RequestTracker() {}
+
+ // Returns a list of Requests that are alive.
+ std::vector<Request*> GetLiveRequests() {
+ std::vector<Request*> list;
+ for (base::LinkNode<Node>* node = live_instances_.head();
+ node != live_instances_.end();
+ node = node->next()) {
+ Request* request = node->value()->request();
+ list.push_back(request);
+ }
+ return list;
+ }
+
+ // Clears the circular buffer of RecentRequestInfos.
+ void ClearRecentlyDeceased() {
+ next_graveyard_index_ = 0;
+ graveyard_.clear();
+ }
+
+ // Returns a list of recently completed Requests.
+ const RecentRequestInfoList GetRecentlyDeceased() {
+ RecentRequestInfoList list;
+
+ // Copy the items from |graveyard_| (our circular queue of recently
+ // deceased request infos) into a vector, ordered from oldest to newest.
+ for (size_t i = 0; i < graveyard_.size(); ++i) {
+ size_t index = (next_graveyard_index_ + i) % graveyard_.size();
+ list.push_back(graveyard_[index]);
+ }
+ return list;
+ }
+
+ void Add(Request* request) {
+ live_instances_.Append(&request->request_tracker_node_);
+ }
+ void Remove(Request* request) {
+ // Remove from |live_instances_|.
+ request->request_tracker_node_.RemoveFromList();
+
+ RecentRequestInfo info;
+ request->GetInfoForTracker(&info);
+ // Paranoia check: truncate |info.original_url| if it is really big.
+ const std::string& spec = info.original_url.possibly_invalid_spec();
+ if (spec.size() > kMaxGraveyardURLSize)
+ info.original_url = GURL(spec.substr(0, kMaxGraveyardURLSize));
+ // Add into |graveyard_|.
+ InsertIntoGraveyard(info);
+ }
+
+ private:
+ void InsertIntoGraveyard(const RecentRequestInfo& info) {
+ if (graveyard_.size() < kMaxGraveyardSize) {
+ // Still growing to maximum capacity.
+ DCHECK_EQ(next_graveyard_index_, graveyard_.size());
+ graveyard_.push_back(info);
+ } else {
+ // At maximum capacity, overwrite the oldest entry.
+ graveyard_[next_graveyard_index_] = info;
+ }
+ next_graveyard_index_ = (next_graveyard_index_ + 1) % kMaxGraveyardSize;
+ }
+
+ base::LinkedList<Node> live_instances_;
+
+ size_t next_graveyard_index_;
+ RecentRequestInfoList graveyard_;
+};
+
+template<typename Request>
+const size_t RequestTracker<Request>::kMaxGraveyardSize = 25;
+
+template<typename Request>
+const size_t RequestTracker<Request>::kMaxGraveyardURLSize = 1000;
+
+#endif // NET_URL_REQUEST_REQUEST_TRACKER_H_
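The template leaves its requirements on Request implicit: the type must embed a RequestTracker<Request>::Node member named exactly request_tracker_node_ and provide a GetInfoForTracker() method, typically granting access by befriending RequestTracker<Request>. A minimal sketch of a trackable type follows; TrackedThing is hypothetical and invented here for illustration, but it has the same shape as the TestRequest in the unit test below.

// Sketch: the implicit contract a type must satisfy to be managed by
// RequestTracker<T>. TrackedThing is hypothetical; compare TestRequest
// in request_tracker_unittest.cc below.
#include "base/basictypes.h"          // DISALLOW_COPY_AND_ASSIGN
#include "base/compiler_specific.h"   // ALLOW_THIS_IN_INITIALIZER_LIST
#include "googleurl/src/gurl.h"
#include "net/base/load_log.h"
#include "net/url_request/request_tracker.h"

class TrackedThing {
 public:
  explicit TrackedThing(const GURL& url)
      : url_(url),
        load_log_(new net::LoadLog(10)),
        ALLOW_THIS_IN_INITIALIZER_LIST(request_tracker_node_(this)) {}

 private:
  // The tracker reaches for |request_tracker_node_| in Add()/Remove(),
  // and calls GetInfoForTracker() when an instance moves to the graveyard.
  friend class RequestTracker<TrackedThing>;

  void GetInfoForTracker(
      RequestTracker<TrackedThing>::RecentRequestInfo* info) const {
    info->original_url = url_;
    info->load_log = load_log_;
  }

  const GURL url_;
  scoped_refptr<net::LoadLog> load_log_;

  // Must use this exact member name; RequestTracker accesses it directly.
  RequestTracker<TrackedThing>::Node request_tracker_node_;

  DISALLOW_COPY_AND_ASSIGN(TrackedThing);
};

With that in place, tracker.Add(&thing) and tracker.Remove(&thing) work as exercised by the tests below.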
diff --git a/net/url_request/url_request_tracker_unittest.cc b/net/url_request/request_tracker_unittest.cc
index 53196ac..c3c41f4 100644
--- a/net/url_request/url_request_tracker_unittest.cc
+++ b/net/url_request/request_tracker_unittest.cc
@@ -2,24 +2,57 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "net/url_request/url_request_tracker.h"
+#include "net/url_request/request_tracker.h"
+#include "base/compiler_specific.h"
#include "base/string_util.h"
-#include "net/url_request/url_request.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace {
-TEST(URLRequestTrackerTest, Basic) {
- URLRequestTracker tracker;
+static const int kMaxNumLoadLogEntries = 1;
+
+class TestRequest {
+ public:
+ explicit TestRequest(const GURL& url)
+ : url_(url),
+ load_log_(new net::LoadLog(kMaxNumLoadLogEntries)),
+ ALLOW_THIS_IN_INITIALIZER_LIST(request_tracker_node_(this)) {}
+ ~TestRequest() {}
+
+ // This method is used by the RequestTrackerTest.Basic test.
+ const GURL& original_url() const { return url_; }
+
+ private:
+ // RequestTracker<T> will access GetInfoForTracker() and
+ // |request_tracker_node_|.
+ friend class RequestTracker<TestRequest>;
+
+ void GetInfoForTracker(
+ RequestTracker<TestRequest>::RecentRequestInfo* info) const {
+ info->original_url = url_;
+ info->load_log = load_log_;
+ }
+
+ const GURL url_;
+ scoped_refptr<net::LoadLog> load_log_;
+
+ RequestTracker<TestRequest>::Node request_tracker_node_;
+
+ DISALLOW_COPY_AND_ASSIGN(TestRequest);
+};
+
+
+TEST(RequestTrackerTest, Basic) {
+ RequestTracker<TestRequest> tracker;
EXPECT_EQ(0u, tracker.GetLiveRequests().size());
EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
- URLRequest req1(GURL("http://req1"), NULL);
- URLRequest req2(GURL("http://req2"), NULL);
- URLRequest req3(GURL("http://req3"), NULL);
- URLRequest req4(GURL("http://req4"), NULL);
- URLRequest req5(GURL("http://req5"), NULL);
+ TestRequest req1(GURL("http://req1"));
+ TestRequest req2(GURL("http://req2"));
+ TestRequest req3(GURL("http://req3"));
+ TestRequest req4(GURL("http://req4"));
+ TestRequest req5(GURL("http://req5"));
tracker.Add(&req1);
tracker.Add(&req2);
@@ -27,7 +60,7 @@ TEST(URLRequestTrackerTest, Basic) {
tracker.Add(&req4);
tracker.Add(&req5);
- std::vector<URLRequest*> live_reqs = tracker.GetLiveRequests();
+ std::vector<TestRequest*> live_reqs = tracker.GetLiveRequests();
ASSERT_EQ(5u, live_reqs.size());
EXPECT_EQ(GURL("http://req1"), live_reqs[0]->original_url());
@@ -49,40 +82,43 @@ TEST(URLRequestTrackerTest, Basic) {
EXPECT_EQ(GURL("http://req4"), live_reqs[1]->original_url());
}
-TEST(URLRequestTrackerTest, GraveyardBounded) {
- URLRequestTracker tracker;
+TEST(RequestTrackerTest, GraveyardBounded) {
+ RequestTracker<TestRequest> tracker;
EXPECT_EQ(0u, tracker.GetLiveRequests().size());
EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
// Add twice as many requests as will fit in the graveyard.
- for (size_t i = 0; i < URLRequestTracker::kMaxGraveyardSize * 2; ++i) {
- URLRequest req(GURL(StringPrintf("http://req%d", i).c_str()), NULL);
+ for (size_t i = 0;
+ i < RequestTracker<TestRequest>::kMaxGraveyardSize * 2;
+ ++i) {
+ TestRequest req(GURL(StringPrintf("http://req%d", i).c_str()));
tracker.Add(&req);
tracker.Remove(&req);
}
// Check that only the last |kMaxGraveyardSize| requests are in-memory.
- URLRequestTracker::RecentRequestInfoList recent_reqs =
+ RequestTracker<TestRequest>::RecentRequestInfoList recent_reqs =
tracker.GetRecentlyDeceased();
- ASSERT_EQ(URLRequestTracker::kMaxGraveyardSize, recent_reqs.size());
+ ASSERT_EQ(RequestTracker<TestRequest>::kMaxGraveyardSize, recent_reqs.size());
- for (size_t i = 0; i < URLRequestTracker::kMaxGraveyardSize; ++i) {
- size_t req_number = i + URLRequestTracker::kMaxGraveyardSize;
+ for (size_t i = 0; i < RequestTracker<TestRequest>::kMaxGraveyardSize; ++i) {
+ size_t req_number = i + RequestTracker<TestRequest>::kMaxGraveyardSize;
GURL url(StringPrintf("http://req%d", req_number).c_str());
EXPECT_EQ(url, recent_reqs[i].original_url);
}
}
// Check that very long URLs are truncated.
-TEST(URLRequestTrackerTest, GraveyardURLBounded) {
- URLRequestTracker tracker;
+TEST(RequestTrackerTest, GraveyardURLBounded) {
+ RequestTracker<TestRequest> tracker;
std::string big_url_spec("http://");
- big_url_spec.resize(2 * URLRequestTracker::kMaxGraveyardURLSize, 'x');
+ big_url_spec.resize(2 * RequestTracker<TestRequest>::kMaxGraveyardURLSize,
+ 'x');
GURL big_url(big_url_spec);
- URLRequest req(big_url, NULL);
+ TestRequest req(big_url);
tracker.Add(&req);
tracker.Remove(&req);
@@ -90,20 +126,20 @@ TEST(URLRequestTrackerTest, GraveyardURLBounded) {
ASSERT_EQ(1u, tracker.GetRecentlyDeceased().size());
// The +1 is because GURL canonicalizes with a trailing '/' ... maybe
// we should just save the std::string rather than the GURL.
- EXPECT_EQ(URLRequestTracker::kMaxGraveyardURLSize + 1,
+ EXPECT_EQ(RequestTracker<TestRequest>::kMaxGraveyardURLSize + 1,
tracker.GetRecentlyDeceased()[0].original_url.spec().size());
}
// Test that it doesn't fail if the URL was invalid. http://crbug.com/21423.
TEST(URLRequestTrackerTest, TrackingInvalidURL) {
- URLRequestTracker tracker;
+ RequestTracker<TestRequest> tracker;
EXPECT_EQ(0u, tracker.GetLiveRequests().size());
EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());
GURL invalid_url("xabc");
EXPECT_FALSE(invalid_url.is_valid());
- URLRequest req(invalid_url, NULL);
+ TestRequest req(invalid_url);
tracker.Add(&req);
tracker.Remove(&req);
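The GraveyardBounded expectations follow directly from the ring-buffer arithmetic in InsertIntoGraveyard() and GetRecentlyDeceased() in request_tracker.h. A standalone illustration of that arithmetic follows; it is plain C++ with no Chromium dependencies, invented for illustration, with kMax standing in for kMaxGraveyardSize and ints standing in for RecentRequestInfo entries.

// Standalone illustration of the graveyard's ring-buffer ordering.
#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  const size_t kMax = 4;  // stands in for kMaxGraveyardSize
  std::vector<int> graveyard;
  size_t next = 0;

  // "Remove()" ten requests; only the last kMax survive.
  for (int value = 0; value < 10; ++value) {
    if (graveyard.size() < kMax)
      graveyard.push_back(value);  // still growing to capacity
    else
      graveyard[next] = value;     // at capacity: overwrite the oldest
    next = (next + 1) % kMax;
  }

  // GetRecentlyDeceased(): walk from oldest to newest, starting at |next|.
  for (size_t i = 0; i < graveyard.size(); ++i)
    std::cout << graveyard[(next + i) % graveyard.size()] << ' ';
  std::cout << '\n';  // prints: 6 7 8 9
}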
diff --git a/net/url_request/url_request.cc b/net/url_request/url_request.cc
index 96fb051..9577a4a 100644
--- a/net/url_request/url_request.cc
+++ b/net/url_request/url_request.cc
@@ -49,7 +49,7 @@ URLRequest::URLRequest(const GURL& url, Delegate* delegate)
redirect_limit_(kMaxRedirects),
final_upload_progress_(0),
priority_(0),
- ALLOW_THIS_IN_INITIALIZER_LIST(url_request_tracker_node_(this)) {
+ ALLOW_THIS_IN_INITIALIZER_LIST(request_tracker_node_(this)) {
SIMPLE_STATS_COUNTER("URLRequestCount");
// Sanity check out environment.
@@ -495,9 +495,9 @@ void URLRequest::set_context(URLRequestContext* context) {
// If the context this request belongs to has changed, update the tracker(s).
if (prev_context != context) {
if (prev_context)
- prev_context->request_tracker()->Remove(this);
+ prev_context->url_request_tracker()->Remove(this);
if (context)
- context->request_tracker()->Add(this);
+ context->url_request_tracker()->Add(this);
}
}
@@ -519,3 +519,10 @@ URLRequest::UserData* URLRequest::GetUserData(const void* key) const {
void URLRequest::SetUserData(const void* key, UserData* data) {
user_data_[key] = linked_ptr<UserData>(data);
}
+
+void URLRequest::GetInfoForTracker(
+ RequestTracker<URLRequest>::RecentRequestInfo* info) const {
+ DCHECK(info);
+ info->original_url = original_url_;
+ info->load_log = load_log_;
+}
diff --git a/net/url_request/url_request.h b/net/url_request/url_request.h
index e6aaf47..bd07767 100644
--- a/net/url_request/url_request.h
+++ b/net/url_request/url_request.h
@@ -19,8 +19,8 @@
#include "net/base/load_log.h"
#include "net/base/load_states.h"
#include "net/http/http_response_info.h"
+#include "net/url_request/request_tracker.h"
#include "net/url_request/url_request_status.h"
-#include "net/url_request/url_request_tracker.h"
namespace base {
class Time;
@@ -528,7 +528,7 @@ class URLRequest {
private:
friend class URLRequestJob;
- friend class URLRequestTracker;
+ friend class RequestTracker<URLRequest>;
void StartJob(URLRequestJob* job);
@@ -550,6 +550,11 @@ class URLRequest {
// Origin).
static std::string StripPostSpecificHeaders(const std::string& headers);
+ // Gets the goodies out of this that we want to show the user later on the
+ // chrome://net-internals/ page.
+ void GetInfoForTracker(
+ RequestTracker<URLRequest>::RecentRequestInfo* info) const;
+
// Contextual information used for this request (can be NULL). This contains
// most of the dependencies which are shared between requests (disk cache,
// cookie store, socket pool, etc.)
@@ -603,7 +608,7 @@ class URLRequest {
// this to determine which URLRequest to allocate sockets to first.
int priority_;
- URLRequestTracker::Node url_request_tracker_node_;
+ RequestTracker<URLRequest>::Node request_tracker_node_;
base::LeakTracker<URLRequest> leak_tracker_;
DISALLOW_COPY_AND_ASSIGN(URLRequest);
diff --git a/net/url_request/url_request_context.h b/net/url_request/url_request_context.h
index 67b378a..f02bc43 100644
--- a/net/url_request/url_request_context.h
+++ b/net/url_request/url_request_context.h
@@ -19,11 +19,12 @@
#include "net/base/strict_transport_security_state.h"
#include "net/ftp/ftp_auth_cache.h"
#include "net/proxy/proxy_service.h"
-#include "net/url_request/url_request_tracker.h"
+#include "net/url_request/request_tracker.h"
namespace net {
class FtpTransactionFactory;
class HttpTransactionFactory;
+class SocketStream;
}
class URLRequest;
@@ -81,7 +82,14 @@ class URLRequestContext :
const std::string& accept_language() const { return accept_language_; }
// Gets the tracker for URLRequests associated with this context.
- URLRequestTracker* request_tracker() { return &request_tracker_; }
+ RequestTracker<URLRequest>* url_request_tracker() {
+ return &url_request_tracker_;
+ }
+
+ // Gets the tracker for SocketStreams associated with this context.
+ RequestTracker<net::SocketStream>* socket_stream_tracker() {
+ return &socket_stream_tracker_;
+ }
// Gets the UA string to use for the given URL. Pass an invalid URL (such as
// GURL()) to get the default UA string. Subclasses should override this
@@ -135,7 +143,10 @@ class URLRequestContext :
std::string referrer_charset_;
// Tracks the requests associated with this context.
- URLRequestTracker request_tracker_;
+ RequestTracker<URLRequest> url_request_tracker_;
+
+ // Tracks the socket streams associated with this context.
+ RequestTracker<net::SocketStream> socket_stream_tracker_;
private:
DISALLOW_COPY_AND_ASSIGN(URLRequestContext);
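The SocketStream half of the plumbing lives outside net/url_request and is not part of this diff; presumably it mirrors URLRequest::set_context() above. A hedged sketch follows, by analogy with that method; SocketStreamish and its context_ member are invented for illustration.

// Hypothetical sketch of how a SocketStream would register with the new
// tracker. The actual SocketStream changes are outside net/url_request and
// are not shown here. For Add()/Remove() to compile, SocketStream must also
// embed a RequestTracker<net::SocketStream>::Node named
// |request_tracker_node_| and provide GetInfoForTracker(), per the contract
// sketched earlier.
void SocketStreamish::set_context(URLRequestContext* context) {
  URLRequestContext* prev_context = context_;
  context_ = context;

  // If the context changed, move this stream between trackers.
  if (prev_context != context) {
    if (prev_context)
      prev_context->socket_stream_tracker()->Remove(this);
    if (context)
      context->socket_stream_tracker()->Add(this);
  }
}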
diff --git a/net/url_request/url_request_tracker.cc b/net/url_request/url_request_tracker.cc
deleted file mode 100644
index 14e5bc3..0000000
--- a/net/url_request/url_request_tracker.cc
+++ /dev/null
@@ -1,85 +0,0 @@
-// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "net/url_request/url_request_tracker.h"
-
-#include "base/logging.h"
-#include "net/url_request/url_request.h"
-
-const size_t URLRequestTracker::kMaxGraveyardSize = 25;
-const size_t URLRequestTracker::kMaxGraveyardURLSize = 1000;
-
-URLRequestTracker::URLRequestTracker() : next_graveyard_index_(0) {}
-
-URLRequestTracker::~URLRequestTracker() {}
-
-std::vector<URLRequest*> URLRequestTracker::GetLiveRequests() {
- std::vector<URLRequest*> list;
- for (base::LinkNode<Node>* node = live_instances_.head();
- node != live_instances_.end();
- node = node->next()) {
- URLRequest* url_request = node->value()->url_request();
- list.push_back(url_request);
- }
- return list;
-}
-
-void URLRequestTracker::ClearRecentlyDeceased() {
- next_graveyard_index_ = 0;
- graveyard_.clear();
-}
-
-const URLRequestTracker::RecentRequestInfoList
-URLRequestTracker::GetRecentlyDeceased() {
- RecentRequestInfoList list;
-
- // Copy the items from |graveyard_| (our circular queue of recently
- // deceased request infos) into a vector, ordered from oldest to
- // newest.
- for (size_t i = 0; i < graveyard_.size(); ++i) {
- size_t index = (next_graveyard_index_ + i) % graveyard_.size();
- list.push_back(graveyard_[index]);
- }
- return list;
-}
-
-void URLRequestTracker::Add(URLRequest* url_request) {
- live_instances_.Append(&url_request->url_request_tracker_node_);
-}
-
-void URLRequestTracker::Remove(URLRequest* url_request) {
- // Remove from |live_instances_|.
- url_request->url_request_tracker_node_.RemoveFromList();
-
- // Add into |graveyard_|.
- InsertIntoGraveyard(ExtractInfo(url_request));
-}
-
-// static
-const URLRequestTracker::RecentRequestInfo
-URLRequestTracker::ExtractInfo(URLRequest* url_request) {
- RecentRequestInfo info;
- info.original_url = url_request->original_url();
- info.load_log = url_request->load_log();
-
- // Paranoia check: truncate |info.original_url| if it is really big.
- const std::string& spec = info.original_url.possibly_invalid_spec();
- if (spec.size() > kMaxGraveyardURLSize)
- info.original_url = GURL(spec.substr(0, kMaxGraveyardURLSize));
- return info;
-}
-
-void URLRequestTracker::InsertIntoGraveyard(
- const RecentRequestInfo& info) {
- if (graveyard_.size() < kMaxGraveyardSize) {
- // Still growing to maximum capacity.
- DCHECK_EQ(next_graveyard_index_, graveyard_.size());
- graveyard_.push_back(info);
- } else {
- // At maximum capacity, overwrite the oldest entry.
- graveyard_[next_graveyard_index_] = info;
- }
-
- next_graveyard_index_ = (next_graveyard_index_ + 1) % kMaxGraveyardSize;
-}
diff --git a/net/url_request/url_request_tracker.h b/net/url_request/url_request_tracker.h
deleted file mode 100644
index 36d05d1..0000000
--- a/net/url_request/url_request_tracker.h
+++ /dev/null
@@ -1,76 +0,0 @@
-// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef NET_URL_REQUEST_URL_REQUEST_TRACKER_H_
-#define NET_URL_REQUEST_URL_REQUEST_TRACKER_H_
-
-#include <vector>
-
-#include "base/ref_counted.h"
-#include "base/linked_list.h"
-#include "googleurl/src/gurl.h"
-#include "net/base/load_log.h"
-
-class URLRequest;
-
-// Class to track all of the live instances of URLRequest associated with a
-// particular URLRequestContext. It keep a circular queue of the LoadLogs
-// for recently deceased requests.
-class URLRequestTracker {
- public:
- struct RecentRequestInfo {
- GURL original_url;
- scoped_refptr<net::LoadLog> load_log;
- };
-
- // Helper class to make URLRequest insertable into a base::LinkedList,
- // without making the public interface expose base::LinkNode.
- class Node : public base::LinkNode<Node> {
- public:
- Node(URLRequest* url_request) : url_request_(url_request) {}
- ~Node() {}
-
- URLRequest* url_request() const { return url_request_; }
-
- private:
- URLRequest* url_request_;
- };
-
- typedef std::vector<RecentRequestInfo> RecentRequestInfoList;
-
- // The maximum number of entries for |graveyard_|.
- static const size_t kMaxGraveyardSize;
-
- // The maximum size of URLs to stuff into RecentRequestInfo.
- static const size_t kMaxGraveyardURLSize;
-
- URLRequestTracker();
- ~URLRequestTracker();
-
- // Returns a list of URLRequests that are alive.
- std::vector<URLRequest*> GetLiveRequests();
-
- // Clears the circular buffer of RecentRequestInfos.
- void ClearRecentlyDeceased();
-
- // Returns a list of recently completed URLRequests.
- const RecentRequestInfoList GetRecentlyDeceased();
-
- void Add(URLRequest* url_request);
- void Remove(URLRequest* url_request);
-
- private:
- // Copy the goodies out of |url_request| that we want to show the
- // user later on the about:net-internal page.
- static const RecentRequestInfo ExtractInfo(URLRequest* url_request);
-
- void InsertIntoGraveyard(const RecentRequestInfo& info);
-
- base::LinkedList<Node> live_instances_;
-
- size_t next_graveyard_index_;
- RecentRequestInfoList graveyard_;
-};
-
-#endif // NET_URL_REQUEST_URL_REQUEST_TRACKER_H_
diff --git a/net/url_request/url_request_view_net_internals_job.cc b/net/url_request/url_request_view_net_internals_job.cc
index ad97b63..b98e9ef 100644
--- a/net/url_request/url_request_view_net_internals_job.cc
+++ b/net/url_request/url_request_view_net_internals_job.cc
@@ -20,6 +20,7 @@
#include "net/base/net_errors.h"
#include "net/base/net_util.h"
#include "net/proxy/proxy_service.h"
+#include "net/socket_stream/socket_stream.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_context.h"
#include "net/url_request/view_cache_helper.h"
@@ -330,7 +331,7 @@ class URLRequestLiveSubSection : public SubSection {
virtual void OutputBody(URLRequestContext* context, std::string* out) {
std::vector<URLRequest*> requests =
- context->request_tracker()->GetLiveRequests();
+ context->url_request_tracker()->GetLiveRequests();
out->append("<ol>");
for (size_t i = 0; i < requests.size(); ++i) {
@@ -351,8 +352,8 @@ class URLRequestRecentSubSection : public SubSection {
}
virtual void OutputBody(URLRequestContext* context, std::string* out) {
- URLRequestTracker::RecentRequestInfoList recent =
- context->request_tracker()->GetRecentlyDeceased();
+ RequestTracker<URLRequest>::RecentRequestInfoList recent =
+ context->url_request_tracker()->GetRecentlyDeceased();
out->append("<ol>");
for (size_t i = 0; i < recent.size(); ++i) {
@@ -400,6 +401,58 @@ class HttpCacheSection : public SubSection {
}
};
+class SocketStreamLiveSubSection : public SubSection {
+ public:
+ SocketStreamLiveSubSection(SubSection* parent)
+ : SubSection(parent, "live", "Live SocketStreams") {
+ }
+
+ virtual void OutputBody(URLRequestContext* context, std::string* out) {
+ std::vector<net::SocketStream*> sockets =
+ context->socket_stream_tracker()->GetLiveRequests();
+
+ out->append("<ol>");
+ for (size_t i = 0; i < sockets.size(); ++i) {
+ // Reverse the list order, so we display from most recent to oldest.
+ size_t index = sockets.size() - i - 1;
+ OutputURLAndLoadLog(sockets[index]->url(),
+ sockets[index]->load_log(),
+ out);
+ }
+ out->append("</ol>");
+ }
+};
+
+class SocketStreamRecentSubSection : public SubSection {
+ public:
+ SocketStreamRecentSubSection(SubSection* parent)
+ : SubSection(parent, "recent", "Recently completed SocketStreams") {
+ }
+
+ virtual void OutputBody(URLRequestContext* context, std::string* out) {
+ RequestTracker<net::SocketStream>::RecentRequestInfoList recent =
+ context->socket_stream_tracker()->GetRecentlyDeceased();
+
+ out->append("<ol>");
+ for (size_t i = 0; i < recent.size(); ++i) {
+ // Reverse the list order, so we display from most recent to oldest.
+ size_t index = recent.size() - i - 1;
+ OutputURLAndLoadLog(recent[index].original_url,
+ recent[index].load_log, out);
+ }
+ out->append("</ol>");
+ }
+};
+
+class SocketStreamSubSection : public SubSection {
+ public:
+ SocketStreamSubSection(SubSection* parent)
+ : SubSection(parent, "socketstream", "SocketStream") {
+ AddSubSection(new SocketStreamLiveSubSection(this));
+ AddSubSection(new SocketStreamRecentSubSection(this));
+ }
+};
+
class AllSubSections : public SubSection {
public:
AllSubSections() : SubSection(NULL, "", "") {
@@ -407,6 +460,7 @@ class AllSubSections : public SubSection {
AddSubSection(new HostResolverSubSection(this));
AddSubSection(new URLRequestSubSection(this));
AddSubSection(new HttpCacheSection(this));
+ AddSubSection(new SocketStreamSubSection(this));
}
};
@@ -480,4 +534,3 @@ bool URLRequestViewNetInternalsJob::GetData(std::string* mime_type,
return true;
}
-
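Adding another panel to the net-internals page follows the same two-step pattern as SocketStreamSubSection above: subclass SubSection, then register it in AllSubSections. A hypothetical example; WidgetSubSection is invented and not part of this change.

// Hypothetical: how another panel would hook into the SubSection tree,
// following the pattern of SocketStreamSubSection above.
class WidgetSubSection : public SubSection {
 public:
  WidgetSubSection(SubSection* parent)
      : SubSection(parent, "widget", "Widgets") {
  }

  virtual void OutputBody(URLRequestContext* context, std::string* out) {
    out->append("<i>(no widgets)</i>");
  }
};

// ... and in AllSubSections():
//   AddSubSection(new WidgetSubSection(this));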