summary refs log tree commit diff stats
path: root/chrome/browser
diff options
context:
space:
mode:
author: eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> 2010-05-14 02:39:14 +0000
committer: eroman@chromium.org <eroman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> 2010-05-14 02:39:14 +0000
commit: c823ee69a6ede271c9fcf8ce60d9ad62771a5d95 (patch)
tree: 2c4b061f6b9b0d5c9162032cc9a1405898c2650f /chrome/browser
parent: d578892cb26c7645179ebacd02edeccb2d3ba696 (diff)
download: chromium_src-c823ee69a6ede271c9fcf8ce60d9ad62771a5d95.zip
chromium_src-c823ee69a6ede271c9fcf8ce60d9ad62771a5d95.tar.gz
chromium_src-c823ee69a6ede271c9fcf8ce60d9ad62771a5d95.tar.bz2
Refactor: Remove the PassiveLogCollector::RequestInfo::url field.
This field is unnecessary since it can be inferred from the list of log entries. Review URL: http://codereview.chromium.org/2129001 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@47235 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'chrome/browser')
-rw-r--r--chrome/browser/net/passive_log_collector.cc34
-rw-r--r--chrome/browser/net/passive_log_collector.h11
-rw-r--r--chrome/browser/net/passive_log_collector_unittest.cc22
-rw-r--r--chrome/browser/resources/net_internals/sourceentry.js2
4 files changed, 39 insertions(+), 30 deletions(-)
diff --git a/chrome/browser/net/passive_log_collector.cc b/chrome/browser/net/passive_log_collector.cc
index 38f24ff..c6e4c64 100644
--- a/chrome/browser/net/passive_log_collector.cc
+++ b/chrome/browser/net/passive_log_collector.cc
@@ -11,6 +11,7 @@
#include "chrome/browser/chrome_thread.h"
namespace {
+
const size_t kMaxNumEntriesPerLog = 50;
const size_t kMaxConnectJobGraveyardSize = 3;
const size_t kMaxRequestGraveyardSize = 25;
@@ -142,6 +143,25 @@ void PassiveLogCollector::GetAllCapturedEvents(EntryList* out) const {
std::sort(out->begin(), out->end(), &SortByOrderComparator);
}
+std::string PassiveLogCollector::RequestInfo::GetURL() const {
+ // Note: we look at the first *two* entries, since the outer REQUEST_ALIVE
+ // doesn't actually contain any data.
+ for (size_t i = 0; i < 2 && i < entries.size(); ++i) {
+ const PassiveLogCollector::Entry& entry = entries[i];
+ if (entry.phase == net::NetLog::PHASE_BEGIN && entry.params) {
+ switch (entry.type) {
+ case net::NetLog::TYPE_URL_REQUEST_START:
+ case net::NetLog::TYPE_SOCKET_STREAM_CONNECT:
+ return static_cast<net::NetLogStringParameter*>(
+ entry.params.get())->value();
+ default:
+ break;
+ }
+ }
+ }
+ return std::string();
+}
+
//----------------------------------------------------------------------------
// RequestTrackerBase
//----------------------------------------------------------------------------
@@ -433,7 +453,6 @@ void PassiveLogCollector::SocketTracker::ClearInfo(RequestInfo* info) {
//----------------------------------------------------------------------------
const size_t PassiveLogCollector::RequestTracker::kMaxGraveyardSize = 25;
-const size_t PassiveLogCollector::RequestTracker::kMaxGraveyardURLSize = 1000;
PassiveLogCollector::RequestTracker::RequestTracker(
ConnectJobTracker* connect_job_tracker, SocketTracker* socket_tracker)
@@ -492,22 +511,11 @@ PassiveLogCollector::RequestTracker::DoAddEntry(const Entry& entry,
AddEntryToRequestInfo(entry, is_unbounded(), out_info);
- // If this was the start of a URLRequest/SocketStream, extract the URL.
- // Note: we look at the first *two* entries, since the outer REQUEST_ALIVE
- // doesn't actually contain any data.
- if (out_info->url.empty() && out_info->entries.size() <= 2 &&
- entry.phase == net::NetLog::PHASE_BEGIN && entry.params &&
- (entry.type == net::NetLog::TYPE_URL_REQUEST_START ||
- entry.type == net::NetLog::TYPE_SOCKET_STREAM_CONNECT)) {
- out_info->url = static_cast<net::NetLogStringParameter*>(
- entry.params.get())->value();
- }
-
// If the request has ended, move it to the graveyard.
if (entry.type == net::NetLog::TYPE_REQUEST_ALIVE &&
entry.phase == net::NetLog::PHASE_END) {
IntegrateSubordinateSource(out_info, true);
- if (StartsWithASCII(out_info->url, "chrome://", false)) {
+ if (StartsWithASCII(out_info->GetURL(), "chrome://", false)) {
// Avoid sending "chrome://" requests to the graveyard, since it just
// adds to clutter.
return ACTION_DELETE;
diff --git a/chrome/browser/net/passive_log_collector.h b/chrome/browser/net/passive_log_collector.h
index bbcf7ca..60ec83f 100644
--- a/chrome/browser/net/passive_log_collector.h
+++ b/chrome/browser/net/passive_log_collector.h
@@ -5,6 +5,7 @@
#ifndef CHROME_BROWSER_NET_PASSIVE_LOG_COLLECTOR_H_
#define CHROME_BROWSER_NET_PASSIVE_LOG_COLLECTOR_H_
+#include <string>
#include <vector>
#include "base/hash_tables.h"
@@ -49,14 +50,17 @@ class PassiveLogCollector : public ChromeNetLog::Observer {
bytes_transmitted(0),
bytes_received(0),
last_tx_rx_position(0) {}
+
+ // Returns the URL that corresponds with this source. This is
+ // only meaningful for certain source types (URL_REQUEST, SOCKET_STREAM).
+ // For the rest, it will return an empty string.
+ std::string GetURL() const;
+
uint32 source_id;
EntryList entries;
size_t num_entries_truncated;
net::NetLog::Source subordinate_source;
- // Only used in RequestTracker.
- std::string url;
-
// Only used in SocketTracker.
uint64 total_bytes_transmitted;
uint64 total_bytes_received;
@@ -162,7 +166,6 @@ class PassiveLogCollector : public ChromeNetLog::Observer {
class RequestTracker : public RequestTrackerBase {
public:
static const size_t kMaxGraveyardSize;
- static const size_t kMaxGraveyardURLSize;
RequestTracker(ConnectJobTracker* connect_job_tracker,
SocketTracker* socket_tracker);
diff --git a/chrome/browser/net/passive_log_collector_unittest.cc b/chrome/browser/net/passive_log_collector_unittest.cc
index 077597f..764b113 100644
--- a/chrome/browser/net/passive_log_collector_unittest.cc
+++ b/chrome/browser/net/passive_log_collector_unittest.cc
@@ -83,11 +83,11 @@ TEST(RequestTrackerTest, BasicBounded) {
RequestInfoList live_reqs = tracker.GetLiveRequests();
ASSERT_EQ(5u, live_reqs.size());
- EXPECT_EQ("http://req1", live_reqs[0].url);
- EXPECT_EQ("http://req2", live_reqs[1].url);
- EXPECT_EQ("http://req3", live_reqs[2].url);
- EXPECT_EQ("http://req4", live_reqs[3].url);
- EXPECT_EQ("http://req5", live_reqs[4].url);
+ EXPECT_EQ("http://req1", live_reqs[0].GetURL());
+ EXPECT_EQ("http://req2", live_reqs[1].GetURL());
+ EXPECT_EQ("http://req3", live_reqs[2].GetURL());
+ EXPECT_EQ("http://req4", live_reqs[3].GetURL());
+ EXPECT_EQ("http://req5", live_reqs[4].GetURL());
tracker.OnAddEntry(MakeEndLogEntry(1));
tracker.OnAddEntry(MakeEndLogEntry(5));
@@ -98,8 +98,8 @@ TEST(RequestTrackerTest, BasicBounded) {
live_reqs = tracker.GetLiveRequests();
ASSERT_EQ(2u, live_reqs.size());
- EXPECT_EQ("http://req2", live_reqs[0].url);
- EXPECT_EQ("http://req4", live_reqs[1].url);
+ EXPECT_EQ("http://req2", live_reqs[0].GetURL());
+ EXPECT_EQ("http://req4", live_reqs[1].GetURL());
}
TEST(RequestTrackerTest, GraveyardBounded) {
@@ -123,7 +123,7 @@ TEST(RequestTrackerTest, GraveyardBounded) {
for (size_t i = 0; i < RequestTracker::kMaxGraveyardSize; ++i) {
size_t req_number = i + RequestTracker::kMaxGraveyardSize;
std::string url = StringPrintf("http://req%" PRIuS, req_number);
- EXPECT_EQ(url, recent_reqs[i].url);
+ EXPECT_EQ(url, recent_reqs[i].GetURL());
}
}
@@ -153,7 +153,7 @@ TEST(RequestTrackerTest, GraveyardUnbounded) {
for (size_t i = 0; i < kMaxSize; ++i) {
std::string url = StringPrintf("http://req%" PRIuS, i);
- EXPECT_EQ(url, recent_reqs[i].url);
+ EXPECT_EQ(url, recent_reqs[i].GetURL());
}
}
@@ -179,8 +179,8 @@ TEST(RequestTrackerTest, GraveyardIsFiltered) {
tracker.OnAddEntry(MakeEndLogEntry(3));
ASSERT_EQ(2u, tracker.GetRecentlyDeceased().size());
- EXPECT_EQ(url2, tracker.GetRecentlyDeceased()[0].url);
- EXPECT_EQ(url3, tracker.GetRecentlyDeceased()[1].url);
+ EXPECT_EQ(url2, tracker.GetRecentlyDeceased()[0].GetURL());
+ EXPECT_EQ(url3, tracker.GetRecentlyDeceased()[1].GetURL());
}
// Convert an unbounded tracker back to being bounded.
diff --git a/chrome/browser/resources/net_internals/sourceentry.js b/chrome/browser/resources/net_internals/sourceentry.js
index 9937037..c0773ff 100644
--- a/chrome/browser/resources/net_internals/sourceentry.js
+++ b/chrome/browser/resources/net_internals/sourceentry.js
@@ -172,8 +172,6 @@ SourceEntry.prototype.getDescription = function() {
* first entry that was logged to this source. However, we skip over the
* TYPE_REQUEST_ALIVE entries which wrap TYPE_URL_REQUEST_START /
* TYPE_SOCKET_STREAM_CONNECT.
- *
- * TODO(eroman): Get rid of TYPE_REQUEST_ALIVE so this isn't necessary.
*/
SourceEntry.prototype.getStartEntry_ = function() {
if (this.entries_.length < 1)