summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--chrome/browser/net/chrome_url_request_context.cc8
-rw-r--r--chrome/browser/net/chrome_url_request_context.h4
-rw-r--r--net/url_request/request_tracker.h26
-rw-r--r--net/url_request/request_tracker_unittest.cc38
4 files changed, 73 insertions, 3 deletions
diff --git a/chrome/browser/net/chrome_url_request_context.cc b/chrome/browser/net/chrome_url_request_context.cc
index 97fba69..3873748 100644
--- a/chrome/browser/net/chrome_url_request_context.cc
+++ b/chrome/browser/net/chrome_url_request_context.cc
@@ -632,6 +632,8 @@ void ChromeURLRequestContextGetter::GetCookieStoreAsyncHelper(
ChromeURLRequestContext::ChromeURLRequestContext() {
CheckCurrentlyOnIOThread();
+ url_request_tracker()->SetGraveyardFilter(
+ &ChromeURLRequestContext::ShouldTrackRequest);
}
ChromeURLRequestContext::~ChromeURLRequestContext() {
@@ -791,6 +793,12 @@ void ChromeURLRequestContext::OnDefaultCharsetChange(
net::HttpUtil::GenerateAcceptCharsetHeader(default_charset);
}
+// static
+bool ChromeURLRequestContext::ShouldTrackRequest(const GURL& url) {
+ // Exclude "chrome://" URLs from our recent requests circular buffer.
+ return !url.SchemeIs("chrome");
+}
+
// ----------------------------------------------------------------------------
// ChromeURLRequestContextFactory
// ----------------------------------------------------------------------------
diff --git a/chrome/browser/net/chrome_url_request_context.h b/chrome/browser/net/chrome_url_request_context.h
index 912f484..5d697fc 100644
--- a/chrome/browser/net/chrome_url_request_context.h
+++ b/chrome/browser/net/chrome_url_request_context.h
@@ -279,6 +279,10 @@ class ChromeURLRequestContext : public URLRequestContext {
bool is_off_the_record_;
private:
+ // Filter for url_request_tracker() that prevents "chrome://" requests from
+ // being tracked by "about:net-internals".
+ static bool ShouldTrackRequest(const GURL& url);
+
DISALLOW_COPY_AND_ASSIGN(ChromeURLRequestContext);
};
diff --git a/net/url_request/request_tracker.h b/net/url_request/request_tracker.h
index f4e2425..abfdf4b 100644
--- a/net/url_request/request_tracker.h
+++ b/net/url_request/request_tracker.h
@@ -38,6 +38,7 @@ class RequestTracker {
};
typedef std::vector<RecentRequestInfo> RecentRequestInfoList;
+ typedef bool (*RecentRequestsFilterFunc)(const GURL&);
// The maximum number of entries for |graveyard_|.
static const size_t kMaxGraveyardSize;
@@ -45,7 +46,7 @@ class RequestTracker {
// The maximum size of URLs to stuff into RecentRequestInfo.
static const size_t kMaxGraveyardURLSize;
- RequestTracker() : next_graveyard_index_(0) {}
+ RequestTracker() : next_graveyard_index_(0), graveyard_filter_func_(NULL) {}
~RequestTracker() {}
// Returns a list of Requests that are alive.
@@ -82,6 +83,7 @@ class RequestTracker {
void Add(Request* request) {
live_instances_.Append(&request->request_tracker_node_);
}
+
void Remove(Request* request) {
// Remove from |live_instances_|.
request->request_tracker_node_.RemoveFromList();
@@ -92,11 +94,28 @@ class RequestTracker {
const std::string& spec = info.original_url.possibly_invalid_spec();
if (spec.size() > kMaxGraveyardURLSize)
info.original_url = GURL(spec.substr(0, kMaxGraveyardURLSize));
- // Add into |graveyard_|.
- InsertIntoGraveyard(info);
+
+ if (ShouldInsertIntoGraveyard(info)) {
+ // Add into |graveyard_|.
+ InsertIntoGraveyard(info);
+ }
+ }
+
+ // This function lets you exclude requests from being saved to the graveyard.
+ // The graveyard is a circular buffer of the most recently completed
+ // requests. Pass NULL to turn off filtering. Otherwise pass in a function
+ // that returns false to exclude requests, and true otherwise.
+ void SetGraveyardFilter(RecentRequestsFilterFunc filter_func) {
+ graveyard_filter_func_ = filter_func;
}
private:
+ bool ShouldInsertIntoGraveyard(const RecentRequestInfo& info) {
+ if (!graveyard_filter_func_)
+ return true;
+ return graveyard_filter_func_(info.original_url);
+ }
+
void InsertIntoGraveyard(const RecentRequestInfo& info) {
if (graveyard_.size() < kMaxGraveyardSize) {
// Still growing to maximum capacity.
@@ -113,6 +132,7 @@ class RequestTracker {
size_t next_graveyard_index_;
RecentRequestInfoList graveyard_;
+ RecentRequestsFilterFunc graveyard_filter_func_;
};
template<typename Request>
diff --git a/net/url_request/request_tracker_unittest.cc b/net/url_request/request_tracker_unittest.cc
index 760d5be..e603129 100644
--- a/net/url_request/request_tracker_unittest.cc
+++ b/net/url_request/request_tracker_unittest.cc
@@ -150,4 +150,42 @@ TEST(URLRequestTrackerTest, TrackingInvalidURL) {
EXPECT_FALSE(tracker.GetRecentlyDeceased()[0].original_url.is_valid());
}
+bool ShouldRequestBeAddedToGraveyard(const GURL& url) {
+ return !url.SchemeIs("chrome") && !url.SchemeIs("data");
+}
+
+// Check that we can exclude "chrome://" URLs and "data:" URLs from being
+// saved into the recent requests list (graveyard), by using a filter.
+TEST(RequestTrackerTest, GraveyardCanBeFiltered) {
+ RequestTracker<TestRequest> tracker;
+
+ tracker.SetGraveyardFilter(ShouldRequestBeAddedToGraveyard);
+
+ // This will be excluded.
+ TestRequest req1(GURL("chrome://dontcare"));
+ tracker.Add(&req1);
+ tracker.Remove(&req1);
+
+ // This will be added to the graveyard.
+ TestRequest req2(GURL("chrome2://dontcare"));
+ tracker.Add(&req2);
+ tracker.Remove(&req2);
+
+ // This will be added to the graveyard.
+ TestRequest req3(GURL("http://foo"));
+ tracker.Add(&req3);
+ tracker.Remove(&req3);
+
+ // This will be excluded.
+ TestRequest req4(GURL("data:sup"));
+ tracker.Add(&req4);
+ tracker.Remove(&req4);
+
+ ASSERT_EQ(2u, tracker.GetRecentlyDeceased().size());
+ EXPECT_EQ("chrome2://dontcare/",
+ tracker.GetRecentlyDeceased()[0].original_url.spec());
+ EXPECT_EQ("http://foo/",
+ tracker.GetRecentlyDeceased()[1].original_url.spec());
+}
+
} // namespace