// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/url_request/url_request_throttler_manager.h"

#include <list>

// TODO(joi): Remove once crbug.com/71721 is fixed.
#include "base/command_line.h"
#include "base/logging.h"
#include "base/string_util.h"
namespace {

// TODO(joi): Remove after crbug.com/71721 is fixed.
struct IteratorHistory {
  // Copy of the 'this' pointer at time of access. This helps both because
  // the 'this' pointer is often obfuscated (at least for this particular
  // stack trace) in fully optimized builds, and because it may help detect
  // changes to the 'this' pointer during iteration over the map (e.g. from
  // another thread overwriting memory).
  net::URLRequestThrottlerManager* self;

  // Copy of the URL key.
  char url[256];

  // Not a refptr; we don't want to change behavior by keeping the entry
  // alive.
  net::URLRequestThrottlerEntryInterface* entry;
};

}  // namespace

namespace net {
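
// Cap on the number of entries kept in |url_entries_|, and the number of
// registered requests between garbage collection passes over the map (see
// GarbageCollectEntriesIfNecessary() below).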
const unsigned int URLRequestThrottlerManager::kMaximumNumberOfEntries = 1500;
const unsigned int URLRequestThrottlerManager::kRequestsBetweenCollecting = 200;
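
// Lazily constructed process-wide singleton; per the destructor comment
// below, it is destroyed at shutdown via AtExit on the main thread.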
URLRequestThrottlerManager* URLRequestThrottlerManager::GetInstance() {
  return Singleton<URLRequestThrottlerManager>::get();
}

scoped_refptr<URLRequestThrottlerEntryInterface>
    URLRequestThrottlerManager::RegisterRequestUrl(const GURL& url) {
  CHECK(being_tested_ || thread_checker_.CalledOnValidThread());

  // Normalize the url.
  std::string url_id = GetIdFromUrl(url);

  // Periodically garbage collect old entries.
  GarbageCollectEntriesIfNecessary();

  // Find the entry in the map or create it.
  scoped_refptr<URLRequestThrottlerEntry>& entry = url_entries_[url_id];
  if (entry.get() == NULL)
    entry = new URLRequestThrottlerEntry();

  // TODO(joi): Demote CHECKs in this file to DCHECKs (or remove them) once
  // we fully understand crbug.com/71721
  CHECK(entry.get());
  return entry;
}
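
// Replaces whatever entry is currently registered for |url|; used only by
// tests that need to inject a specific entry (e.g. a mock).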
void URLRequestThrottlerManager::OverrideEntryForTests(
    const GURL& url,
    URLRequestThrottlerEntry* entry) {
  // Normalize the url.
  std::string url_id = GetIdFromUrl(url);

  // Periodically garbage collect old entries.
  GarbageCollectEntriesIfNecessary();

  url_entries_[url_id] = entry;
}
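
// Removes the entry registered for |url|, if any; used only by tests.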
void URLRequestThrottlerManager::EraseEntryForTests(const GURL& url) {
  // Normalize the url.
  std::string url_id = GetIdFromUrl(url);
  url_entries_.erase(url_id);
}
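
// Called once at browser startup (from BrowserMain, per the constructor
// comment below). Production code always calls this, which is why
// |being_tested_| defaults to true until it runs.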
void URLRequestThrottlerManager::InitializeOptions(bool enforce_throttling) {
  enforce_throttling_ = enforce_throttling;
  being_tested_ = false;
}

URLRequestThrottlerManager::URLRequestThrottlerManager()
    : requests_since_last_gc_(0),
      enforce_throttling_(true),
      being_tested_(true) {
  // Construction/destruction is on main thread (because BrowserMain
  // retrieves an instance to call InitializeOptions), but is from then on
  // used on I/O thread.
  thread_checker_.DetachFromThread();

  url_id_replacements_.ClearPassword();
  url_id_replacements_.ClearUsername();
  url_id_replacements_.ClearQuery();
  url_id_replacements_.ClearRef();
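
  // These sentinel strings are meant to be easy to spot in crash dumps for
  // crbug.com/71721 (and possibly to help detect memory being overwritten
  // near this object); see also the GOOGY/MONSTA markers in GetIdFromUrl().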
  // TODO(joi): Remove after crbug.com/71721 is fixed.
  base::strlcpy(magic_buffer_1_, "MAGICZZ", arraysize(magic_buffer_1_));
  base::strlcpy(magic_buffer_2_, "GOOGYZZ", arraysize(magic_buffer_2_));
}

URLRequestThrottlerManager::~URLRequestThrottlerManager() {
  // Destruction is on main thread (AtExit), but real use is on I/O thread.
  thread_checker_.DetachFromThread();

  // Delete all entries.
  url_entries_.clear();
}
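
// Produces the key used for |url| in |url_entries_|: username, password,
// query string and ref are stripped (via |url_id_replacements_|, set up in
// the constructor) and the remaining spec is lower-cased.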
std::string URLRequestThrottlerManager::GetIdFromUrl(const GURL& url) const {
  if (!url.is_valid())
    return url.possibly_invalid_spec();

  GURL id = url.ReplaceComponents(url_id_replacements_);
  // TODO(joi): Remove "GOOGY/MONSTA" stuff once crbug.com/71721 is done
  return StringPrintf("GOOGY%sMONSTA", StringToLowerASCII(id.spec()).c_str());
}
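
// Runs a full garbage collection pass once every kRequestsBetweenCollecting
// registered requests.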
void URLRequestThrottlerManager::GarbageCollectEntriesIfNecessary() {
  requests_since_last_gc_++;
  if (requests_since_last_gc_ < kRequestsBetweenCollecting)
    return;
  requests_since_last_gc_ = 0;

  GarbageCollectEntries();
}

void URLRequestThrottlerManager::GarbageCollectEntries() {
  // TODO(joi): Remove these crash report aids once crbug.com/71721
  // is figured out.

  // Copy the current process command line, in case some labs feature
  // is in common between the crash dumps. Note that this is not equivalent
  // to the command line stored in the PEB of the minidump since it may
  // have been modified based on the about:labs preferences.
  std::string command_line_string;
#if defined(OS_WIN)
  std::wstring wstr = CommandLine::ForCurrentProcess()->command_line_string();
  command_line_string = WideToASCII(wstr);
#else
  command_line_string =
      CommandLine::ForCurrentProcess()->command_line_string();
#endif
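
  // Copying into a fixed-size stack buffer presumably makes the command line
  // easy to find in the minidump's stack memory.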
  char command_line_buffer[400] = { 0 };
  base::strlcpy(command_line_buffer, command_line_string.c_str(),
                arraysize(command_line_buffer));

  IteratorHistory history[32] = { { 0 } };
  size_t history_ix = 0;
  history[history_ix++].self = this;

  int nulls_found = 0;
  UrlEntryMap::iterator i = url_entries_.begin();
  while (i != url_entries_.end()) {
    if (i->second == NULL) {
      ++nulls_found;
    }

    // Keep a log of the first 31 items accessed after the first
    // NULL encountered (the hypothesis is that there are multiple NULLs,
    // and we may learn more about the pattern of the memory overwrite).
    // We also log before accessing the first entry, to get an original
    // value for our 'this' pointer.
    if (nulls_found > 0 && history_ix < arraysize(history)) {
      history[history_ix].self = this;
      base::strlcpy(history[history_ix].url, i->first.c_str(),
                    arraysize(history[history_ix].url));
      history[history_ix].entry = i->second.get();
      ++history_ix;
    }

    // TODO(joi): Remove the first i->second check once the bug is gone.
    if ((i->second) && (i->second)->IsEntryOutdated()) {
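      // Post-increment so |i| has already advanced before erase()
      // invalidates the erased iterator.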
      url_entries_.erase(i++);
    } else {
      ++i;
    }
  }
  CHECK(nulls_found == 0);

  // In case something broke, we want to make sure not to grow indefinitely.
  while (url_entries_.size() > kMaximumNumberOfEntries) {
    url_entries_.erase(url_entries_.begin());
  }
}

}  // namespace net