Diffstat (limited to 'tools/chrome_proxy')
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py                 |  19
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py              |  61
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py                   | 120
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py  |  82
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback.py         |  24
5 files changed, 272 insertions(+), 34 deletions(-)
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py b/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
index 4ef8303..6275ecc 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
@@ -39,6 +39,7 @@ class ChromeProxyDataSaving(benchmark.Benchmark):
tag = 'data_saving'
test = measurements.ChromeProxyDataSaving
page_set = pagesets.Top20PageSet
+
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
@@ -66,24 +67,35 @@ class ChromeProxyHeaderValidation(benchmark.Benchmark):
test = measurements.ChromeProxyHeaders
page_set = pagesets.Top20PageSet
+
@benchmark.Enabled('android')
class ChromeProxyClientVersion(benchmark.Benchmark):
tag = 'client_version'
test = measurements.ChromeProxyClientVersion
page_set = pagesets.SyntheticPageSet
+
@benchmark.Enabled('android')
class ChromeProxyBypass(benchmark.Benchmark):
tag = 'bypass'
test = measurements.ChromeProxyBypass
page_set = pagesets.BypassPageSet
+
+@benchmark.Enabled('android')
+class ChromeProxyFallback(benchmark.Benchmark):
+ tag = 'fallback'
+ test = measurements.ChromeProxyFallback
+ page_set = pagesets.FallbackPageSet
+
+
@benchmark.Enabled('android')
class ChromeProxyCorsBypass(benchmark.Benchmark):
tag = 'bypass'
test = measurements.ChromeProxyCorsBypass
page_set = pagesets.CorsBypassPageSet
+
@benchmark.Enabled('android')
class ChromeProxyBlockOnce(benchmark.Benchmark):
tag = 'block_once'
@@ -120,6 +132,13 @@ class ChromeProxyHTTPToDirectFallback(benchmark.Benchmark):
@benchmark.Enabled('android')
+class ChromeProxyExplicitBypass(benchmark.Benchmark):
+ tag = 'explicit-bypass'
+ test = measurements.ChromeProxyExplicitBypass
+ page_set = pagesets.ExplicitBypassPageSet
+
+
+@benchmark.Enabled('android')
class ChromeProxySmoke(benchmark.Benchmark):
tag = 'smoke'
test = measurements.ChromeProxySmoke
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py b/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
index d3d5af3..319ef7d 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
@@ -112,8 +112,18 @@ class ChromeProxyBypass(ChromeProxyValidation):
self._metrics.AddResultsForBypass(tab, results)
+class ChromeProxyFallback(ChromeProxyValidation):
+ """Correctness measurement for proxy fallback responses."""
+
+ def __init__(self):
+ super(ChromeProxyFallback, self).__init__(restart_after_each_page=True)
+
+ def AddResults(self, tab, results):
+ self._metrics.AddResultsForFallback(tab, results)
+
+
class ChromeProxyCorsBypass(ChromeProxyValidation):
- """Correctness measurement for bypass responses."""
+ """Correctness measurement for bypass responses for CORS requests."""
def __init__(self):
super(ChromeProxyCorsBypass, self).__init__(restart_after_each_page=True)
@@ -123,7 +133,7 @@ class ChromeProxyCorsBypass(ChromeProxyValidation):
# finishes.
tab.WaitForJavaScriptExpression('window.xhrRequestCompleted', 15000)
super(ChromeProxyCorsBypass,
- self).ValidateAndMeasurePag1Ge(page, tab, results)
+ self).ValidateAndMeasurePage(page, tab, results)
def AddResults(self, tab, results):
self._metrics.AddResultsForCorsBypass(tab, results)
@@ -163,7 +173,8 @@ _TEST_SERVER_DEFAULT_URL = 'http://' + _TEST_SERVER + '/default'
#
# The test server allows requests to override the response status, headers, and
# body through query parameters. See GetResponseOverrideURL.
-def GetResponseOverrideURL(url, respStatus=0, respHeader="", respBody=""):
+def GetResponseOverrideURL(url=_TEST_SERVER_DEFAULT_URL, respStatus=0,
+ respHeader="", respBody=""):
""" Compose the request URL with query parameters to override
the chromeproxy-test server response.
"""
@@ -201,7 +212,6 @@ class ChromeProxyHTTPFallbackProbeURL(ChromeProxyValidation):
# Use the test server probe URL which returns the response
# body as specified by respBody.
probe_url = GetResponseOverrideURL(
- _TEST_SERVER_DEFAULT_URL,
respBody='not OK')
options.AppendExtraBrowserArgs(
'--data-reduction-proxy-probe-url=%s' % probe_url)
@@ -284,6 +294,49 @@ class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation):
self._metrics.AddResultsForHTTPToDirectFallback(tab, results)
+class ChromeProxyExplicitBypass(ChromeProxyValidation):
+  """Correctness measurement for explicit proxy bypasses.
+
+  In this test, the configured proxy is the chromeproxy-test server, which
+  sends back responses carrying explicit Chrome-Proxy bypass or block
+  directives. Chrome is expected to bypass the primary proxy (and, for block
+  directives, the fallback as well) for the duration given in the directive.
+  """
+
+ def __init__(self):
+ super(ChromeProxyExplicitBypass, self).__init__(
+ restart_after_each_page=True)
+
+ def CustomizeBrowserOptions(self, options):
+ super(ChromeProxyExplicitBypass,
+ self).CustomizeBrowserOptions(options)
+ options.AppendExtraBrowserArgs('--ignore-certificate-errors')
+ options.AppendExtraBrowserArgs(
+ '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER)
+ options.AppendExtraBrowserArgs(
+ '--spdy-proxy-auth-value=%s' % _FAKE_PROXY_AUTH_VALUE)
+
+ def AddResults(self, tab, results):
+ bad_proxies = [{
+ 'proxy': _TEST_SERVER + ':80',
+ 'retry_seconds_low': self._page.bypass_seconds_low,
+ 'retry_seconds_high': self._page.bypass_seconds_high
+ }]
+ if self._page.num_bypassed_proxies == 2:
+ bad_proxies.append({
+ 'proxy': self._metrics.effective_proxies['fallback'],
+ 'retry_seconds_low': self._page.bypass_seconds_low,
+ 'retry_seconds_high': self._page.bypass_seconds_high
+ })
+ else:
+ # Even if the test page only causes the primary proxy to be bypassed,
+ # Chrome will attempt to fetch the favicon for the test server through
+ # the data reduction proxy, which will cause a "block=0" bypass.
+ bad_proxies.append({'proxy': self._metrics.effective_proxies['fallback']})
+
+ self._metrics.AddResultsForExplicitBypass(tab, results, bad_proxies)
+
+
class ChromeProxySmoke(ChromeProxyValidation):
"""Smoke measurement for basic chrome proxy correctness."""
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
index 6573598..adcc241 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
@@ -169,6 +169,11 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
results.AddValue(scalar.ScalarValue(
results.current_page, 'version_test', 'count', 1))
+ def ProxyListForDev(self, proxies):
+ return [self.effective_proxies['proxy-dev']
+ if proxy == self.effective_proxies['proxy']
+ else proxy for proxy in proxies]
+
def IsProxyBypassed(self, tab):
"""Get whether all configured proxies are bypassed.
@@ -191,8 +196,7 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
proxies = [self.effective_proxies['proxy'],
self.effective_proxies['fallback']]
proxies.sort()
- proxies_dev = [self.effective_proxies['proxy-dev'],
- self.effective_proxies['fallback']]
+ proxies_dev = self.ProxyListForDev(proxies)
proxies_dev.sort()
if bad_proxies == proxies:
return True, proxies
@@ -200,38 +204,54 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
return True, proxies_dev
return False, []
- @staticmethod
- def VerifyBadProxies(
- badProxies, expected_proxies,
- retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
- retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
- """Verify the bad proxy list and their retry times are expected. """
- if not badProxies or (len(badProxies) != len(expected_proxies)):
- return False
+ def VerifyBadProxies(self, bad_proxies, expected_bad_proxies):
+ """Verify the bad proxy list and their retry times are expected.
- # Check all expected proxies.
- proxies = [p['proxy'] for p in badProxies]
- expected_proxies.sort()
- proxies.sort()
- if not expected_proxies == proxies:
- raise ChromeProxyMetricException, (
- 'Bad proxies: got %s want %s' % (
- str(badProxies), str(expected_proxies)))
+ Args:
+ bad_proxies: the list of actual bad proxies and their retry times.
+ expected_bad_proxies: a list of dictionaries in the form:
- # Check retry time
- for p in badProxies:
+ {'proxy': <proxy origin>,
+ 'retry_seconds_low': <minimum bypass duration in seconds>,
+ 'retry_seconds_high': <maximum bypass duration in seconds>}
+
+ If an element in the list is missing either the 'retry_seconds_low'
+ entry or the 'retry_seconds_high' entry, the default bypass minimum
+ and maximum durations respectively will be used for that element.
+ """
+ if not bad_proxies:
+ bad_proxies = []
+
+ # Check that each of the proxy origins and retry times match.
+ for bad_proxy, expected_bad_proxy in map(None, bad_proxies,
+ expected_bad_proxies):
+ # Check if the proxy origins match, allowing for the proxy-dev origin in
+ # the place of the HTTPS proxy origin.
+ if (bad_proxy['proxy'] != expected_bad_proxy['proxy'] and
+ bad_proxy['proxy'] != expected_bad_proxy['proxy'].replace(
+ self.effective_proxies['proxy'],
+ self.effective_proxies['proxy-dev'])):
+ raise ChromeProxyMetricException, (
+ 'Actual and expected bad proxies should match: %s vs. %s' % (
+ str(bad_proxy), str(expected_bad_proxy)))
+
+ # Check that the retry times match.
+ retry_seconds_low = expected_bad_proxy.get('retry_seconds_low',
+ DEFAULT_BYPASS_MIN_SECONDS)
+ retry_seconds_high = expected_bad_proxy.get('retry_seconds_high',
+ DEFAULT_BYPASS_MAX_SECONDS)
retry_time_low = (datetime.datetime.now() +
datetime.timedelta(seconds=retry_seconds_low))
retry_time_high = (datetime.datetime.now() +
- datetime.timedelta(seconds=retry_seconds_high))
- got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
+ datetime.timedelta(seconds=retry_seconds_high))
+ got_retry_time = datetime.datetime.fromtimestamp(
+ int(bad_proxy['retry'])/1000)
if not ProxyRetryTimeInRange(
got_retry_time, retry_time_low, retry_time_high):
raise ChromeProxyMetricException, (
'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
- p['proxy'], str(got_retry_time), str(retry_time_low),
+ bad_proxy['proxy'], str(got_retry_time), str(retry_time_low),
str(retry_time_high)))
- return True
def VerifyAllProxiesBypassed(self, tab):
if tab:
@@ -243,7 +263,8 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
if not is_bypassed:
raise ChromeProxyMetricException, (
'Chrome proxy should be bypassed. proxy info: %s' % info)
- self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
+ self.VerifyBadProxies(info['badProxies'],
+ [{'proxy': p} for p in expected_bad_proxies])
def AddResultsForBypass(self, tab, results):
bypass_count = 0
@@ -259,6 +280,31 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
results.AddValue(scalar.ScalarValue(
results.current_page, 'bypass', 'count', bypass_count))
+ def AddResultsForFallback(self, tab, results):
+ via_proxy_count = 0
+ bypass_count = 0
+ for resp in self.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ via_proxy_count += 1
+ elif resp.ShouldHaveChromeProxyViaHeader():
+ bypass_count += 1
+
+ if bypass_count != 1:
+ raise ChromeProxyMetricException, (
+ 'Only the triggering response should have bypassed all proxies.')
+
+ info = GetProxyInfoFromNetworkInternals(tab)
+ if not 'enabled' in info or not info['enabled']:
+ raise ChromeProxyMetricException, (
+ 'Chrome proxy should be enabled. proxy info: %s' % info)
+ self.VerifyBadProxies(info['badProxies'],
+ [{'proxy': self.effective_proxies['proxy']}])
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'via_proxy', 'count', via_proxy_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
+
def AddResultsForCorsBypass(self, tab, results):
eligible_response_count = 0
bypass_count = 0
@@ -352,11 +398,6 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
count, safebrowsing_count))
- def ProxyListForDev(self, proxies):
- return [self.effective_proxies['proxy-dev']
- if proxy == self.effective_proxies['proxy']
- else proxy for proxy in proxies]
-
def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies):
info = GetProxyInfoFromNetworkInternals(tab)
if not 'enabled' in info or not info['enabled']:
@@ -395,3 +436,22 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
self.VerifyAllProxiesBypassed(tab)
results.AddValue(scalar.ScalarValue(
results.current_page, 'direct_fallback', 'boolean', True))
+
+ def AddResultsForExplicitBypass(self, tab, results, expected_bad_proxies):
+ """Verify results for an explicit bypass test.
+
+ Args:
+ tab: the tab for the test.
+ results: the results object to add the results values to.
+ expected_bad_proxies: A list of dictionary objects representing
+ expected bad proxies and their expected retry time windows.
+ See the definition of VerifyBadProxies for details.
+ """
+ info = GetProxyInfoFromNetworkInternals(tab)
+ if not 'enabled' in info or not info['enabled']:
+ raise ChromeProxyMetricException, (
+ 'Chrome proxy should be enabled. proxy info: %s' % info)
+ self.VerifyBadProxies(info['badProxies'],
+ expected_bad_proxies)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'explicit_bypass', 'boolean', True))
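
Editor's note: the per-entry retry-window check that the reworked VerifyBadProxies performs can be read in isolation as roughly the sketch below. It is a simplified restatement, not the method itself: it assumes the 'retry' field reported in badProxies is a millisecond epoch timestamp (as the int(...)/1000 conversion in the diff implies), takes the bounds as explicit arguments instead of the module's DEFAULT_BYPASS_* constants, and omits whatever slack ProxyRetryTimeInRange may allow.

import datetime

def retry_time_in_window(retry_ms, retry_seconds_low, retry_seconds_high):
  """Returns True if a bad proxy's retry timestamp lands between
  now + retry_seconds_low and now + retry_seconds_high, mirroring the
  retry_time_low/retry_time_high computation in VerifyBadProxies."""
  now = datetime.datetime.now()
  retry_time_low = now + datetime.timedelta(seconds=retry_seconds_low)
  retry_time_high = now + datetime.timedelta(seconds=retry_seconds_high)
  got_retry_time = datetime.datetime.fromtimestamp(int(retry_ms) / 1000.0)
  return retry_time_low <= got_retry_time <= retry_time_high
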
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py b/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py
new file mode 100644
index 0000000..c2b6d30
--- /dev/null
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py
@@ -0,0 +1,82 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from integration_tests import chrome_proxy_measurements as measurements
+from integration_tests import chrome_proxy_metrics as metrics
+from telemetry.page import page as page_module
+from telemetry.page import page_set as page_set_module
+
+
+class ExplicitBypassPage(page_module.Page):
+ """A test page for the explicit bypass tests.
+
+ Attributes:
+ num_bypassed_proxies: The number of proxies that should be bypassed as a
+ direct result of loading this test page. 1 indicates that only the
+ primary data reduction proxy should be bypassed, while 2 indicates
+ that both the primary and fallback data reduction proxies should be
+ bypassed.
+ bypass_seconds_low: The minimum number of seconds that the bypass
+ triggered by loading this page should last.
+ bypass_seconds_high: The maximum number of seconds that the bypass
+ triggered by loading this page should last.
+ """
+
+ def __init__(self,
+ url,
+ page_set,
+ num_bypassed_proxies,
+ bypass_seconds_low,
+ bypass_seconds_high):
+ super(ExplicitBypassPage, self).__init__(url=url, page_set=page_set)
+ self.num_bypassed_proxies = num_bypassed_proxies
+ self.bypass_seconds_low = bypass_seconds_low
+ self.bypass_seconds_high = bypass_seconds_high
+
+
+class ExplicitBypassPageSet(page_set_module.PageSet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(ExplicitBypassPageSet, self).__init__()
+
+ # Test page for "Chrome-Proxy: bypass=0".
+ self.AddPage(ExplicitBypassPage(
+ url=measurements.GetResponseOverrideURL(
+ respHeader='{"Chrome-Proxy":["bypass=0"],'
+ '"Via":["1.1 Chrome-Compression-Proxy"]}'),
+ page_set=self,
+ num_bypassed_proxies=1,
+ bypass_seconds_low=metrics.DEFAULT_BYPASS_MIN_SECONDS,
+ bypass_seconds_high=metrics.DEFAULT_BYPASS_MAX_SECONDS))
+
+ # Test page for "Chrome-Proxy: bypass=3600".
+ self.AddPage(ExplicitBypassPage(
+ url=measurements.GetResponseOverrideURL(
+ respHeader='{"Chrome-Proxy":["bypass=3600"],'
+ '"Via":["1.1 Chrome-Compression-Proxy"]}'),
+ page_set=self,
+ num_bypassed_proxies=1,
+ bypass_seconds_low=3600,
+ bypass_seconds_high=3600))
+
+ # Test page for "Chrome-Proxy: block=0".
+ self.AddPage(ExplicitBypassPage(
+ url=measurements.GetResponseOverrideURL(
+ respHeader='{"Chrome-Proxy":["block=0"],'
+ '"Via":["1.1 Chrome-Compression-Proxy"]}'),
+ page_set=self,
+ num_bypassed_proxies=2,
+ bypass_seconds_low=metrics.DEFAULT_BYPASS_MIN_SECONDS,
+ bypass_seconds_high=metrics.DEFAULT_BYPASS_MAX_SECONDS))
+
+ # Test page for "Chrome-Proxy: block=3600".
+ self.AddPage(ExplicitBypassPage(
+ url=measurements.GetResponseOverrideURL(
+ respHeader='{"Chrome-Proxy":["block=3600"],'
+ '"Via":["1.1 Chrome-Compression-Proxy"]}'),
+ page_set=self,
+ num_bypassed_proxies=2,
+ bypass_seconds_low=3600,
+ bypass_seconds_high=3600))
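
Editor's note: the respHeader arguments above are hand-escaped JSON strings that GetResponseOverrideURL forwards to the chromeproxy-test server as header overrides. As a sketch only (not part of this change), the same values could be built with json.dumps, which avoids the manual escaping:

import json

def explicit_bypass_header(directive):
  """Builds a respHeader value equivalent to the literals above, e.g.
  explicit_bypass_header('bypass=0') or explicit_bypass_header('block=3600')."""
  return json.dumps({'Chrome-Proxy': [directive],
                     'Via': ['1.1 Chrome-Compression-Proxy']})
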
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback.py b/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback.py
new file mode 100644
index 0000000..43a9447
--- /dev/null
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback.py
@@ -0,0 +1,24 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry.page import page_set as page_set_module
+
+
+class FallbackPage(page_module.Page):
+ def __init__(self, url, page_set):
+ super(FallbackPage, self).__init__(url=url, page_set=page_set)
+
+
+class FallbackPageSet(page_set_module.PageSet):
+ """ Chrome proxy test sites """
+ def __init__(self):
+ super(FallbackPageSet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/fallback',
+ ]
+
+ for url in urls_list:
+ self.AddPage(FallbackPage(url, self))