author    sclittle <sclittle@chromium.org>  2015-01-14 18:00:58 -0800
committer Commit bot <commit-bot@chromium.org>  2015-01-15 02:01:41 +0000
commit    6d76812fe3713cfcbb55818c40f75cee092b9de8 (patch)
tree      7f372711ead444da27340fd2935a07880c684786 /tools/chrome_proxy
parent    73f4cdc4af163550f8a0188611f1b8b00602bd95 (diff)
Remove the net-internals dependency from chrome proxy telemetry tests.
This change fully removes any dependency on chrome://net-internals from the data reduction proxy telemetry tests. The ExplicitBypass test has been replaced with the ReenableAfterBypass test, which verifies that the data reduction proxy is re-enabled after a bypass expires. The tests can now all run on Android WebView, although several of them exercise features that WebView does not support, namely safe browsing and the HTTP fallback proxies.

BUG=440522, 447317, 447054

Review URL: https://codereview.chromium.org/792303006

Cr-Commit-Position: refs/heads/master@{#311609}
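Where the old tests scraped chrome://net-internals#proxy, the new ones decide whether a response went through the data reduction proxy from its headers alone. A minimal sketch of that check, assuming a plain dict of response headers rather than the patch's ChromeProxyResponse class:

    CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'

    def came_through_proxy(response_headers):
      # Proxied responses carry a Via entry like "1.1 Chrome-Compression-Proxy".
      via = response_headers.get('Via', '')
      return any(entry.strip().endswith(CHROME_PROXY_VIA_HEADER)
                 for entry in via.split(','))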
Diffstat (limited to 'tools/chrome_proxy')
-rw-r--r--tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py14
-rw-r--r--tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py62
-rw-r--r--tools/chrome_proxy/integration_tests/chrome_proxy_metrics.js72
-rw-r--r--tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py309
-rw-r--r--tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py169
-rw-r--r--tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py82
-rw-r--r--tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py41
-rw-r--r--tools/chrome_proxy/integration_tests/network_metrics.py7
-rw-r--r--tools/chrome_proxy/integration_tests/network_metrics_unittest.py4
9 files changed, 312 insertions(+), 448 deletions(-)
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py b/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
index 86935dc..03ca98f 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
@@ -112,30 +112,30 @@ class ChromeProxySafeBrowsing(benchmark.Benchmark):
@benchmark.Enabled('android')
class ChromeProxyHTTPFallbackProbeURL(benchmark.Benchmark):
- tag = 'fallback-probe'
+ tag = 'fallback_probe'
test = measurements.ChromeProxyHTTPFallbackProbeURL
page_set = pagesets.SyntheticPageSet
@benchmark.Enabled('android')
class ChromeProxyHTTPFallbackViaHeader(benchmark.Benchmark):
- tag = 'fallback-viaheader'
+ tag = 'fallback_viaheader'
test = measurements.ChromeProxyHTTPFallbackViaHeader
page_set = pagesets.FallbackViaHeaderPageSet
@benchmark.Enabled('android')
class ChromeProxyHTTPToDirectFallback(benchmark.Benchmark):
- tag = 'http-to-direct-fallback'
+ tag = 'http_to_direct_fallback'
test = measurements.ChromeProxyHTTPToDirectFallback
page_set = pagesets.HTTPToDirectFallbackPageSet
@benchmark.Enabled('android')
-class ChromeProxyExplicitBypass(benchmark.Benchmark):
- tag = 'explicit-bypass'
- test = measurements.ChromeProxyExplicitBypass
- page_set = pagesets.ExplicitBypassPageSet
+class ChromeProxyReenableAfterBypass(benchmark.Benchmark):
+ tag = 'reenable_after_bypass'
+ test = measurements.ChromeProxyReenableAfterBypass
+ page_set = pagesets.ReenableAfterBypassPageSet
@benchmark.Enabled('android')
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py b/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
index 5550356..eadc010 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
@@ -229,16 +229,9 @@ class ChromeProxyHTTPFallbackViaHeader(ChromeProxyValidation):
options.AppendExtraBrowserArgs('--ignore-certificate-errors')
options.AppendExtraBrowserArgs(
'--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER)
- options.AppendExtraBrowserArgs(
- '--spdy-proxy-auth-value=%s' % _FAKE_PROXY_AUTH_VALUE)
def AddResults(self, tab, results):
- proxies = [
- _TEST_SERVER + ":80",
- self._metrics.effective_proxies['fallback'],
- self._metrics.effective_proxies['direct']]
- bad_proxies = [_TEST_SERVER + ":80", metrics.PROXY_SETTING_HTTP]
- self._metrics.AddResultsForHTTPFallback(tab, results, proxies, bad_proxies)
+ self._metrics.AddResultsForHTTPFallback(tab, results)
class ChromeProxyClientVersion(ChromeProxyValidation):
@@ -299,66 +292,33 @@ class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation):
'--spdy-proxy-auth-origin=http://nonexistent.googlezip.net')
def WillNavigateToPage(self, page, tab):
+ super(ChromeProxyHTTPToDirectFallback, self).WillNavigateToPage(page, tab)
# Attempt to load a page through the nonexistent primary proxy in order to
# cause a proxy fallback, and have this test run starting from the HTTP
# fallback proxy.
tab.Navigate(_TEST_SERVER_DEFAULT_URL)
tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
- proxies = [
- 'nonexistent.googlezip.net:80',
- self._metrics.effective_proxies['fallback'],
- self._metrics.effective_proxies['direct']]
- # TODO(sclittle): Remove this dependency on net-internals#proxy once an
- # alternative method of verifying that Chrome is on the fallback proxy
- # exists.
- self._metrics.VerifyProxyInfo(tab, proxies, proxies[:1])
- super(ChromeProxyHTTPToDirectFallback, self).WillNavigateToPage(page, tab)
def AddResults(self, tab, results):
self._metrics.AddResultsForHTTPToDirectFallback(tab, results)
-class ChromeProxyExplicitBypass(ChromeProxyValidation):
- """Correctness measurement for explicit proxy bypasses.
+class ChromeProxyReenableAfterBypass(ChromeProxyValidation):
+ """Correctness measurement for re-enabling proxies after bypasses.
- In this test, the configured proxy is the chromeproxy-test server which
- will send back a response without the expected Via header. Chrome is
- expected to use the fallback proxy and add the configured proxy to the
- bad proxy list.
+ This test loads a page that causes all data reduction proxies to be bypassed
+ for 1 to 5 minutes, then waits until the bypass should have expired and
+ verifies that the proxy is no longer bypassed.
"""
def __init__(self):
- super(ChromeProxyExplicitBypass, self).__init__(
+ super(ChromeProxyReenableAfterBypass, self).__init__(
restart_after_each_page=True)
- def CustomizeBrowserOptions(self, options):
- super(ChromeProxyExplicitBypass,
- self).CustomizeBrowserOptions(options)
- options.AppendExtraBrowserArgs('--ignore-certificate-errors')
- options.AppendExtraBrowserArgs(
- '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER)
- options.AppendExtraBrowserArgs(
- '--spdy-proxy-auth-value=%s' % _FAKE_PROXY_AUTH_VALUE)
-
def AddResults(self, tab, results):
- bad_proxies = [{
- 'proxy': _TEST_SERVER + ':80',
- 'retry_seconds_low': self._page.bypass_seconds_low,
- 'retry_seconds_high': self._page.bypass_seconds_high
- }]
- if self._page.num_bypassed_proxies == 2:
- bad_proxies.append({
- 'proxy': self._metrics.effective_proxies['fallback'],
- 'retry_seconds_low': self._page.bypass_seconds_low,
- 'retry_seconds_high': self._page.bypass_seconds_high
- })
- else:
- # Even if the test page only causes the primary proxy to be bypassed,
- # Chrome will attempt to fetch the favicon for the test server through
- # the data reduction proxy, which will cause a "block=0" bypass.
- bad_proxies.append({'proxy': self._metrics.effective_proxies['fallback']})
-
- self._metrics.AddResultsForExplicitBypass(tab, results, bad_proxies)
+ self._metrics.AddResultsForReenableAfterBypass(
+ tab, results, self._page.bypass_seconds_min,
+ self._page.bypass_seconds_max)
class ChromeProxySmoke(ChromeProxyValidation):
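With the explicit proxy lists gone from AddResultsForHTTPFallback, the metric (below, in chrome_proxy_metrics.py) infers the fallback proxy from response data alone. A rough sketch of that decision, where resp stands in for a ChromeProxyResponse:

    def via_http_fallback(resp):
      # Fallback responses carry the proxy Via header and, when a remote
      # port is reported at all, arrive on port 80.
      return (resp.HasChromeProxyViaHeader() and
              (not resp.remote_port or resp.remote_port == 80))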
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.js b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.js
deleted file mode 100644
index 63fc81f..0000000
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.js
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file contains common utilities to find chrome proxy related elements on
-// a page and collect info from them.
-
-(function() {
- var PROXY_VIEW_ID = 'proxy-view-tab-content';
- var PROXY_VIEW_EFFECTIVE_SETTINGS_ID = 'proxy-view-effective-settings';
- var PROXY_VIEW_BAD_PROXIES_ID = 'proxy-view-bad-proxies-div';
- var PROXY_VIEW_BAD_PROXIES_TBODY = 'proxy-view-bad-proxies-tbody';
- var PRXOY_SETTINGS_PREFIX = 'Proxy server for HTTP: ['
- var PROXY_SETTINGS_SIGNATURE = 'proxy.googlezip.net:443, ' +
- 'compress.googlezip.net:80, direct://';
-
- // Returns the effective proxy in an array from settings.
- // An example of the settings is:
- // "Proxy server for HTTP: [proxy.googlezip.net:443, " +
- // "compress.googlezip.net:80, direct://]"
- function getEffectiveProxies(doc) {
- var settings = doc.getElementById(PROXY_VIEW_EFFECTIVE_SETTINGS_ID);
- if (settings && settings.innerHTML &&
- settings.innerHTML.indexOf(PRXOY_SETTINGS_PREFIX) == 0) {
- var left = settings.innerHTML.indexOf('[');
- var right = settings.innerHTML.indexOf(']');
- if (left >= 0 && right > left) {
- return settings.innerHTML.substring(left + 1, right).split(/[ ,]+/);
- }
- }
- return [];
- }
-
- // Returns an array of bad proxies. Each element is a bad proxy with
- // attribute 'proxy' as the proxy name and attribute 'retry' as the
- // next retry time.
- function getBadProxyList(doc) {
- var bad_proxies = doc.getElementById(PROXY_VIEW_BAD_PROXIES_ID);
- if (bad_proxies.hasAttribute('style') &&
- ('cssText' in bad_proxies.style) &&
- bad_proxies.style.cssText == 'display: none;') {
- return null;
- }
- var tbody = doc.getElementById(PROXY_VIEW_BAD_PROXIES_TBODY);
- results = [];
- for (var r = 0, n = tbody.rows.length; r < n; r++) {
- results[r] = {};
- results[r].proxy = tbody.rows[r].cells[0].innerHTML;
- timeSpan = tbody.rows[r].cells[1].getElementsByTagName('span')[0];
- if (timeSpan.hasAttribute('title') && timeSpan.title.indexOf('t=') == 0) {
- results[r].retry = timeSpan.title.substr(2);
- } else {
- results[r].retry = '-1';
- }
- }
- return results;
- }
-
- function getChromeProxyInfo() {
- if (!document.getElementById(PROXY_VIEW_ID)) {
- return null;
- }
- info = {};
- info.proxies = getEffectiveProxies(document);
- info.enabled = (info.proxies.length > 1 &&
- info.proxies[info.proxies.length - 1] == 'direct://' &&
- info.proxies[info.proxies.length - 2] != 'direct://');
- info.badProxies = getBadProxyList(document);
- return info;
- };
- window.__getChromeProxyInfo = getChromeProxyInfo;
-})();
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
index a92ef14..8857a56 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
@@ -2,12 +2,10 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import datetime
import logging
-import os
+import time
from integration_tests import network_metrics
-from telemetry.core import util
from telemetry.page import page_test
from telemetry.value import scalar
@@ -19,41 +17,6 @@ class ChromeProxyMetricException(page_test.MeasurementFailure):
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'
-PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
-PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
-PROXY_DEV_SETTING_HTTP = 'proxy-xt.googlezip.net:80'
-PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
-PROXY_SETTING_DIRECT = 'direct://'
-
-# The default Chrome Proxy bypass time is a range from one to five mintues.
-# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
-DEFAULT_BYPASS_MIN_SECONDS = 60
-DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
-
-def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
- tab.Navigate(url)
- with open(os.path.join(os.path.dirname(__file__),
- 'chrome_proxy_metrics.js')) as f:
- js = f.read()
- tab.ExecuteJavaScript(js)
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
-
- # Sometimes, the proxy information on net_internals#proxy is slow to come up.
- # In order to prevent this from causing tests to flake frequently, wait for
- # up to 10 seconds for this information to appear.
- def IsDataReductionProxyEnabled():
- info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
- return info['enabled']
-
- util.WaitFor(IsDataReductionProxyEnabled, 10)
- info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
- return info
-
-
-def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
- return (retry_time >= low - datetime.timedelta(seconds=grace_seconds) and
- (retry_time < high + datetime.timedelta(seconds=grace_seconds)))
-
class ChromeProxyResponse(network_metrics.HTTPResponse):
""" Represents an HTTP response from a timeleine event."""
@@ -126,12 +89,6 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
def __init__(self):
super(ChromeProxyMetric, self).__init__()
self.compute_data_saving = True
- self.effective_proxies = {
- "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
- "proxy-dev": PROXY_DEV_SETTING_HTTP,
- "fallback": PROXY_SETTING_HTTP,
- "direct": PROXY_SETTING_DIRECT,
- }
def SetEvents(self, events):
"""Used for unittest."""
@@ -168,6 +125,7 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
def AddResultsForHeaderValidation(self, tab, results):
via_count = 0
+
for resp in self.IterResponses(tab):
if resp.IsValidByViaHeader():
via_count += 1
@@ -233,110 +191,9 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
results.AddValue(scalar.ScalarValue(
results.current_page, 'bypass', 'count', bypass_count))
- def ProxyListForDev(self, proxies):
- return [self.effective_proxies['proxy-dev']
- if proxy == self.effective_proxies['proxy']
- else proxy for proxy in proxies]
-
- def IsProxyBypassed(self, tab):
- """Get whether all configured proxies are bypassed.
-
- Returns:
- A tuple of the form (boolean, string list). If all configured proxies
- are bypassed, then the return value will be (True, bypassed proxies).
- Otherwise, the return value will be (False, empty list).
- """
- if not tab:
- return False, []
-
- info = GetProxyInfoFromNetworkInternals(tab)
- if not info['enabled']:
- raise ChromeProxyMetricException, (
- 'Chrome proxy should be enabled. proxy info: %s' % info)
- if not info['badProxies']:
- return False, []
-
- bad_proxies = [str(p['proxy']) for p in info['badProxies']]
- # Expect all but the "direct://" proxy to be bad.
- expected_bad_proxies = info['proxies'][:-1]
- if set(bad_proxies) == set(expected_bad_proxies):
- return True, expected_bad_proxies
- return False, []
-
- def VerifyBadProxies(self, bad_proxies, expected_bad_proxies):
- """Verify the bad proxy list and their retry times are expected.
-
- Args:
- bad_proxies: the list of actual bad proxies and their retry times.
- expected_bad_proxies: a list of dictionaries in the form:
-
- {'proxy': <proxy origin>,
- 'retry_seconds_low': <minimum bypass duration in seconds>,
- 'retry_seconds_high': <maximum bypass duration in seconds>}
-
- If an element in the list is missing either the 'retry_seconds_low'
- entry or the 'retry_seconds_high' entry, the default bypass minimum
- and maximum durations respectively will be used for that element.
- """
- if not bad_proxies:
- bad_proxies = []
- if len(bad_proxies) != len(expected_bad_proxies):
- raise ChromeProxyMetricException, (
- 'Actual and expected bad proxy lists should match: %s vs. %s' % (
- str(bad_proxies), str(expected_bad_proxies)))
-
- # Check that each of the proxy origins and retry times match.
- for expected_bad_proxy in expected_bad_proxies:
- # Find a matching actual bad proxy origin, allowing for the proxy-dev
- # origin in the place of the HTTPS proxy origin.
- bad_proxy = None
- for actual_proxy in bad_proxies:
- if (expected_bad_proxy['proxy'] == actual_proxy['proxy'] or (
- self.effective_proxies['proxy-dev'] == actual_proxy['proxy'] and
- self.effective_proxies['proxy'] == expected_bad_proxy['proxy'])):
- bad_proxy = actual_proxy
- break
- if not bad_proxy:
- raise ChromeProxyMetricException, (
- 'No match for expected bad proxy %s - actual and expected bad '
- 'proxies should match: %s vs. %s' % (expected_bad_proxy['proxy'],
- str(bad_proxies),
- str(expected_bad_proxies)))
-
- # Check that the retry times match.
- retry_seconds_low = expected_bad_proxy.get('retry_seconds_low',
- DEFAULT_BYPASS_MIN_SECONDS)
- retry_seconds_high = expected_bad_proxy.get('retry_seconds_high',
- DEFAULT_BYPASS_MAX_SECONDS)
- retry_time_low = (datetime.datetime.now() +
- datetime.timedelta(seconds=retry_seconds_low))
- retry_time_high = (datetime.datetime.now() +
- datetime.timedelta(seconds=retry_seconds_high))
- got_retry_time = datetime.datetime.fromtimestamp(
- int(bad_proxy['retry'])/1000)
- if not ProxyRetryTimeInRange(
- got_retry_time, retry_time_low, retry_time_high):
- raise ChromeProxyMetricException, (
- 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
- bad_proxy['proxy'], str(got_retry_time), str(retry_time_low),
- str(retry_time_high)))
-
- def VerifyAllProxiesBypassed(self, tab):
- """Verify that all proxies are bypassed for 1 to 5 minutes."""
- if tab:
- info = GetProxyInfoFromNetworkInternals(tab)
- if not info['enabled']:
- raise ChromeProxyMetricException, (
- 'Chrome proxy should be enabled. proxy info: %s' % info)
- is_bypassed, expected_bad_proxies = self.IsProxyBypassed(tab)
- if not is_bypassed:
- raise ChromeProxyMetricException, (
- 'Chrome proxy should be bypassed. proxy info: %s' % info)
- self.VerifyBadProxies(info['badProxies'],
- [{'proxy': p} for p in expected_bad_proxies])
-
def AddResultsForBypass(self, tab, results):
bypass_count = 0
+
for resp in self.IterResponses(tab):
if resp.HasChromeProxyViaHeader():
r = resp.response
@@ -389,6 +246,7 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
def AddResultsForBlockOnce(self, tab, results):
eligible_response_count = 0
bypass_count = 0
+
for resp in self.IterResponses(tab):
if resp.ShouldHaveChromeProxyViaHeader():
eligible_response_count += 1
@@ -415,6 +273,7 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
def AddResultsForSafebrowsing(self, tab, results):
count = 0
safebrowsing_count = 0
+
for resp in self.IterResponses(tab):
count += 1
if resp.IsSafebrowsingResponse():
@@ -433,76 +292,130 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
count, safebrowsing_count))
- def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies):
- info = GetProxyInfoFromNetworkInternals(tab)
- if not 'enabled' in info or not info['enabled']:
- raise ChromeProxyMetricException, (
- 'Chrome proxy should be enabled. proxy info: %s' % info)
- proxies = info['proxies']
- if (set(proxies) != set(expected_proxies) and
- set(proxies) != set(self.ProxyListForDev(expected_proxies))):
- raise ChromeProxyMetricException, (
- 'Wrong effective proxies (%s). Expect: "%s"' % (
- str(proxies), str(expected_proxies)))
-
- bad_proxies = []
- if 'badProxies' in info and info['badProxies']:
- bad_proxies = [p['proxy'] for p in info['badProxies']
- if 'proxy' in p and p['proxy']]
- if (set(bad_proxies) != set(expected_bad_proxies) and
- set(bad_proxies) != set(self.ProxyListForDev(expected_bad_proxies))):
- raise ChromeProxyMetricException, (
- 'Wrong bad proxies (%s). Expect: "%s"' % (
- str(bad_proxies), str(expected_bad_proxies)))
-
- def AddResultsForHTTPFallback(
- self, tab, results, expected_proxies=None, expected_bad_proxies=None):
- if not expected_proxies:
- expected_proxies = [self.effective_proxies['fallback'],
- self.effective_proxies['direct']]
- if not expected_bad_proxies:
- expected_bad_proxies = []
-
- # TODO(sclittle): Remove this dependency on net-internals#proxy once an
- # alternative method of verifying that Chrome is on the fallback proxy
- # exists.
- self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies)
+ def AddResultsForHTTPFallback(self, tab, results):
+ via_fallback_count = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.ShouldHaveChromeProxyViaHeader():
+ # All responses should have come through the HTTP fallback proxy, which
+ # means that they should have the via header, and if a remote port is
+ # defined, it should be port 80.
+ if (not resp.HasChromeProxyViaHeader() or
+ (resp.remote_port and resp.remote_port != 80)):
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ '%s: Should have come through the fallback proxy.\n'
+ 'Response: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, str(resp.remote_port), r.status, r.status_text,
+ r.headers))
+ via_fallback_count += 1
+
results.AddValue(scalar.ScalarValue(
- results.current_page, 'http_fallback', 'boolean', True))
+ results.current_page, 'via_fallback', 'count', via_fallback_count))
def AddResultsForHTTPToDirectFallback(self, tab, results):
+ via_fallback_count = 0
bypass_count = 0
- for resp in self.IterResponses(tab):
+ responses = self.IterResponses(tab)
+
+ # The very first response should be through the HTTP fallback proxy.
+ fallback_resp = next(responses, None)
+ if not fallback_resp:
+ raise ChromeProxyMetricException, 'There should be at least one response.'
+ elif (not fallback_resp.HasChromeProxyViaHeader() or
+ fallback_resp.remote_port != 80):
+ r = fallback_resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should have come through the fallback proxy.\n'
+ 'Response: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, str(fallback_resp.remote_port), r.status, r.status_text,
+ r.headers))
+ else:
+ via_fallback_count += 1
+
+ # All other responses should have been bypassed.
+ for resp in responses:
if resp.HasChromeProxyViaHeader():
r = resp.response
raise ChromeProxyMetricException, (
- 'Response for %s should not have via header. '
+ 'Response for %s should not have via header.\n'
'Response: status=(%d, %s)\nHeaders:\n %s' % (
r.url, r.status, r.status_text, r.headers))
else:
bypass_count += 1
results.AddValue(scalar.ScalarValue(
+ results.current_page, 'via_fallback', 'count', via_fallback_count))
+ results.AddValue(scalar.ScalarValue(
results.current_page, 'bypass', 'count', bypass_count))
- def AddResultsForExplicitBypass(self, tab, results, expected_bad_proxies):
- """Verify results for an explicit bypass test.
+ def AddResultsForReenableAfterBypass(
+ self, tab, results, bypass_seconds_min, bypass_seconds_max):
+ """Verify results for a re-enable after bypass test.
Args:
tab: the tab for the test.
results: the results object to add the results values to.
- expected_bad_proxies: A list of dictionary objects representing
- expected bad proxies and their expected retry time windows.
- See the definition of VerifyBadProxies for details.
+ bypass_seconds_min: the minimum duration of the bypass.
+ bypass_seconds_max: the maximum duration of the bypass.
"""
- info = GetProxyInfoFromNetworkInternals(tab)
- if not 'enabled' in info or not info['enabled']:
- raise ChromeProxyMetricException, (
- 'Chrome proxy should be enabled. proxy info: %s' % info)
- # TODO(sclittle): Remove this dependency on net-internals#proxy once an
- # alternative method of verifying that Chrome is on the fallback proxy
- # exists.
- self.VerifyBadProxies(info['badProxies'],
- expected_bad_proxies)
+ bypass_count = 0
+ via_count = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should not have via header.\n'
+ 'Response: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ bypass_count += 1
+
+ # Wait until 30 seconds before the bypass should expire, and fetch a page.
+ # It should not have the via header because the proxy should still be
+ # bypassed.
+ time.sleep(bypass_seconds_min - 30)
+
+ tab.ClearCache(force=True)
+ before_metrics = ChromeProxyMetric()
+ before_metrics.Start(results.current_page, tab)
+ tab.Navigate('http://chromeproxy-test.appspot.com/default')
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ before_metrics.Stop(results.current_page, tab)
+
+ for resp in before_metrics.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should not have via header; proxy should still '
+ 'be bypassed.\nResponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ bypass_count += 1
+
+ # Wait until 30 seconds after the bypass should expire, and fetch a page. It
+ # should have the via header since the proxy should no longer be bypassed.
+ time.sleep((bypass_seconds_max + 30) - (bypass_seconds_min - 30))
+
+ tab.ClearCache(force=True)
+ after_metrics = ChromeProxyMetric()
+ after_metrics.Start(results.current_page, tab)
+ tab.Navigate('http://chromeproxy-test.appspot.com/default')
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ after_metrics.Stop(results.current_page, tab)
+
+ for resp in after_metrics.IterResponses(tab):
+ if not resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should have via header; proxy should no longer '
+ 'be bypassed.\nResponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ via_count += 1
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
results.AddValue(scalar.ScalarValue(
- results.current_page, 'explicit_bypass', 'boolean', True))
+ results.current_page, 'via', 'count', via_count))
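For the default block=0 window used by the new page set, the two sleeps in AddResultsForReenableAfterBypass work out as follows; a worked example with the values from reenable_after_bypass.py:

    bypass_seconds_min, bypass_seconds_max = 60, 300
    # First check lands 30 seconds before the earliest possible expiry.
    first_wait = bypass_seconds_min - 30                                 # 30s
    # Second check lands 30 seconds after the latest possible expiry.
    second_wait = (bypass_seconds_max + 30) - (bypass_seconds_min - 30)  # 300s
    assert (first_wait, second_wait) == (30, 300)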
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
index 37fdb5f..9396a09 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
@@ -12,7 +12,7 @@ from metrics import test_page_test_results
# Timeline events used in tests.
# An HTML not via proxy.
-EVENT_HTML_PROXY = network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+EVENT_HTML_DIRECT = network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.html1',
response_headers={
'Content-Type': 'text/html',
@@ -20,6 +20,32 @@ EVENT_HTML_PROXY = network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
},
body=network_unittest.HTML_BODY)
+# An HTML via proxy.
+EVENT_HTML_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=network_unittest.HTML_BODY,
+ remote_port=443))
+
+# An HTML via the HTTP fallback proxy.
+EVENT_HTML_PROXY_VIA_HTTP_FALLBACK = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=network_unittest.HTML_BODY,
+ remote_port=80))
+
# An HTML via proxy with the deprecated Via header.
EVENT_HTML_PROXY_DEPRECATED_VIA = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
@@ -33,6 +59,34 @@ EVENT_HTML_PROXY_DEPRECATED_VIA = (
},
body=network_unittest.HTML_BODY))
+# An image via proxy with Via header.
+EVENT_IMAGE_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ remote_port=443))
+
+# An image via the HTTP fallback proxy.
+EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ remote_port=80))
+
# An image via proxy with Via header and it is cached.
EVENT_IMAGE_PROXY_CACHED = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
@@ -136,7 +190,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
def testChromeProxyMetricForDataSaving(self):
metric = metrics.ChromeProxyMetric()
events = [
- EVENT_HTML_PROXY,
+ EVENT_HTML_DIRECT,
EVENT_HTML_PROXY_DEPRECATED_VIA,
EVENT_IMAGE_PROXY_CACHED,
EVENT_IMAGE_DIRECT]
@@ -153,7 +207,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
def testChromeProxyMetricForHeaderValidation(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([
- EVENT_HTML_PROXY,
+ EVENT_HTML_DIRECT,
EVENT_HTML_PROXY_DEPRECATED_VIA,
EVENT_IMAGE_PROXY_CACHED,
EVENT_IMAGE_DIRECT])
@@ -178,7 +232,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
def testChromeProxyMetricForBypass(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([
- EVENT_HTML_PROXY,
+ EVENT_HTML_DIRECT,
EVENT_HTML_PROXY_DEPRECATED_VIA,
EVENT_IMAGE_PROXY_CACHED,
EVENT_IMAGE_DIRECT])
@@ -206,44 +260,24 @@ class ChromeProxyMetricTest(unittest.TestCase):
metric.AddResultsForCorsBypass(None, results)
results.AssertHasPageSpecificScalarValue('cors_bypass', 'count', 1)
-
- def testChromeProxyMetricForHTTPFallback(self):
+ def testChromeProxyMetricForBlockOnce(self):
metric = metrics.ChromeProxyMetric()
- metric.SetEvents([
- EVENT_HTML_PROXY,
- EVENT_HTML_PROXY_DEPRECATED_VIA])
+ metric.SetEvents([EVENT_HTML_DIRECT,
+ EVENT_IMAGE_PROXY_VIA])
results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForBlockOnce(None, results)
+ results.AssertHasPageSpecificScalarValue('eligible_responses', 'count', 2)
+ results.AssertHasPageSpecificScalarValue('bypass', 'count', 1)
- fallback_exception = False
- info = {}
- info['enabled'] = False
- self._StubGetProxyInfo(info)
- try:
- metric.AddResultsForBypass(None, results)
- except metrics.ChromeProxyMetricException:
- fallback_exception = True
- self.assertTrue(fallback_exception)
-
- fallback_exception = False
- info['enabled'] = True
- info['proxies'] = [
- 'something.else.com:80',
- metrics.PROXY_SETTING_DIRECT
- ]
- self._StubGetProxyInfo(info)
+ metric.SetEvents([EVENT_HTML_DIRECT,
+ EVENT_IMAGE_DIRECT])
+ exception_occurred = False
try:
- metric.AddResultsForBypass(None, results)
+ metric.AddResultsForBlockOnce(None, results)
except metrics.ChromeProxyMetricException:
- fallback_exception = True
- self.assertTrue(fallback_exception)
-
- info['enabled'] = True
- info['proxies'] = [
- metrics.PROXY_SETTING_HTTP,
- metrics.PROXY_SETTING_DIRECT
- ]
- self._StubGetProxyInfo(info)
- metric.AddResultsForHTTPFallback(None, results)
+ exception_occurred = True
+ # The second response was over direct, but was expected via proxy.
+ self.assertTrue(exception_occurred)
def testChromeProxyMetricForSafebrowsing(self):
metric = metrics.ChromeProxyMetric()
@@ -258,3 +292,64 @@ class ChromeProxyMetricTest(unittest.TestCase):
metric.SetEvents([])
metric.AddResultsForSafebrowsing(None, results)
results.AssertHasPageSpecificScalarValue('safebrowsing', 'boolean', True)
+
+ def testChromeProxyMetricForHTTPFallback(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK])
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForHTTPFallback(None, results)
+ results.AssertHasPageSpecificScalarValue('via_fallback', 'count', 2)
+
+ metric.SetEvents([EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_PROXY_VIA])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPFallback(None, results)
+ except metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The responses came through the SPDY proxy, but were expected through the
+ # HTTP fallback proxy.
+ self.assertTrue(exception_occurred)
+
+ def testChromeProxyMetricForHTTPToDirectFallback(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_HTML_DIRECT,
+ EVENT_IMAGE_DIRECT])
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForHTTPToDirectFallback(None, results)
+ results.AssertHasPageSpecificScalarValue('via_fallback', 'count', 1)
+ results.AssertHasPageSpecificScalarValue('bypass', 'count', 2)
+
+ metric.SetEvents([EVENT_HTML_PROXY_VIA,
+ EVENT_HTML_DIRECT])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPToDirectFallback(None, results)
+ except metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The first response was expected through the HTTP fallback proxy.
+ self.assertTrue(exception_occurred)
+
+ metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPToDirectFallback(None, results)
+ except metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # All but the first response were expected to be over direct.
+ self.assertTrue(exception_occurred)
+
+ metric.SetEvents([EVENT_HTML_DIRECT,
+ EVENT_HTML_DIRECT,
+ EVENT_IMAGE_DIRECT])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPToDirectFallback(None, results)
+ except metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The first response was expected through the HTTP fallback proxy.
+ self.assertTrue(exception_occurred)
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py b/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py
deleted file mode 100644
index 615b268..0000000
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/explicit_bypass.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from integration_tests import chrome_proxy_measurements as measurements
-from integration_tests import chrome_proxy_metrics as metrics
-from telemetry.page import page as page_module
-from telemetry.page import page_set as page_set_module
-
-
-class ExplicitBypassPage(page_module.Page):
- """A test page for the explicit bypass tests.
-
- Attributes:
- num_bypassed_proxies: The number of proxies that should be bypassed as a
- direct result of loading this test page. 1 indicates that only the
- primary data reduction proxy should be bypassed, while 2 indicates
- that both the primary and fallback data reduction proxies should be
- bypassed.
- bypass_seconds_low: The minimum number of seconds that the bypass
- triggered by loading this page should last.
- bypass_seconds_high: The maximum number of seconds that the bypass
- triggered by loading this page should last.
- """
-
- def __init__(self,
- url,
- page_set,
- num_bypassed_proxies,
- bypass_seconds_low,
- bypass_seconds_high):
- super(ExplicitBypassPage, self).__init__(url=url, page_set=page_set)
- self.num_bypassed_proxies = num_bypassed_proxies
- self.bypass_seconds_low = bypass_seconds_low
- self.bypass_seconds_high = bypass_seconds_high
-
-
-class ExplicitBypassPageSet(page_set_module.PageSet):
- """ Chrome proxy test sites """
-
- def __init__(self):
- super(ExplicitBypassPageSet, self).__init__()
-
- # Test page for "Chrome-Proxy: bypass=0".
- self.AddUserStory(ExplicitBypassPage(
- url=measurements.GetResponseOverrideURL(
- respHeader='{"Chrome-Proxy":["bypass=0"],'
- '"Via":["1.1 Chrome-Compression-Proxy"]}'),
- page_set=self,
- num_bypassed_proxies=1,
- bypass_seconds_low=metrics.DEFAULT_BYPASS_MIN_SECONDS,
- bypass_seconds_high=metrics.DEFAULT_BYPASS_MAX_SECONDS))
-
- # Test page for "Chrome-Proxy: bypass=3600".
- self.AddUserStory(ExplicitBypassPage(
- url=measurements.GetResponseOverrideURL(
- respHeader='{"Chrome-Proxy":["bypass=3600"],'
- '"Via":["1.1 Chrome-Compression-Proxy"]}'),
- page_set=self,
- num_bypassed_proxies=1,
- bypass_seconds_low=3600,
- bypass_seconds_high=3600))
-
- # Test page for "Chrome-Proxy: block=0".
- self.AddUserStory(ExplicitBypassPage(
- url=measurements.GetResponseOverrideURL(
- respHeader='{"Chrome-Proxy":["block=0"],'
- '"Via":["1.1 Chrome-Compression-Proxy"]}'),
- page_set=self,
- num_bypassed_proxies=2,
- bypass_seconds_low=metrics.DEFAULT_BYPASS_MIN_SECONDS,
- bypass_seconds_high=metrics.DEFAULT_BYPASS_MAX_SECONDS))
-
- # Test page for "Chrome-Proxy: block=3600".
- self.AddUserStory(ExplicitBypassPage(
- url=measurements.GetResponseOverrideURL(
- respHeader='{"Chrome-Proxy":["block=3600"],'
- '"Via":["1.1 Chrome-Compression-Proxy"]}'),
- page_set=self,
- num_bypassed_proxies=2,
- bypass_seconds_low=3600,
- bypass_seconds_high=3600))
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py b/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py
new file mode 100644
index 0000000..dc74029
--- /dev/null
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry.page import page_set as page_set_module
+
+
+class ReenableAfterBypassPage(page_module.Page):
+ """A test page for the re-enable after bypass tests.
+
+ Attributes:
+ bypass_seconds_min: The minimum number of seconds that the bypass
+ triggered by loading this page should last.
+ bypass_seconds_max: The maximum number of seconds that the bypass
+ triggered by loading this page should last.
+ """
+
+ def __init__(self,
+ url,
+ page_set,
+ bypass_seconds_min,
+ bypass_seconds_max):
+ super(ReenableAfterBypassPage, self).__init__(url=url, page_set=page_set)
+ self.bypass_seconds_min = bypass_seconds_min
+ self.bypass_seconds_max = bypass_seconds_max
+
+
+class ReenableAfterBypassPageSet(page_set_module.PageSet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(ReenableAfterBypassPageSet, self).__init__()
+
+ # Test page for "Chrome-Proxy: block=0". Loading this page should cause all
+ # data reduction proxies to be bypassed for one to five minutes.
+ self.AddUserStory(ReenableAfterBypassPage(
+ url="http://check.googlezip.net/block",
+ page_set=self,
+ bypass_seconds_min=60,
+ bypass_seconds_max=300))
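For reference, the block bypass is triggered by a Chrome-Proxy response header; a hand-built example of the header shape, based on the deleted explicit_bypass.py above:

    block_response_headers = {
        # "block=0" asks the client to bypass all data reduction proxies for
        # the default window of one to five minutes; "block=N" means N seconds.
        'Chrome-Proxy': 'block=0',
        'Via': '1.1 Chrome-Compression-Proxy',
    }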
diff --git a/tools/chrome_proxy/integration_tests/network_metrics.py b/tools/chrome_proxy/integration_tests/network_metrics.py
index 59155c9..747d60d 100644
--- a/tools/chrome_proxy/integration_tests/network_metrics.py
+++ b/tools/chrome_proxy/integration_tests/network_metrics.py
@@ -26,6 +26,9 @@ class HTTPResponse(object):
def __init__(self, event):
self._response = (
inspector_network.InspectorNetworkResponseData.FromTimelineEvent(event))
+ self._remote_port = None
+ if 'response' in event.args and 'remotePort' in event.args['response']:
+ self._remote_port = event.args['response']['remotePort']
self._content_length = None
@property
@@ -33,6 +36,10 @@ class HTTPResponse(object):
return self._response
@property
+ def remote_port(self):
+ return self._remote_port
+
+ @property
def url_signature(self):
return hashlib.md5(self.response.url).hexdigest()
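The remotePort plumbing above can be exercised without a browser; a minimal sketch using a hand-built event args dict in place of a real TimelineEvent:

    event_args = {'response': {'url': 'http://test', 'remotePort': 80}}

    remote_port = None
    if 'response' in event_args and 'remotePort' in event_args['response']:
      remote_port = event_args['response']['remotePort']
    assert remote_port == 80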
diff --git a/tools/chrome_proxy/integration_tests/network_metrics_unittest.py b/tools/chrome_proxy/integration_tests/network_metrics_unittest.py
index 028bb23..8773c8e 100644
--- a/tools/chrome_proxy/integration_tests/network_metrics_unittest.py
+++ b/tools/chrome_proxy/integration_tests/network_metrics_unittest.py
@@ -27,7 +27,8 @@ class NetworkMetricTest(unittest.TestCase):
@staticmethod
def MakeNetworkTimelineEvent(
url, response_headers, body=None, base64_encoded_body=False,
- served_from_cache=False, request_headers=None, status=200):
+ served_from_cache=False, request_headers=None, status=200,
+ remote_port=None):
if not request_headers:
request_headers = {}
e = event.TimelineEvent('network', 'HTTPResponse', 0, 0)
@@ -38,6 +39,7 @@ class NetworkMetricTest(unittest.TestCase):
'url': url,
'headers': response_headers,
'requestHeaders': request_headers,
+ 'remotePort': remote_port,
}
e.args['body'] = body
e.args['base64_encoded_body'] = base64_encoded_body