author     bustamante <bustamante@google.com>      2015-05-05 11:13:12 -0700
committer  Commit bot <commit-bot@chromium.org>    2015-05-05 18:14:35 +0000
commit     84809a6eb565c1bc54abb25c261f94ed2b4b3c83 (patch)
tree       43e6a6087a6d3e0cbd5aefa194cad522ee27bb11
parent     74014e0e926e2170188c9d7cc4706ccba36448c9 (diff)
Move top_20 tests to a separate suite
We don't want to run these tests on the continuous waterfall as they take a lot
of time and can be flaky, but they're still valuable to ensure chrome_proxy
works on the "Top 20" sites and can be run as needed.

BUG=480032
Review URL: https://codereview.chromium.org/1098253004

Cr-Commit-Position: refs/heads/master@{#328357}
-rw-r--r--  tools/chrome_proxy/common/__init__.py                              0
-rw-r--r--  tools/chrome_proxy/common/chrome_proxy_benchmark.py               21
-rw-r--r--  tools/chrome_proxy/common/chrome_proxy_measurements.py            65
-rw-r--r--  tools/chrome_proxy/common/chrome_proxy_metrics.py                 89
-rw-r--r--  tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py        44
-rw-r--r--  tools/chrome_proxy/common/network_metrics.py (renamed from tools/chrome_proxy/integration_tests/network_metrics.py)        0
-rw-r--r--  tools/chrome_proxy/common/network_metrics_unittest.py (renamed from tools/chrome_proxy/integration_tests/network_metrics_unittest.py)        2
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py            98
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py        109
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py              84
-rw-r--r--  tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py    105
-rw-r--r--  tools/chrome_proxy/live_tests/__init__.py                           0
-rw-r--r--  tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py            49
-rw-r--r--  tools/chrome_proxy/live_tests/chrome_proxy_measurements.py         47
-rw-r--r--  tools/chrome_proxy/live_tests/chrome_proxy_metrics.py             126
-rw-r--r--  tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py    106
-rw-r--r--  tools/chrome_proxy/live_tests/pagesets/__init__.py                 20
-rw-r--r--  tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20.json (renamed from tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/data/chrome_proxy_top_20.json)        0
-rw-r--r--  tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20_000.wpr.sha1 (renamed from tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/data/chrome_proxy_top_20_000.wpr.sha1)        0
-rw-r--r--  tools/chrome_proxy/live_tests/pagesets/top_20.py (renamed from tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/top_20.py)        0
-rwxr-xr-x  tools/chrome_proxy/run_livetests                                   20
21 files changed, 656 insertions, 329 deletions
diff --git a/tools/chrome_proxy/common/__init__.py b/tools/chrome_proxy/common/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/chrome_proxy/common/__init__.py
diff --git a/tools/chrome_proxy/common/chrome_proxy_benchmark.py b/tools/chrome_proxy/common/chrome_proxy_benchmark.py
new file mode 100644
index 0000000..7c4aa82
--- /dev/null
+++ b/tools/chrome_proxy/common/chrome_proxy_benchmark.py
@@ -0,0 +1,21 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from common import chrome_proxy_measurements as measurements
+from telemetry import benchmark
+
+
+class ChromeProxyBenchmark(benchmark.Benchmark):
+ @classmethod
+ def AddCommandLineArgs(cls, parser):
+ parser.add_option(
+ '--extra-chrome-proxy-via-header',
+ type='string', dest="extra_header",
+ help='Adds an expected Via header for the Chrome-Proxy tests.')
+
+ @classmethod
+ def ProcessCommandLineArgs(cls, parser, args):
+ if args.extra_header:
+ measurements.ChromeProxyValidation.extra_via_header = args.extra_header
+
diff --git a/tools/chrome_proxy/common/chrome_proxy_measurements.py b/tools/chrome_proxy/common/chrome_proxy_measurements.py
new file mode 100644
index 0000000..ab8fd67
--- /dev/null
+++ b/tools/chrome_proxy/common/chrome_proxy_measurements.py
@@ -0,0 +1,65 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from common import chrome_proxy_metrics as metrics
+from telemetry.core import exceptions
+from telemetry.page import page_test
+
+class ChromeProxyValidation(page_test.PageTest):
+ """Base class for all chrome proxy correctness measurements."""
+
+ # Value of the extra via header. |None| if no extra via header is expected.
+ extra_via_header = None
+
+ def __init__(self, restart_after_each_page=False, metrics=None):
+ super(ChromeProxyValidation, self).__init__(
+ needs_browser_restart_after_each_page=restart_after_each_page)
+ self._metrics = metrics
+ self._page = None
+ # Whether a timeout exception is expected during the test.
+ self._expect_timeout = False
+
+ def CustomizeBrowserOptions(self, options):
+ # Enable the chrome proxy (data reduction proxy).
+ options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+ def WillNavigateToPage(self, page, tab):
+ tab.ClearCache(force=True)
+ assert self._metrics
+ self._metrics.Start(page, tab)
+
+ def ValidateAndMeasurePage(self, page, tab, results):
+ self._page = page
+ # Wait for the load event.
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ assert self._metrics
+ self._metrics.Stop(page, tab)
+ if ChromeProxyValidation.extra_via_header:
+ self._metrics.AddResultsForExtraViaHeader(
+ tab, results, ChromeProxyValidation.extra_via_header)
+ self.AddResults(tab, results)
+
+ def AddResults(self, tab, results):
+ raise NotImplementedError
+
+ def StopBrowserAfterPage(self, browser, page): # pylint: disable=W0613
+ if hasattr(page, 'restart_after') and page.restart_after:
+ return True
+ return False
+
+ def RunNavigateSteps(self, page, tab):
+ # The redirect from safebrowsing causes a timeout. Ignore that.
+ try:
+ super(ChromeProxyValidation, self).RunNavigateSteps(page, tab)
+ if self._expect_timeout:
+ raise metrics.ChromeProxyMetricException, (
+ 'Timeout was expected, but did not occur')
+ except exceptions.TimeoutException as e:
+ if self._expect_timeout:
+ logging.warning('Navigation timeout on page %s',
+ page.name if page.name else page.url)
+ else:
+ raise e
diff --git a/tools/chrome_proxy/common/chrome_proxy_metrics.py b/tools/chrome_proxy/common/chrome_proxy_metrics.py
new file mode 100644
index 0000000..f28cbb8
--- /dev/null
+++ b/tools/chrome_proxy/common/chrome_proxy_metrics.py
@@ -0,0 +1,89 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from common import network_metrics
+from telemetry.page import page_test
+from telemetry.value import scalar
+
+
+CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
+
+
+class ChromeProxyMetricException(page_test.MeasurementFailure):
+ pass
+
+
+class ChromeProxyResponse(network_metrics.HTTPResponse):
+ """ Represents an HTTP response from a timeline event."""
+ def __init__(self, event):
+ super(ChromeProxyResponse, self).__init__(event)
+
+ def ShouldHaveChromeProxyViaHeader(self):
+ resp = self.response
+ # Ignore https and data url
+ if resp.url.startswith('https') or resp.url.startswith('data:'):
+ return False
+ # Ignore 304 Not Modified and cache hit.
+ if resp.status == 304 or resp.served_from_cache:
+ return False
+ # Ignore invalid responses that don't have any header. Log a warning.
+ if not resp.headers:
+      logging.warning('response for %s does not have any headers '
+ '(refer=%s, status=%s)',
+ resp.url, resp.GetHeader('Referer'), resp.status)
+ return False
+ return True
+
+ def HasChromeProxyViaHeader(self):
+ via_header = self.response.GetHeader('Via')
+ if not via_header:
+ return False
+ vias = [v.strip(' ') for v in via_header.split(',')]
+ # The Via header is valid if it has a 4-character version prefix followed by
+ # the proxy name, for example, "1.1 Chrome-Compression-Proxy".
+ return any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias)
+
+ def HasExtraViaHeader(self, extra_header):
+ via_header = self.response.GetHeader('Via')
+ if not via_header:
+ return False
+ vias = [v.strip(' ') for v in via_header.split(',')]
+ return any(v == extra_header for v in vias)
+
+ def IsValidByViaHeader(self):
+ return (not self.ShouldHaveChromeProxyViaHeader() or
+ self.HasChromeProxyViaHeader())
+
+ def GetChromeProxyClientType(self):
+ """Get the client type directive from the Chrome-Proxy request header.
+
+ Returns:
+ The client type directive from the Chrome-Proxy request header for the
+ request that lead to this response. For example, if the request header
+ "Chrome-Proxy: c=android" is present, then this method would return
+ "android". Returns None if no client type directive is present.
+ """
+ if 'Chrome-Proxy' not in self.response.request_headers:
+ return None
+
+ chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy']
+ values = [v.strip() for v in chrome_proxy_request_header.split(',')]
+ for value in values:
+ kvp = value.split('=', 1)
+ if len(kvp) == 2 and kvp[0].strip() == 'c':
+ return kvp[1].strip()
+ return None
+
+ def HasChromeProxyLoFi(self):
+ if 'Chrome-Proxy' not in self.response.request_headers:
+ return False
+ chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy']
+ values = [v.strip() for v in chrome_proxy_request_header.split(',')]
+ for value in values:
+ if len(value) == 5 and value == 'q=low':
+ return True
+ return False
diff --git a/tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py b/tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py
new file mode 100644
index 0000000..35c9e1b
--- /dev/null
+++ b/tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py
@@ -0,0 +1,44 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+
+from common import chrome_proxy_metrics as metrics
+from common import network_metrics_unittest as network_unittest
+
+
+class ChromeProxyMetricTest(unittest.TestCase):
+
+ def testChromeProxyResponse(self):
+ # An https non-proxy response.
+ resp = metrics.ChromeProxyResponse(
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='https://test.url',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': 'some other via',
+ },
+ body=network_unittest.HTML_BODY))
+ self.assertFalse(resp.ShouldHaveChromeProxyViaHeader())
+ self.assertFalse(resp.HasChromeProxyViaHeader())
+ self.assertTrue(resp.IsValidByViaHeader())
+
+ # A proxied JPEG image response
+ resp = metrics.ChromeProxyResponse(
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True))
+ self.assertTrue(resp.ShouldHaveChromeProxyViaHeader())
+ self.assertTrue(resp.HasChromeProxyViaHeader())
+ self.assertTrue(resp.IsValidByViaHeader())
+
diff --git a/tools/chrome_proxy/integration_tests/network_metrics.py b/tools/chrome_proxy/common/network_metrics.py
index 1af1260..1af1260 100644
--- a/tools/chrome_proxy/integration_tests/network_metrics.py
+++ b/tools/chrome_proxy/common/network_metrics.py
diff --git a/tools/chrome_proxy/integration_tests/network_metrics_unittest.py b/tools/chrome_proxy/common/network_metrics_unittest.py
index a78dd67..c40e526 100644
--- a/tools/chrome_proxy/integration_tests/network_metrics_unittest.py
+++ b/tools/chrome_proxy/common/network_metrics_unittest.py
@@ -5,7 +5,7 @@
import base64
import unittest
-from integration_tests import network_metrics
+from common import network_metrics
from telemetry.timeline import event
from telemetry.unittest_util import test_page_test_results
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py b/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
index b3d699c..e1fcb0a 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
@@ -2,8 +2,9 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from integration_tests import chrome_proxy_measurements as measurements
+from common.chrome_proxy_benchmark import ChromeProxyBenchmark
from integration_tests import chrome_proxy_pagesets as pagesets
+from integration_tests import chrome_proxy_measurements as measurements
from telemetry import benchmark
from telemetry.core.backends.chrome import android_browser_finder
@@ -12,101 +13,6 @@ ANDROID_CHROME_BROWSERS = [
browser for browser in android_browser_finder.CHROME_PACKAGE_NAMES
if 'webview' not in browser]
-class ChromeProxyLatency(benchmark.Benchmark):
- tag = 'latency'
- test = measurements.ChromeProxyLatency
- page_set = pagesets.Top20PageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.latency.top_20'
-
-
-class ChromeProxyLatencyDirect(benchmark.Benchmark):
- tag = 'latency_direct'
- test = measurements.ChromeProxyLatency
- page_set = pagesets.Top20PageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.latency_direct.top_20'
-
-
-class ChromeProxyLatencySynthetic(ChromeProxyLatency):
- page_set = pagesets.SyntheticPageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.latency.synthetic'
-
-
-class ChromeProxyLatencySyntheticDirect(ChromeProxyLatencyDirect):
- page_set = pagesets.SyntheticPageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.latency_direct.synthetic'
-
-
-class ChromeProxyDataSaving(benchmark.Benchmark):
- tag = 'data_saving'
- test = measurements.ChromeProxyDataSaving
- page_set = pagesets.Top20PageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.data_saving.top_20'
-
-
-class ChromeProxyDataSavingDirect(benchmark.Benchmark):
- tag = 'data_saving_direct'
- test = measurements.ChromeProxyDataSaving
- page_set = pagesets.Top20PageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.data_saving_direct.top_20'
-
-
-class ChromeProxyDataSavingSynthetic(ChromeProxyDataSaving):
- page_set = pagesets.SyntheticPageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.data_saving.synthetic'
-
-
-class ChromeProxyDataSavingSyntheticDirect(ChromeProxyDataSavingDirect):
- page_set = pagesets.SyntheticPageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.data_saving_direct.synthetic'
-
-
-class ChromeProxyHeaderValidation(benchmark.Benchmark):
- tag = 'header_validation'
- test = measurements.ChromeProxyHeaders
- page_set = pagesets.Top20PageSet
-
- @classmethod
- def Name(cls):
- return 'chrome_proxy_benchmark.header_validation.top_20'
-
-
-class ChromeProxyBenchmark(benchmark.Benchmark):
- @classmethod
- def AddCommandLineArgs(cls, parser):
- parser.add_option(
- '--extra-chrome-proxy-via-header',
- type='string', dest="extra_header",
- help='Adds an expected Via header for the Chrome-Proxy tests.')
-
- @classmethod
- def ProcessCommandLineArgs(cls, parser, args):
- if args.extra_header:
- measurements.ChromeProxyValidation.extra_via_header = args.extra_header
-
class ChromeProxyClientVersion(ChromeProxyBenchmark):
tag = 'client_version'
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py b/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
index 9b5a7e5..fba1d99 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
@@ -6,6 +6,7 @@ import base64
import logging
import urlparse
+from common.chrome_proxy_measurements import ChromeProxyValidation
from integration_tests import chrome_proxy_metrics as metrics
from metrics import loading
from telemetry.core import exceptions
@@ -50,68 +51,13 @@ class ChromeProxyDataSaving(page_test.PageTest):
self._metrics.AddResultsForDataSaving(tab, results)
-class ChromeProxyValidation(page_test.PageTest):
- """Base class for all chrome proxy correctness measurements."""
-
- # Value of the extra via header. |None| if no extra via header is expected.
- extra_via_header = None
-
- def __init__(self, restart_after_each_page=False):
- super(ChromeProxyValidation, self).__init__(
- needs_browser_restart_after_each_page=restart_after_each_page)
- self._metrics = metrics.ChromeProxyMetric()
- self._page = None
- # Whether a timeout exception is expected during the test.
- self._expect_timeout = False
-
- def CustomizeBrowserOptions(self, options):
- # Enable the chrome proxy (data reduction proxy).
- options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
-
- def WillNavigateToPage(self, page, tab):
- tab.ClearCache(force=True)
- assert self._metrics
- self._metrics.Start(page, tab)
-
- def ValidateAndMeasurePage(self, page, tab, results):
- self._page = page
- # Wait for the load event.
- tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
- assert self._metrics
- self._metrics.Stop(page, tab)
- if ChromeProxyValidation.extra_via_header:
- self._metrics.AddResultsForExtraViaHeader(
- tab, results, ChromeProxyValidation.extra_via_header)
- self.AddResults(tab, results)
-
- def AddResults(self, tab, results):
- raise NotImplementedError
-
- def StopBrowserAfterPage(self, browser, page): # pylint: disable=W0613
- if hasattr(page, 'restart_after') and page.restart_after:
- return True
- return False
-
- def RunNavigateSteps(self, page, tab):
- # The redirect from safebrowsing causes a timeout. Ignore that.
- try:
- super(ChromeProxyValidation, self).RunNavigateSteps(page, tab)
- if self._expect_timeout:
- raise metrics.ChromeProxyMetricException, (
- 'Timeout was expected, but did not occur')
- except exceptions.TimeoutException as e:
- if self._expect_timeout:
- logging.warning('Navigation timeout on page %s',
- page.name if page.name else page.url)
- else:
- raise e
-
-
class ChromeProxyHeaders(ChromeProxyValidation):
"""Correctness measurement for response headers."""
def __init__(self):
- super(ChromeProxyHeaders, self).__init__(restart_after_each_page=True)
+ super(ChromeProxyHeaders, self).__init__(
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def AddResults(self, tab, results):
self._metrics.AddResultsForHeaderValidation(tab, results)
@@ -121,7 +67,9 @@ class ChromeProxyBypass(ChromeProxyValidation):
"""Correctness measurement for bypass responses."""
def __init__(self):
- super(ChromeProxyBypass, self).__init__(restart_after_each_page=True)
+ super(ChromeProxyBypass, self).__init__(
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def AddResults(self, tab, results):
self._metrics.AddResultsForBypass(tab, results)
@@ -131,7 +79,9 @@ class ChromeProxyCorsBypass(ChromeProxyValidation):
"""Correctness measurement for bypass responses for CORS requests."""
def __init__(self):
- super(ChromeProxyCorsBypass, self).__init__(restart_after_each_page=True)
+ super(ChromeProxyCorsBypass, self).__init__(
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def ValidateAndMeasurePage(self, page, tab, results):
# The test page sets window.xhrRequestCompleted to true when the XHR fetch
@@ -148,7 +98,9 @@ class ChromeProxyBlockOnce(ChromeProxyValidation):
"""Correctness measurement for block-once responses."""
def __init__(self):
- super(ChromeProxyBlockOnce, self).__init__(restart_after_each_page=True)
+ super(ChromeProxyBlockOnce, self).__init__(
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def AddResults(self, tab, results):
self._metrics.AddResultsForBlockOnce(tab, results)
@@ -158,7 +110,8 @@ class ChromeProxySafebrowsingOn(ChromeProxyValidation):
"""Correctness measurement for safebrowsing."""
def __init__(self):
- super(ChromeProxySafebrowsingOn, self).__init__()
+ super(ChromeProxySafebrowsingOn, self).__init__(
+ metrics=metrics.ChromeProxyMetric())
def WillNavigateToPage(self, page, tab):
super(ChromeProxySafebrowsingOn, self).WillNavigateToPage(page, tab)
@@ -171,7 +124,8 @@ class ChromeProxySafebrowsingOff(ChromeProxyValidation):
"""Correctness measurement for safebrowsing."""
def __init__(self):
- super(ChromeProxySafebrowsingOff, self).__init__()
+ super(ChromeProxySafebrowsingOff, self).__init__(
+ metrics=metrics.ChromeProxyMetric())
def AddResults(self, tab, results):
self._metrics.AddResultsForSafebrowsingOff(tab, results)
@@ -218,7 +172,8 @@ class ChromeProxyHTTPFallbackProbeURL(ChromeProxyValidation):
def __init__(self):
super(ChromeProxyHTTPFallbackProbeURL, self).__init__(
- restart_after_each_page=True)
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def CustomizeBrowserOptions(self, options):
super(ChromeProxyHTTPFallbackProbeURL,
@@ -247,7 +202,8 @@ class ChromeProxyHTTPFallbackViaHeader(ChromeProxyValidation):
def __init__(self):
super(ChromeProxyHTTPFallbackViaHeader, self).__init__(
- restart_after_each_page=True)
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def CustomizeBrowserOptions(self, options):
super(ChromeProxyHTTPFallbackViaHeader,
@@ -269,7 +225,8 @@ class ChromeProxyClientVersion(ChromeProxyValidation):
"""
def __init__(self):
- super(ChromeProxyClientVersion, self).__init__()
+ super(ChromeProxyClientVersion, self).__init__(
+ metrics=metrics.ChromeProxyMetric())
def CustomizeBrowserOptions(self, options):
super(ChromeProxyClientVersion,
@@ -284,7 +241,9 @@ class ChromeProxyClientType(ChromeProxyValidation):
"""Correctness measurement for Chrome-Proxy header client type directives."""
def __init__(self):
- super(ChromeProxyClientType, self).__init__(restart_after_each_page=True)
+ super(ChromeProxyClientType, self).__init__(
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
self._chrome_proxy_client_type = None
def AddResults(self, tab, results):
@@ -306,7 +265,8 @@ class ChromeProxyLoFi(ChromeProxyValidation):
"""Correctness measurement for Lo-Fi in Chrome-Proxy header."""
def __init__(self):
- super(ChromeProxyLoFi, self).__init__(restart_after_each_page=True)
+ super(ChromeProxyLoFi, self).__init__(restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def CustomizeBrowserOptions(self, options):
super(ChromeProxyLoFi, self).CustomizeBrowserOptions(options)
@@ -323,7 +283,9 @@ class ChromeProxyExpDirective(ChromeProxyValidation):
"""
def __init__(self):
- super(ChromeProxyExpDirective, self).__init__(restart_after_each_page=True)
+ super(ChromeProxyExpDirective, self).__init__(
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def CustomizeBrowserOptions(self, options):
super(ChromeProxyExpDirective, self).CustomizeBrowserOptions(options)
@@ -338,7 +300,8 @@ class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation):
def __init__(self):
super(ChromeProxyHTTPToDirectFallback, self).__init__(
- restart_after_each_page=True)
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def CustomizeBrowserOptions(self, options):
super(ChromeProxyHTTPToDirectFallback,
@@ -370,7 +333,8 @@ class ChromeProxyReenableAfterBypass(ChromeProxyValidation):
def __init__(self):
super(ChromeProxyReenableAfterBypass, self).__init__(
- restart_after_each_page=True)
+ restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def AddResults(self, tab, results):
self._metrics.AddResultsForReenableAfterBypass(
@@ -382,7 +346,8 @@ class ChromeProxySmoke(ChromeProxyValidation):
"""Smoke measurement for basic chrome proxy correctness."""
def __init__(self):
- super(ChromeProxySmoke, self).__init__(restart_after_each_page=True)
+ super(ChromeProxySmoke, self).__init__(restart_after_each_page=True,
+ metrics=metrics.ChromeProxyMetric())
def WillNavigateToPage(self, page, tab):
super(ChromeProxySmoke, self).WillNavigateToPage(page, tab)
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
index 68ba33f..d03bda9 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
@@ -5,89 +5,13 @@
import logging
import time
-from integration_tests import network_metrics
+from common import chrome_proxy_metrics
+from common import network_metrics
+from common.chrome_proxy_metrics import ChromeProxyMetricException
from telemetry.page import page_test
from telemetry.value import scalar
-class ChromeProxyMetricException(page_test.MeasurementFailure):
- pass
-
-
-CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
-
-
-class ChromeProxyResponse(network_metrics.HTTPResponse):
- """ Represents an HTTP response from a timeleine event."""
- def __init__(self, event):
- super(ChromeProxyResponse, self).__init__(event)
-
- def ShouldHaveChromeProxyViaHeader(self):
- resp = self.response
- # Ignore https and data url
- if resp.url.startswith('https') or resp.url.startswith('data:'):
- return False
- # Ignore 304 Not Modified and cache hit.
- if resp.status == 304 or resp.served_from_cache:
- return False
- # Ignore invalid responses that don't have any header. Log a warning.
- if not resp.headers:
- logging.warning('response for %s does not any have header '
- '(refer=%s, status=%s)',
- resp.url, resp.GetHeader('Referer'), resp.status)
- return False
- return True
-
- def HasChromeProxyViaHeader(self):
- via_header = self.response.GetHeader('Via')
- if not via_header:
- return False
- vias = [v.strip(' ') for v in via_header.split(',')]
- # The Via header is valid if it has a 4-character version prefix followed by
- # the proxy name, for example, "1.1 Chrome-Compression-Proxy".
- return any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias)
-
- def HasExtraViaHeader(self, extra_header):
- via_header = self.response.GetHeader('Via')
- if not via_header:
- return False
- vias = [v.strip(' ') for v in via_header.split(',')]
- return any(v == extra_header for v in vias)
-
- def IsValidByViaHeader(self):
- return (not self.ShouldHaveChromeProxyViaHeader() or
- self.HasChromeProxyViaHeader())
-
- def GetChromeProxyClientType(self):
- """Get the client type directive from the Chrome-Proxy request header.
-
- Returns:
- The client type directive from the Chrome-Proxy request header for the
- request that lead to this response. For example, if the request header
- "Chrome-Proxy: c=android" is present, then this method would return
- "android". Returns None if no client type directive is present.
- """
- if 'Chrome-Proxy' not in self.response.request_headers:
- return None
-
- chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy']
- values = [v.strip() for v in chrome_proxy_request_header.split(',')]
- for value in values:
- kvp = value.split('=', 1)
- if len(kvp) == 2 and kvp[0].strip() == 'c':
- return kvp[1].strip()
- return None
-
- def HasChromeProxyLoFi(self):
- if 'Chrome-Proxy' not in self.response.request_headers:
- return False
- chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy']
- values = [v.strip() for v in chrome_proxy_request_header.split(',')]
- for value in values:
- if len(value) == 5 and value == 'q=low':
- return True
- return False
-
class ChromeProxyMetric(network_metrics.NetworkMetric):
"""A Chrome proxy timeline metric."""
@@ -100,7 +24,7 @@ class ChromeProxyMetric(network_metrics.NetworkMetric):
self._events = events
def ResponseFromEvent(self, event):
- return ChromeProxyResponse(event)
+ return chrome_proxy_metrics.ChromeProxyResponse(event)
def AddResults(self, tab, results):
raise NotImplementedError
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
index b18ef5e..0f61619 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
+++ b/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
@@ -5,8 +5,9 @@
import base64
import unittest
+from common import chrome_proxy_metrics as common_metrics
+from common import network_metrics_unittest as network_unittest
from integration_tests import chrome_proxy_metrics as metrics
-from integration_tests import network_metrics_unittest as network_unittest
from telemetry.unittest_util import test_page_test_results
TEST_EXTRA_VIA_HEADER = '1.1 EXTRA_VIA_HEADER'
@@ -29,7 +30,7 @@ EVENT_HTML_PROXY_VIA = (
'Content-Type': 'text/html',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
},
body=network_unittest.HTML_BODY,
remote_port=443))
@@ -42,7 +43,7 @@ EVENT_HTML_PROXY_EXTRA_VIA = (
'Content-Type': 'text/html',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER + ", " +
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER + ", " +
TEST_EXTRA_VIA_HEADER,
},
body=network_unittest.HTML_BODY,
@@ -56,7 +57,7 @@ EVENT_HTML_PROXY_VIA_HTTP_FALLBACK = (
'Content-Type': 'text/html',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
},
body=network_unittest.HTML_BODY,
remote_port=80))
@@ -69,7 +70,7 @@ EVENT_IMAGE_PROXY_VIA = (
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True,
@@ -83,7 +84,7 @@ EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK = (
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True,
@@ -97,7 +98,7 @@ EVENT_IMAGE_PROXY_CACHED = (
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True,
@@ -120,7 +121,7 @@ EVENT_MALWARE_PROXY = (
url='http://test.malware',
response_headers={
'X-Malware-Url': '1',
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
'Location': 'http://test.malware',
},
status=307))
@@ -132,7 +133,7 @@ EVENT_IMAGE_BYPASS = (
response_headers={
'Chrome-Proxy': 'bypass=1',
'Content-Type': 'text/html',
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
},
status=502))
@@ -158,63 +159,6 @@ class ChromeProxyMetricTest(unittest.TestCase):
metrics.GetProxyInfoFromNetworkInternals = stub
ChromeProxyMetricTest._test_proxy_info = info
- def testChromeProxyResponse(self):
- # An https non-proxy response.
- resp = metrics.ChromeProxyResponse(
- network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
- url='https://test.url',
- response_headers={
- 'Content-Type': 'text/html',
- 'Content-Length': str(len(network_unittest.HTML_BODY)),
- 'Via': 'some other via',
- },
- body=network_unittest.HTML_BODY))
- self.assertFalse(resp.ShouldHaveChromeProxyViaHeader())
- self.assertFalse(resp.HasChromeProxyViaHeader())
- self.assertTrue(resp.IsValidByViaHeader())
-
- # A proxied JPEG image response
- resp = metrics.ChromeProxyResponse(
- network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
- url='http://test.image',
- response_headers={
- 'Content-Type': 'image/jpeg',
- 'Content-Encoding': 'gzip',
- 'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
- 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
- },
- body=base64.b64encode(network_unittest.IMAGE_BODY),
- base64_encoded_body=True))
- self.assertTrue(resp.ShouldHaveChromeProxyViaHeader())
- self.assertTrue(resp.HasChromeProxyViaHeader())
- self.assertTrue(resp.IsValidByViaHeader())
-
- def testChromeProxyMetricForDataSaving(self):
- metric = metrics.ChromeProxyMetric()
- events = [
- EVENT_HTML_DIRECT,
- EVENT_HTML_PROXY_VIA,
- EVENT_IMAGE_PROXY_CACHED,
- EVENT_IMAGE_DIRECT]
- metric.SetEvents(events)
-
- self.assertTrue(len(events), len(list(metric.IterResponses(None))))
- results = test_page_test_results.TestPageTestResults(self)
-
- metric.AddResultsForDataSaving(None, results)
- results.AssertHasPageSpecificScalarValue('resources_via_proxy', 'count', 2)
- results.AssertHasPageSpecificScalarValue('resources_from_cache', 'count', 1)
- results.AssertHasPageSpecificScalarValue('resources_direct', 'count', 2)
-
- # Passing in zero responses should cause a failure.
- metric.SetEvents([])
- no_responses_exception = False
- try:
- metric.AddResultsForDataSaving(None, results)
- except metrics.ChromeProxyMetricException:
- no_responses_exception = True
- self.assertTrue(no_responses_exception)
-
def testChromeProxyMetricForHeaderValidation(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([
@@ -228,7 +172,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
missing_via_exception = False
try:
metric.AddResultsForHeaderValidation(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
missing_via_exception = True
# Only the HTTP image response does not have a valid Via header.
self.assertTrue(missing_via_exception)
@@ -245,7 +189,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
no_responses_exception = False
try:
metric.AddResultsForHeaderValidation(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
@@ -263,7 +207,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
exception_occurred = False
try:
metric.AddResultsForExtraViaHeader(None, results, TEST_EXTRA_VIA_HEADER)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
exception_occurred = True
# The response had the chrome proxy via header, but not the extra expected
# via header.
@@ -281,7 +225,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
bypass_exception = False
try:
metric.AddResultsForBypass(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
bypass_exception = True
# Two of the first three events have Via headers.
self.assertTrue(bypass_exception)
@@ -296,7 +240,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
no_responses_exception = False
try:
metric.AddResultsForBypass(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
@@ -314,7 +258,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
no_responses_exception = False
try:
metric.AddResultsForCorsBypass(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
@@ -332,7 +276,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
exception_occurred = False
try:
metric.AddResultsForBlockOnce(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
exception_occurred = True
# The second response was over direct, but was expected via proxy.
self.assertTrue(exception_occurred)
@@ -342,7 +286,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
no_responses_exception = False
try:
metric.AddResultsForBlockOnce(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
@@ -375,7 +319,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
exception_occurred = False
try:
metric.AddResultsForHTTPFallback(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
exception_occurred = True
# The responses came through the SPDY proxy, but were expected through the
# HTTP fallback proxy.
@@ -386,7 +330,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
no_responses_exception = False
try:
metric.AddResultsForHTTPFallback(None, results)
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
@@ -405,7 +349,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
exception_occurred = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
exception_occurred = True
# The first response was expected through the HTTP fallback proxy.
self.assertTrue(exception_occurred)
@@ -416,7 +360,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
exception_occurred = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
exception_occurred = True
# All but the first response were expected to be over direct.
self.assertTrue(exception_occurred)
@@ -427,7 +371,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
exception_occurred = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
exception_occurred = True
# The first response was expected through the HTTP fallback proxy.
self.assertTrue(exception_occurred)
@@ -437,6 +381,7 @@ class ChromeProxyMetricTest(unittest.TestCase):
no_responses_exception = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
- except metrics.ChromeProxyMetricException:
+ except common_metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
+
diff --git a/tools/chrome_proxy/live_tests/__init__.py b/tools/chrome_proxy/live_tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/chrome_proxy/live_tests/__init__.py
diff --git a/tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py b/tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py
new file mode 100644
index 0000000..6ed90c9
--- /dev/null
+++ b/tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py
@@ -0,0 +1,49 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from live_tests import chrome_proxy_measurements as measurements
+from live_tests import pagesets
+from telemetry import benchmark
+from telemetry.core.backends.chrome import android_browser_finder
+
+
+class ChromeProxyLatency(benchmark.Benchmark):
+ tag = 'latency'
+ test = measurements.ChromeProxyLatency
+ page_set = pagesets.Top20PageSet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.latency.top_20'
+
+
+class ChromeProxyLatencyDirect(benchmark.Benchmark):
+ tag = 'latency_direct'
+ test = measurements.ChromeProxyLatency
+ page_set = pagesets.Top20PageSet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.latency_direct.top_20'
+
+
+class ChromeProxyDataSaving(benchmark.Benchmark):
+ tag = 'data_saving'
+ test = measurements.ChromeProxyDataSaving
+ page_set = pagesets.Top20PageSet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.data_saving.top_20'
+
+
+class ChromeProxyDataSavingDirect(benchmark.Benchmark):
+ tag = 'data_saving_direct'
+ test = measurements.ChromeProxyDataSaving
+ page_set = pagesets.Top20PageSet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.data_saving_direct.top_20'
+
diff --git a/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py b/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
new file mode 100644
index 0000000..066c26f
--- /dev/null
+++ b/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
@@ -0,0 +1,47 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+import chrome_proxy_metrics as metrics
+from telemetry.core import exceptions
+from telemetry.page import page_test
+
+class ChromeProxyLatency(page_test.PageTest):
+ """Chrome proxy latency measurement."""
+
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyLatency, self).__init__(*args, **kwargs)
+ self._metrics = metrics.ChromeProxyMetric()
+
+ def CustomizeBrowserOptions(self, options):
+ options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+ def WillNavigateToPage(self, page, tab):
+ tab.ClearCache(force=True)
+
+ def ValidateAndMeasurePage(self, page, tab, results):
+ # Wait for the load event.
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ self._metrics.AddResultsForLatency(tab, results)
+
+
+class ChromeProxyDataSaving(page_test.PageTest):
+ """Chrome proxy data saving measurement."""
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyDataSaving, self).__init__(*args, **kwargs)
+ self._metrics = metrics.ChromeProxyMetric()
+
+ def CustomizeBrowserOptions(self, options):
+ options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+ def WillNavigateToPage(self, page, tab):
+ tab.ClearCache(force=True)
+ self._metrics.Start(page, tab)
+
+ def ValidateAndMeasurePage(self, page, tab, results):
+ # Wait for the load event.
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ self._metrics.Stop(page, tab)
+ self._metrics.AddResultsForDataSaving(tab, results)
diff --git a/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py b/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py
new file mode 100644
index 0000000..79e4f50
--- /dev/null
+++ b/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py
@@ -0,0 +1,126 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from common import chrome_proxy_metrics
+from common import network_metrics
+from common.chrome_proxy_metrics import ChromeProxyMetricException
+from telemetry.page import page_test
+from telemetry.value import scalar
+
+
+class ChromeProxyMetric(network_metrics.NetworkMetric):
+ """A Chrome proxy timeline metric."""
+
+ def __init__(self):
+ super(ChromeProxyMetric, self).__init__()
+ self.compute_data_saving = True
+
+ def SetEvents(self, events):
+ """Used for unittest."""
+ self._events = events
+
+ def ResponseFromEvent(self, event):
+ return chrome_proxy_metrics.ChromeProxyResponse(event)
+
+ def AddResults(self, tab, results):
+ raise NotImplementedError
+
+ def AddResultsForDataSaving(self, tab, results):
+ resources_via_proxy = 0
+ resources_from_cache = 0
+ resources_direct = 0
+
+ super(ChromeProxyMetric, self).AddResults(tab, results)
+ for resp in self.IterResponses(tab):
+ if resp.response.served_from_cache:
+ resources_from_cache += 1
+ if resp.HasChromeProxyViaHeader():
+ resources_via_proxy += 1
+ else:
+ resources_direct += 1
+
+ if resources_from_cache + resources_via_proxy + resources_direct == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response, but zero responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_via_proxy', 'count',
+ resources_via_proxy))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_from_cache', 'count',
+ resources_from_cache))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_direct', 'count', resources_direct))
+
+ def AddResultsForLatency(self, tab, results):
+ # TODO(bustamante): This is a hack to workaround crbug.com/467174,
+ # once fixed just pull down window.performance.timing object and
+ # reference that everywhere.
+ load_event_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.loadEventStart')
+ navigation_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.navigationStart')
+ dom_content_loaded_event_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.domContentLoadedEventStart')
+ fetch_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.fetchStart')
+ request_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.requestStart')
+ domain_lookup_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.domainLookupEnd')
+ domain_lookup_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.domainLookupStart')
+ connect_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.connectEnd')
+ connect_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.connectStart')
+ response_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.responseEnd')
+ response_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.responseStart')
+
+ # NavigationStart relative markers in milliseconds.
+ load_start = (float(load_event_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'load_start', 'ms', load_start))
+
+ dom_content_loaded_start = (
+ float(dom_content_loaded_event_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'dom_content_loaded_start', 'ms',
+ dom_content_loaded_start))
+
+ fetch_start = (float(fetch_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'fetch_start', 'ms', fetch_start,
+ important=False))
+
+ request_start = (float(request_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'request_start', 'ms', request_start,
+ important=False))
+
+ # Phase measurements in milliseconds.
+ domain_lookup_duration = (float(domain_lookup_end) - domain_lookup_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'domain_lookup_duration', 'ms',
+ domain_lookup_duration, important=False))
+
+ connect_duration = (float(connect_end) - connect_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'connect_duration', 'ms', connect_duration,
+ important=False))
+
+ request_duration = (float(response_start) - request_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'request_duration', 'ms', request_duration,
+ important=False))
+
+ response_duration = (float(response_end) - response_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'response_duration', 'ms', response_duration,
+ important=False))
diff --git a/tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py b/tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py
new file mode 100644
index 0000000..c2e2571
--- /dev/null
+++ b/tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py
@@ -0,0 +1,106 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+
+from common import chrome_proxy_metrics as common_metrics
+from common import network_metrics_unittest as network_unittest
+from live_tests import chrome_proxy_metrics as metrics
+from telemetry.unittest_util import test_page_test_results
+
+TEST_EXTRA_VIA_HEADER = '1.1 EXTRA_VIA_HEADER'
+
+# Timeline events used in tests.
+# An HTML not via proxy.
+EVENT_HTML_DIRECT = network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html1',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Length': str(len(network_unittest.HTML_BODY)),
+ },
+ body=network_unittest.HTML_BODY)
+
+# An HTML via proxy.
+EVENT_HTML_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=network_unittest.HTML_BODY,
+ remote_port=443))
+
+# An image via proxy with Via header.
+EVENT_IMAGE_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ remote_port=443))
+
+# An image via proxy with Via header and it is cached.
+EVENT_IMAGE_PROXY_CACHED = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ served_from_cache=True))
+
+
+# An image fetched directly.
+EVENT_IMAGE_DIRECT = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True))
+
+
+class ChromeProxyMetricTest(unittest.TestCase):
+
+ def testChromeProxyMetricForDataSaving(self):
+ metric = metrics.ChromeProxyMetric()
+ events = [
+ EVENT_HTML_DIRECT,
+ EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_PROXY_CACHED,
+ EVENT_IMAGE_DIRECT]
+ metric.SetEvents(events)
+
+ self.assertTrue(len(events), len(list(metric.IterResponses(None))))
+ results = test_page_test_results.TestPageTestResults(self)
+
+ metric.AddResultsForDataSaving(None, results)
+ results.AssertHasPageSpecificScalarValue('resources_via_proxy', 'count', 2)
+ results.AssertHasPageSpecificScalarValue('resources_from_cache', 'count', 1)
+ results.AssertHasPageSpecificScalarValue('resources_direct', 'count', 2)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForDataSaving(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
diff --git a/tools/chrome_proxy/live_tests/pagesets/__init__.py b/tools/chrome_proxy/live_tests/pagesets/__init__.py
new file mode 100644
index 0000000..1ea4db1
--- /dev/null
+++ b/tools/chrome_proxy/live_tests/pagesets/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import inspect
+import os
+import sys
+
+from telemetry.core import discover
+from telemetry.page import page_set
+
+
+# Import all submodules' PageSet classes.
+start_dir = os.path.dirname(os.path.abspath(__file__))
+top_level_dir = os.path.abspath(os.path.join(start_dir, os.pardir, os.pardir))
+base_class = page_set.PageSet
+for cls in discover.DiscoverClasses(
+ start_dir, top_level_dir, base_class).values():
+ setattr(sys.modules[__name__], cls.__name__, cls)
+
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/data/chrome_proxy_top_20.json b/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20.json
index 7fdb557..7fdb557 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/data/chrome_proxy_top_20.json
+++ b/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20.json
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/data/chrome_proxy_top_20_000.wpr.sha1 b/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20_000.wpr.sha1
index b9a9f39..b9a9f39 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/data/chrome_proxy_top_20_000.wpr.sha1
+++ b/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20_000.wpr.sha1
diff --git a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/top_20.py b/tools/chrome_proxy/live_tests/pagesets/top_20.py
index 22ec8a6..22ec8a6 100644
--- a/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/top_20.py
+++ b/tools/chrome_proxy/live_tests/pagesets/top_20.py
diff --git a/tools/chrome_proxy/run_livetests b/tools/chrome_proxy/run_livetests
new file mode 100755
index 0000000..3246db6
--- /dev/null
+++ b/tools/chrome_proxy/run_livetests
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'telemetry'))
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'perf'))
+
+from telemetry import benchmark_runner
+
+
+if __name__ == '__main__':
+ top_level_dir = os.path.dirname(os.path.realpath(__file__))
+ environment = benchmark_runner.Environment(
+ top_level_dir=top_level_dir,
+ benchmark_dirs=[os.path.join(top_level_dir, 'live_tests')])
+ sys.exit(benchmark_runner.main(environment))
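
Usage sketch (illustrative, not part of the patch): the run_livetests script above points Telemetry's benchmark_runner at the new live_tests directory, so the relocated top_20 benchmarks can be run on demand rather than on the waterfall. Assuming a Chromium checkout with Telemetry alongside tools/perf, an invocation would look roughly like:

  $ tools/chrome_proxy/run_livetests list
  $ tools/chrome_proxy/run_livetests chrome_proxy_benchmark.latency.top_20 --browser=<browser-type>

The benchmark names ('chrome_proxy_benchmark.latency.top_20', 'chrome_proxy_benchmark.data_saving.top_20', etc.) come from the Name() methods in live_tests/chrome_proxy_benchmark.py; the exact subcommands and flags depend on the Telemetry revision checked out with this change, and <browser-type> is a placeholder for whatever browser Telemetry can find locally.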