summaryrefslogtreecommitdiffstats
path: root/tools/chrome_proxy/common/chrome_proxy_measurements.py
blob: 1628cceb208221e9e1c1090ef03c4763790638dd (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import base64
import logging

from common import chrome_proxy_metrics as metrics
from telemetry.core import exceptions
from telemetry.page import page_test


def WaitForViaHeader(tab, url="http://check.googlezip.net/test.html"):
  """Wait until responses start coming back with the Chrome Proxy via header.

  Poll |url| in |tab| until the Chrome Proxy via header is present in a
  response.

  This function is useful when testing with the Data Saver API, since Chrome
  won't actually start sending requests to the Data Reduction Proxy until the
  Data Saver API fetch completes. This function can be used to wait for the Data
  Saver API fetch to complete.

  Args:
    tab: The telemetry tab to navigate and run the polling JavaScript in.
    url: The URL to probe for the Chrome Proxy via header.
  """
  # The probe page defines ProbeViaHeader(), which issues a synchronous HEAD
  # request for the given URL and reports whether the response's "via" header
  # contains the wanted value. Any exception is treated as "not yet".
  # NOTE(review): per the current XHR spec, open(..., false) throws when
  # xmlhttp.timeout is nonzero in a window context, which the catch block
  # would silently turn into a perpetual |false|; confirm the 15ms timeout is
  # still honored on the Chrome versions under test.
  probe_page_html = (
      '<html><body><script>'
      'function ProbeViaHeader(url, wanted_via) {'
        'try {'
          'var xmlhttp = new XMLHttpRequest();'
          'xmlhttp.timeout = 15;'
          'xmlhttp.open("HEAD",url,false);'
          'xmlhttp.send();'
          'var via=xmlhttp.getResponseHeader("via");'
          'return (via && via.indexOf(wanted_via) != -1);'
         '} catch (err) {'
           'return false;'
         '}'
      '}'
      '</script>'
      'Waiting for Chrome to start using the DRP...'
      '</body></html>')

  # encode/decode keeps this correct on both Python 2 and Python 3:
  # base64.b64encode requires bytes on Python 3 and would otherwise leak a
  # b'...' repr into the data URL via the %s interpolation.
  tab.Navigate('data:text/html;base64,%s' %
               base64.b64encode(probe_page_html.encode('utf-8'))
                   .decode('ascii'))

  # Ensure the page has started loading before attempting the DRP check.
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  tab.WaitForJavaScriptExpression(
    'ProbeViaHeader("%s", "%s")' % (url, metrics.CHROME_PROXY_VIA_HEADER), 300)


class ChromeProxyValidation(page_test.PageTest):
  """Base class for all chrome proxy correctness measurements."""

  # Value of the extra via header. |None| if no extra via header is expected.
  extra_via_header = None

  def __init__(self, restart_after_each_page=False, metrics=None):
    super(ChromeProxyValidation, self).__init__(
        needs_browser_restart_after_each_page=restart_after_each_page)
    # Metrics collector driving Start/Stop around each page load; subclasses
    # are expected to supply one before navigation begins.
    self._metrics = metrics
    # Most recently measured page; set in ValidateAndMeasurePage.
    self._page = None

  def CustomizeBrowserOptions(self, options):
    """Turns on the chrome proxy (data reduction proxy) via a browser flag."""
    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')

  def WillNavigateToPage(self, page, tab):
    """Waits for the proxy to be active, then arms metrics on a cold cache."""
    WaitForViaHeader(tab)
    tab.ClearCache(force=True)
    assert self._metrics
    self._metrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    """Stops metric collection after load and records the results."""
    self._page = page
    # Block until the page's load event has started firing.
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
    assert self._metrics
    self._metrics.Stop(page, tab)
    # Base-class attribute lookup is intentional here (matches the original):
    # only the value set on ChromeProxyValidation itself is consulted.
    extra_via = ChromeProxyValidation.extra_via_header
    if extra_via:
      self._metrics.AddResultsForExtraViaHeader(tab, results, extra_via)
    self.AddResults(tab, results)

  def AddResults(self, tab, results):
    """Subclass hook: add measurement-specific results. Must be overridden."""
    raise NotImplementedError

  def StopBrowserAfterPage(self, browser, page):  # pylint: disable=W0613
    """Restarts the browser after pages that set a truthy |restart_after|."""
    return bool(getattr(page, 'restart_after', False))