path: root/tools/chrome_proxy/common/chrome_proxy_measurements.py
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging

from common import chrome_proxy_metrics as metrics
from telemetry.core import exceptions
from telemetry.page import page_test

class ChromeProxyValidation(page_test.PageTest):
  """Base class for all chrome proxy correctness measurements."""

  # Value of the extra via header. |None| if no extra via header is expected.
  extra_via_header = None

  def __init__(self, restart_after_each_page=False, metrics=None):
    super(ChromeProxyValidation, self).__init__(
        needs_browser_restart_after_each_page=restart_after_each_page)
    self._metrics = metrics
    self._page = None
    # Whether a timeout exception is expected during the test.
    self._expect_timeout = False

  def CustomizeBrowserOptions(self, options):
    # Enable the chrome proxy (data reduction proxy).
    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')

  def WillNavigateToPage(self, page, tab):
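    """Clears the browser cache and starts metric collection before navigating."""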
    tab.ClearCache(force=True)
    assert self._metrics
    self._metrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
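    """Waits for the page load, stops metric collection, and records results."""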
    self._page = page
    # Wait for the load event.
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
    assert self._metrics
    self._metrics.Stop(page, tab)
    if ChromeProxyValidation.extra_via_header:
      self._metrics.AddResultsForExtraViaHeader(
          tab, results, ChromeProxyValidation.extra_via_header)
    self.AddResults(tab, results)

  def AddResults(self, tab, results):
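    """Adds measurement-specific results. Subclasses must override this."""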
    raise NotImplementedError

  def StopBrowserAfterPage(self, browser, page):  # pylint: disable=W0613
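    """Returns True if the browser should be restarted after |page|."""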
    if hasattr(page, 'restart_after') and page.restart_after:
      return True
    return False

  def RunNavigateSteps(self, page, tab):
    # The redirect from safebrowsing causes a timeout. Ignore that.
    try:
      super(ChromeProxyValidation, self).RunNavigateSteps(page, tab)
      if self._expect_timeout:
        raise metrics.ChromeProxyMetricException(
            'Timeout was expected, but did not occur')
    except exceptions.TimeoutException as e:
      if self._expect_timeout:
        logging.warning('Navigation timeout on page %s',
                        page.name if page.name else page.url)
      else:
        raise e
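

# Illustrative sketch (not part of the original file): how a concrete
# measurement typically subclasses ChromeProxyValidation. The metric class
# (metrics.ChromeProxyMetric) and its AddResultsForHeaderValidation method
# are assumed here for illustration; the real helpers may live elsewhere.
class ChromeProxyHeaderValidation(ChromeProxyValidation):
  """Example correctness measurement that checks the proxy via header."""

  def __init__(self):
    super(ChromeProxyHeaderValidation, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  def AddResults(self, tab, results):
    # Delegate to the metrics object started in WillNavigateToPage and
    # stopped in ValidateAndMeasurePage.
    self._metrics.AddResultsForHeaderValidation(tab, results)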