summaryrefslogtreecommitdiffstats
path: root/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py
blob: 79e4f50021ffd7f53ab00ef341ce88c098b9cf02 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import time

from common import chrome_proxy_metrics
from common import network_metrics
from common.chrome_proxy_metrics import ChromeProxyMetricException
from telemetry.page import page_test
from telemetry.value import scalar


class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    """Initializes the metric with data-saving computation enabled."""
    super(ChromeProxyMetric, self).__init__()
    # Flag presumably read by the NetworkMetric base class to enable
    # data-saving statistics; it is not referenced elsewhere in this view.
    self.compute_data_saving = True

  def SetEvents(self, events):
    """Used for unittest.

    Overrides the collected timeline events with |events| so tests can
    inject canned data (``_events`` is presumably populated by the
    NetworkMetric base class during a real run -- confirm against base).
    """
    self._events = events

  def ResponseFromEvent(self, event):
    """Wraps a timeline network |event| in a ChromeProxyResponse.

    Factory hook so that IterResponses (used in AddResultsForDataSaving)
    yields proxy-aware responses exposing e.g. HasChromeProxyViaHeader().
    """
    return chrome_proxy_metrics.ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    """Abstract hook: callers must use a subclass (or a specific
    AddResultsFor* method) to record values for |tab| into |results|."""
    raise NotImplementedError

  def AddResultsForDataSaving(self, tab, results):
    """Records how each response on the page was served.

    Adds three scalar counts to |results|: responses carrying the Chrome
    proxy Via header ('resources_via_proxy'), responses without it
    ('resources_direct'), and responses served from cache
    ('resources_from_cache').

    Raises:
      ChromeProxyMetricException: if no responses were observed at all.
    """
    resources_via_proxy = 0
    resources_from_cache = 0
    resources_direct = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      # NOTE: deliberately not an elif -- a cached response is counted in
      # the cache bucket AND in the via/direct split below, so the buckets
      # overlap (presumably intentional; confirm with metric consumers).
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
      else:
        resources_direct += 1

    # Zero responses means nothing was recorded for the page load, which
    # would make all of the data-saving counts meaningless.
    if resources_from_cache + resources_via_proxy + resources_direct == 0:
      # Use the call form of raise: identical semantics on Python 2, and
      # unlike the old comma form it is not a SyntaxError on Python 3.
      raise ChromeProxyMetricException(
          'Expected at least one response, but zero responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_via_proxy', 'count',
        resources_via_proxy))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_from_cache', 'count',
        resources_from_cache))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_direct', 'count', resources_direct))

  def AddResultsForLatency(self, tab, results):
    """Records navigation-timing latency values for the page.

    Reads window.performance.timing markers from |tab| and adds them to
    |results| as millisecond scalars: markers relative to navigationStart
    (load_start, dom_content_loaded_start, fetch_start, request_start) and
    per-phase durations (domain_lookup_duration, connect_duration,
    request_duration, response_duration).
    """
    # TODO(bustamante): This is a hack to workaround crbug.com/467174,
    #   once fixed just pull down window.performance.timing object and
    #   reference that everywhere.
    # Fetch each marker individually, preserving the original query order.
    timing = {}
    for marker in ('loadEventStart', 'navigationStart',
                   'domContentLoadedEventStart', 'fetchStart',
                   'requestStart', 'domainLookupEnd', 'domainLookupStart',
                   'connectEnd', 'connectStart', 'responseEnd',
                   'responseStart'):
      timing[marker] = tab.EvaluateJavaScript(
          'window.performance.timing.%s' % marker)

    navigation_start = timing['navigationStart']

    def _Record(name, value, **kwargs):
      # Helper: emit one millisecond scalar for the current page.
      results.AddValue(scalar.ScalarValue(
          results.current_page, name, 'ms', value, **kwargs))

    # NavigationStart relative markers in milliseconds.
    _Record('load_start', float(timing['loadEventStart']) - navigation_start)
    _Record('dom_content_loaded_start',
            float(timing['domContentLoadedEventStart']) - navigation_start)
    _Record('fetch_start', float(timing['fetchStart']) - navigation_start,
            important=False)
    request_start = float(timing['requestStart']) - navigation_start
    _Record('request_start', request_start, important=False)

    # Phase measurements in milliseconds.
    _Record('domain_lookup_duration',
            float(timing['domainLookupEnd']) - timing['domainLookupStart'],
            important=False)
    _Record('connect_duration',
            float(timing['connectEnd']) - timing['connectStart'],
            important=False)
    # NOTE(review): this subtracts the navigationStart-RELATIVE
    # request_start from the ABSOLUTE responseStart, matching the original
    # code exactly -- it looks like it may have been meant to use the raw
    # requestStart; confirm intent before changing the metric.
    _Record('request_duration',
            float(timing['responseStart']) - request_start,
            important=False)
    _Record('response_duration',
            float(timing['responseEnd']) - timing['responseStart'],
            important=False)