summaryrefslogtreecommitdiffstats
path: root/tools/perf/measurements/blink_style.py
blob: b46eab153b22eefb22cca4767c92ec8a170d936f (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from itertools import starmap
from collections import defaultdict
from telemetry.core import util
from telemetry.core import exceptions
from telemetry.page import action_runner
from telemetry.page import page_test
from telemetry.value import scalar

from measurements import timeline_controller


class BlinkStyle(page_test.PageTest):
  """Measures Blink style-recalculation and CSS-parsing performance.

  Traces the 'blink_style' and 'blink.console' categories while forcing a
  full-document style invalidation, then reports:
    * 'update_style'          -- ms per 1000 style-resolver accesses, and
    * 'parse_css_<category>' / 'tokenize_css_<category>'
                              -- CSS parse/tokenize throughput, bucketed by
                                 stylesheet token density.
  """

  def __init__(self):
    super(BlinkStyle, self).__init__()
    # Created fresh for every page in WillNavigateToPage.
    self._controller = None

  def WillNavigateToPage(self, page, tab):
    # Start tracing before navigation so all style/parse work is captured.
    self._controller = timeline_controller.TimelineController()
    self._controller.trace_categories = 'blink_style,blink.console'
    self._controller.SetUp(page, tab)
    self._controller.Start(tab)

  def CleanUpAfterPage(self, page, tab):
    if self._controller:
      self._controller.CleanUp(tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    interaction_runner = action_runner.ActionRunner(tab)
    with interaction_runner.CreateInteraction('wait-for-quiescence'):
      tab.ExecuteJavaScript('console.time("");')
      try:
        util.WaitFor(tab.HasReachedQuiescence, 15)
      except exceptions.TimeoutException:
        # Some sites never reach quiescence. As this benchmark normalizes/
        # categorizes results, it shouldn't be necessary to reach the same
        # state on every run.
        pass

    # Invalidate style for the whole document and force a style update (but
    # not layout), bracketed by a console marker we can find in the trace.
    tab.ExecuteJavaScript(
        'console.time("style-update");'
        # Occasionally documents will break the APIs we need.
        'try {'
        '  document && (document.documentElement.lang += "z");'
        '  getComputedStyle(document.documentElement).color;'
        '} catch (e) {}'
        'console.timeEnd("style-update");'
        )

    self._controller.Stop(tab, results)
    renderer = self._controller.model.GetRendererThreadFromTabId(tab.id)
    style_markers = [s for s in renderer.async_slices
                     if s.name == 'style-update'
                     and s.category == 'blink.console']
    # Exactly one marker is expected: the console.time pair injected above.
    assert len(style_markers) == 1
    style_marker = style_markers[0]

    def thread_or_wall_duration(event):
      # Prefer CPU time when the trace recorded it; fall back to wall time.
      return (event.thread_duration if event.has_thread_timestamps
              else event.duration)

    # Too few resolver accesses makes the per-access cost too noisy to report.
    min_resolver_accesses = 50
    for event in renderer.all_slices:
      if event.name != 'Document::updateStyle':
        continue
      # Only count style updates inside our marker window.
      if event.start < style_marker.start or event.end > style_marker.end:
        continue
      accesses = event.args.get('resolverAccessCount')
      if accesses is None:
        # Argument absent in earlier Blink versions.
        continue
      if accesses < min_resolver_accesses:
        continue
      cost_per_1000 = 1000 * (thread_or_wall_duration(event) / accesses)
      results.AddValue(scalar.ScalarValue(
          page, 'update_style', 'ms/1000 elements', cost_per_1000))

    class ParserEvent(object):
      """Aggregates one (summary, tokenize, parse) trace-event triple."""

      def __init__(self, summary_event, tokenize_event, parse_event):
        # Bucketing thresholds for stylesheet token density.
        min_sheet_length = 1000
        ua_sheet_mode = 5
        enormous_token_threshold = 100
        large_token_threshold = 5

        self.mode = summary_event.args.get('mode')
        self.length = summary_event.args.get('length')
        self.tokens = summary_event.args.get('tokenCount')
        self.tokenize_duration = thread_or_wall_duration(tokenize_event)
        self.parse_duration = thread_or_wall_duration(parse_event)
        self.chars_per_token = (
            self.length / float(self.tokens) if self.tokens else 0)
        # UA sheets and tiny sheets are excluded from the report.
        if self.mode == ua_sheet_mode or self.length < min_sheet_length:
          self.category = 'ignored'
        elif self.chars_per_token > enormous_token_threshold:
          self.category = 'enormous_tokens'
        elif self.chars_per_token > large_token_threshold:
          self.category = 'large_tokens'
        else:
          self.category = 'regular'

    parser_event_names = ('CSSParserImpl::parseStyleSheet',
                          'CSSParserImpl::parseStyleSheet.tokenize',
                          'CSSParserImpl::parseStyleSheet.parse')
    parser_slices = [event for event in renderer.all_slices
                     if event.name in parser_event_names]

    # The three event kinds arrive as consecutive (summary, tokenize, parse)
    # triples; zip over strided slices groups them, silently dropping any
    # trailing partial triple.
    events_by_category = defaultdict(list)
    for triple in zip(parser_slices[0::3],
                      parser_slices[1::3],
                      parser_slices[2::3]):
      merged = ParserEvent(*triple)
      if merged.category != 'ignored':
        events_by_category[merged.category].append(merged)

    for category, events in events_by_category.items():
      parse_duration = sum(event.parse_duration for event in events)
      tokenize_duration = sum(event.tokenize_duration for event in events)
      tokens = sum(event.tokens for event in events)
      length = sum(event.length for event in events)

      results.AddValue(
          scalar.ScalarValue(page, ('parse_css_%s' % category),
                             'tokens/s', 1000 / (parse_duration / tokens)))

      results.AddValue(
          scalar.ScalarValue(page, ('tokenize_css_%s' % category),
                             'char/s',  1000 / (tokenize_duration / length)))