authorernstm@chromium.org <ernstm@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-10-16 03:05:43 +0000
committerernstm@chromium.org <ernstm@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-10-16 03:05:43 +0000
commit15f8d132a8e59ea709c1ff5df3aa068c5ff83ced (patch)
tree22ca273d26b00d4f0840b4910dd921d4ddaaf098 /tools
parentccb0f51738610f39042972b14ddf04cbe0342fd4 (diff)
telemetry: Improved handling of timeline markers.
- Actions can now be queried for the timeline marker they issue.
- Benchmarks search for those markers (possibly more than one), and extract all stats that overlap one of the timeline markers.
- This should also fix crbug.com/295767

R=nduca@chromium.org,dominikg@chromium.org
BUG=264308,295767

Review URL: https://codereview.chromium.org/24434002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@228847 0039d316-1c4b-4281-b951-d872f2087c98
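In outline, the change works like this (a minimal, self-contained sketch; FakeScrollAction and LabelsFromCompoundAction are hypothetical stand-ins for the real types and helpers in the diff below):

class FakeScrollAction(object):
  # Stand-in for a telemetry page action.
  def GetTimelineMarkerLabel(self):
    return 'SyntheticGestureController::running'

def LabelsFromCompoundAction(compound_action):
  # Mirrors GetTimelineMarkerLabelsFromAction in measurements/smoothness.py.
  if not isinstance(compound_action, list):
    compound_action = [compound_action]
  return [a.GetTimelineMarkerLabel() for a in compound_action
          if a.GetTimelineMarkerLabel()]

labels = LabelsFromCompoundAction(FakeScrollAction())
# The benchmark passes these labels to smoothness.FindTimelineMarkers() and
# feeds the resulting markers into RenderingStats, which aggregates stats
# over every marker interval.
assert labels == ['SyntheticGestureController::running']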
Diffstat (limited to 'tools')
-rw-r--r--tools/perf/measurements/rasterize_and_record.py12
-rw-r--r--tools/perf/measurements/smoothness.py47
-rw-r--r--tools/perf/metrics/rendering_stats.py124
-rw-r--r--tools/perf/metrics/smoothness.py61
-rw-r--r--tools/perf/metrics/smoothness_unittest.py31
-rw-r--r--tools/telemetry/telemetry/page/actions/page_action.py3
-rw-r--r--tools/telemetry/telemetry/page/actions/pinch.py3
-rw-r--r--tools/telemetry/telemetry/page/actions/scroll.py3
-rw-r--r--tools/telemetry/telemetry/page/actions/wait.py9
9 files changed, 190 insertions, 103 deletions
diff --git a/tools/perf/measurements/rasterize_and_record.py b/tools/perf/measurements/rasterize_and_record.py
index 41509bd..5018155 100644
--- a/tools/perf/measurements/rasterize_and_record.py
+++ b/tools/perf/measurements/rasterize_and_record.py
@@ -87,17 +87,17 @@ class RasterizeAndRecord(page_measurement.PageMeasurement):
self._metrics.Stop()
rendering_stats_deltas = self._metrics.deltas
timeline = tab.browser.StopTracing().AsTimelineModel()
- timeline_marker = smoothness.FindTimelineMarker(timeline,
- 'measureNextFrame')
- benchmark_stats = RenderingStats(timeline_marker,
- timeline_marker,
+ timeline_markers = smoothness.FindTimelineMarkers(timeline,
+ 'measureNextFrame')
+ benchmark_stats = RenderingStats(timeline_markers,
+ timeline_markers,
rendering_stats_deltas,
self._metrics.is_using_gpu_benchmarking)
results.Add('rasterize_time', 'ms',
- max(benchmark_stats.rasterize_time) * 1000.0)
+ max(benchmark_stats.rasterize_time))
results.Add('record_time', 'ms',
- max(benchmark_stats.record_time) * 1000.0)
+ max(benchmark_stats.record_time))
results.Add('rasterized_pixels', 'pixels',
max(benchmark_stats.rasterized_pixel_count))
results.Add('recorded_pixels', 'pixels',
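The dropped * 1000.0 factors are not a behavior change: RenderingStats now converts *_time values to milliseconds itself (see the rendering_stats.py hunk below), so the conversion simply moves out of the measurement. A one-sample illustration with a hypothetical trace value:

raw_rasterize_time = 0.0042                  # trace value, in seconds
old_ms = max([raw_rasterize_time]) * 1000.0  # before: measurement converted
new_ms = max([1000.0 * raw_rasterize_time])  # after: stats already in ms
assert old_ms == new_ms                      # both 4.2 ms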
diff --git a/tools/perf/measurements/smoothness.py b/tools/perf/measurements/smoothness.py
index 07981ca..7c0c8cd 100644
--- a/tools/perf/measurements/smoothness.py
+++ b/tools/perf/measurements/smoothness.py
@@ -1,10 +1,10 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import logging
from metrics import smoothness
from metrics.rendering_stats import RenderingStats
+from telemetry.page import page_test
from telemetry.page import page_measurement
@@ -19,6 +19,24 @@ class MissingDisplayFrameRate(page_measurement.MeasurementFailure):
'Missing display frame rate metrics: ' + name)
+class NoSupportedActionException(page_measurement.MeasurementFailure):
+ def __init__(self):
+ super(NoSupportedActionException, self).__init__(
+ 'None of the actions is supported by smoothness measurement')
+
+
+def GetTimelineMarkerLabelsFromAction(compound_action):
+ timeline_marker_labels = []
+ if not isinstance(compound_action, list):
+ compound_action = [compound_action]
+ for action in compound_action:
+ if action.GetTimelineMarkerLabel():
+ timeline_marker_labels.append(action.GetTimelineMarkerLabel())
+ if not timeline_marker_labels:
+ raise NoSupportedActionException()
+ return timeline_marker_labels
+
+
class Smoothness(page_measurement.PageMeasurement):
def __init__(self):
super(Smoothness, self).__init__('smoothness')
@@ -42,11 +60,15 @@ class Smoothness(page_measurement.PageMeasurement):
self._metrics.BindToAction(action)
else:
self._metrics.Start()
+ tab.ExecuteJavaScript(
+ 'console.time("' + smoothness.RENDER_PROCESS_MARKER + '")')
def DidRunAction(self, page, tab, action):
if tab.browser.platform.IsRawDisplayFrameRateSupported():
tab.browser.platform.StopRawDisplayFrameRateMeasurement()
if not action.CanBeBound():
+ tab.ExecuteJavaScript(
+ 'console.timeEnd("' + smoothness.RENDER_PROCESS_MARKER + '")')
self._metrics.Stop()
self._trace_result = tab.browser.StopTracing()
@@ -64,19 +86,16 @@ class Smoothness(page_measurement.PageMeasurement):
raise DidNotScrollException()
timeline = self._trace_result.AsTimelineModel()
- smoothness_marker = smoothness.FindTimelineMarker(timeline,
- smoothness.TIMELINE_MARKER)
- # TODO(dominikg): remove try..except once CL 23532057 has been rolled to the
- # reference builds?
- try:
- gesture_marker = smoothness.FindTimelineMarker(timeline,
- smoothness.SYNTHETIC_GESTURE_MARKER)
- except smoothness.MissingTimelineMarker:
- logging.warning(
- 'No gesture marker found in timeline; using smoothness marker instead.')
- gesture_marker = smoothness_marker
- benchmark_stats = RenderingStats(smoothness_marker,
- gesture_marker,
+ render_process_marker = smoothness.FindTimelineMarkers(
+ timeline, smoothness.RENDER_PROCESS_MARKER)
+ compound_action = page_test.GetCompoundActionFromPage(
+ page, self._action_name_to_run)
+ timeline_marker_labels = GetTimelineMarkerLabelsFromAction(compound_action)
+ timeline_markers = smoothness.FindTimelineMarkers(timeline,
+ timeline_marker_labels)
+
+ benchmark_stats = RenderingStats(render_process_marker,
+ timeline_markers,
rendering_stats_deltas,
self._metrics.is_using_gpu_benchmarking)
smoothness.CalcResults(benchmark_stats, results)
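One detail worth noting above: if no action in the compound action reports a marker label, GetTimelineMarkerLabelsFromAction raises NoSupportedActionException rather than silently measuring nothing. A hypothetical stand-in showing the failing path:

class UnsupportedAction(object):
  def GetTimelineMarkerLabel(self):
    return None  # the PageAction base-class default (see page_action.py below)

labels = []
for action in [UnsupportedAction()]:
  if action.GetTimelineMarkerLabel():
    labels.append(action.GetTimelineMarkerLabel())
assert labels == []  # the real helper raises NoSupportedActionException here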
diff --git a/tools/perf/metrics/rendering_stats.py b/tools/perf/metrics/rendering_stats.py
index 00c8c80..60861c9 100644
--- a/tools/perf/metrics/rendering_stats.py
+++ b/tools/perf/metrics/rendering_stats.py
@@ -2,9 +2,11 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from operator import attrgetter
+
class RenderingStats(object):
- def __init__(self, smoothness_marker, gesture_marker, rendering_stats_deltas,
- used_gpu_benchmarking):
+ def __init__(self, render_process_marker, timeline_markers,
+ rendering_stats_deltas, used_gpu_benchmarking):
"""
Utility class for extracting rendering statistics from the timeline (or
other logging facilities), and providing them in a common format to classes
@@ -14,15 +16,16 @@ class RenderingStats(object):
metrics from the stats must be able to handle both cases. The length of
different list stats may vary.
- All *_time values are measured in seconds.
+ All *_time values are measured in milliseconds.
"""
- self.renderer_process = smoothness_marker.start_thread.parent
- self.start = gesture_marker.start
- self.end = self.start + gesture_marker.duration
+ assert(len(render_process_marker) == 1)
+ self.renderer_process = render_process_marker[0].start_thread.parent
+ self.start = timeline_markers[0].start
+ self.end = timeline_markers[-1].start + timeline_markers[-1].duration
- self.total_time = (self.end - self.start) / 1000.0
self.frame_count = []
self.frame_timestamps = []
+ self.frame_times = []
self.paint_time = []
self.painted_pixel_count = []
self.record_time = []
@@ -31,8 +34,11 @@ class RenderingStats(object):
self.rasterized_pixel_count = []
if used_gpu_benchmarking:
- self.initMainThreadStatsFromTimeline()
- self.initImplThreadStatsFromTimeline()
+ for marker in timeline_markers:
+ self.initMainThreadStatsFromTimeline(marker.start,
+ marker.start+marker.duration)
+ self.initImplThreadStatsFromTimeline(marker.start,
+ marker.start+marker.duration)
else:
self.initFrameCountsFromRenderingStats(rendering_stats_deltas)
@@ -44,62 +50,82 @@ class RenderingStats(object):
else:
self.frame_count = rs.get('numFramesSentToScreen', 0)
- def initMainThreadStatsFromTimeline(self):
+ def initMainThreadStatsFromTimeline(self, start, end):
# TODO(ernstm): Remove when CL with new event names was rolled into
# reference build.
event_name = 'BenchmarkInstrumentation::MainThreadRenderingStats'
for event in self.renderer_process.IterAllSlicesOfName(
'MainThreadRenderingStats::IssueTraceEvent'):
event_name = 'MainThreadRenderingStats::IssueTraceEvent'
+ events = []
for event in self.renderer_process.IterAllSlicesOfName(event_name):
- if event.start >= self.start and event.end <= self.end:
+ if event.start >= start and event.end <= end:
if 'data' not in event.args:
continue
- # TODO(ernstm): remove screen_frame_count when RenderingStats
- # cleanup CL was picked up by the reference build.
- if 'frame_count' in event.args['data']:
- frame_count = event.args['data']['frame_count']
- else:
- frame_count = event.args['data']['screen_frame_count']
- self.frame_count.append(frame_count)
- if frame_count > 1:
- raise ValueError, 'trace contains multi-frame render stats'
- if frame_count == 1:
- self.frame_timestamps.append(
- event.start)
- self.paint_time.append(
- event.args['data']['paint_time'])
- self.painted_pixel_count.append(
- event.args['data']['painted_pixel_count'])
- self.record_time.append(
- event.args['data']['record_time'])
- self.recorded_pixel_count.append(
- event.args['data']['recorded_pixel_count'])
+ events.append(event)
+ events.sort(key=attrgetter('start'))
+
+ first_frame = True
+ for event in events:
+ # TODO(ernstm): remove screen_frame_count when RenderingStats
+ # cleanup CL was picked up by the reference build.
+ if 'frame_count' in event.args['data']:
+ frame_count = event.args['data']['frame_count']
+ else:
+ frame_count = event.args['data']['screen_frame_count']
+ self.frame_count.append(frame_count)
+ if frame_count > 1:
+ raise ValueError, 'trace contains multi-frame render stats'
+ if frame_count == 1:
+ self.frame_timestamps.append(
+ event.start)
+ if not first_frame:
+ self.frame_times.append(round(self.frame_timestamps[-1] -
+ self.frame_timestamps[-2], 2))
+ first_frame = False
+ self.paint_time.append(1000.0 *
+ event.args['data']['paint_time'])
+ self.painted_pixel_count.append(
+ event.args['data']['painted_pixel_count'])
+ self.record_time.append(1000.0 *
+ event.args['data']['record_time'])
+ self.recorded_pixel_count.append(
+ event.args['data']['recorded_pixel_count'])
- def initImplThreadStatsFromTimeline(self):
+ def initImplThreadStatsFromTimeline(self, start, end):
# TODO(ernstm): Remove when CL with new event names was rolled into
# reference build.
event_name = 'BenchmarkInstrumentation::ImplThreadRenderingStats'
for event in self.renderer_process.IterAllSlicesOfName(
'ImplThreadRenderingStats::IssueTraceEvent'):
event_name = 'ImplThreadRenderingStats::IssueTraceEvent'
+ events = []
for event in self.renderer_process.IterAllSlicesOfName(event_name):
- if event.start >= self.start and event.end <= self.end:
+ if event.start >= start and event.end <= end:
if 'data' not in event.args:
continue
- # TODO(ernstm): remove screen_frame_count when RenderingStats
- # cleanup CL was picked up by the reference build.
- if 'frame_count' in event.args['data']:
- frame_count = event.args['data']['frame_count']
- else:
- frame_count = event.args['data']['screen_frame_count']
- self.frame_count.append(frame_count)
- if frame_count > 1:
- raise ValueError, 'trace contains multi-frame render stats'
- if frame_count == 1:
- self.frame_timestamps.append(
- event.start)
- self.rasterize_time.append(
- event.args['data']['rasterize_time'])
- self.rasterized_pixel_count.append(
- event.args['data']['rasterized_pixel_count'])
+ events.append(event)
+ events.sort(key=attrgetter('start'))
+
+ first_frame = True
+ for event in events:
+ # TODO(ernstm): remove screen_frame_count when RenderingStats
+ # cleanup CL was picked up by the reference build.
+ if 'frame_count' in event.args['data']:
+ frame_count = event.args['data']['frame_count']
+ else:
+ frame_count = event.args['data']['screen_frame_count']
+ self.frame_count.append(frame_count)
+ if frame_count > 1:
+ raise ValueError, 'trace contains multi-frame render stats'
+ if frame_count == 1:
+ self.frame_timestamps.append(
+ event.start)
+ if not first_frame:
+ self.frame_times.append(round(self.frame_timestamps[-1] -
+ self.frame_timestamps[-2], 2))
+ first_frame = False
+ self.rasterize_time.append(1000.0 *
+ event.args['data']['rasterize_time'])
+ self.rasterized_pixel_count.append(
+ event.args['data']['rasterized_pixel_count'])
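frame_times is now built incrementally as timestamps are collected, rather than derived afterwards in CalcResults. A minimal sketch of the derivation, with hypothetical timestamps in milliseconds:

frame_timestamps = [100.0, 116.7, 133.3, 150.1]
frame_times = []
first_frame = True
for timestamp in frame_timestamps:
  if not first_frame:
    frame_times.append(round(timestamp - previous_timestamp, 2))
  first_frame = False
  previous_timestamp = timestamp
assert frame_times == [16.7, 16.6, 16.8]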
diff --git a/tools/perf/metrics/smoothness.py b/tools/perf/metrics/smoothness.py
index e35af91..03391d2 100644
--- a/tools/perf/metrics/smoothness.py
+++ b/tools/perf/metrics/smoothness.py
@@ -3,13 +3,13 @@
# found in the LICENSE file.
import os
+from operator import attrgetter
from metrics import statistics
from telemetry.core import util
from telemetry.page import page_measurement
-TIMELINE_MARKER = 'smoothness_scroll'
-SYNTHETIC_GESTURE_MARKER = 'SyntheticGestureController::running'
+RENDER_PROCESS_MARKER = 'RenderProcessMarker'
class SmoothnessMetrics(object):
@@ -46,9 +46,9 @@ class SmoothnessMetrics(object):
action.BindMeasurementJavaScript(
self._tab,
'window.__renderingStats.start(); ' +
- 'console.time("' + TIMELINE_MARKER + '")',
+ 'console.time("' + RENDER_PROCESS_MARKER + '")',
'window.__renderingStats.stop(); ' +
- 'console.timeEnd("' + TIMELINE_MARKER + '")')
+ 'console.timeEnd("' + RENDER_PROCESS_MARKER + '")')
@property
def is_using_gpu_benchmarking(self):
@@ -94,18 +94,13 @@ def CalcFirstPaintTimeResults(results, tab):
def CalcResults(benchmark_stats, results):
s = benchmark_stats
- frame_times = []
- for i in xrange(1, len(s.frame_timestamps)):
- frame_times.append(
- round(s.frame_timestamps[i] - s.frame_timestamps[i-1], 2))
-
# List of raw frame times.
- results.Add('frame_times', 'ms', frame_times)
+ results.Add('frame_times', 'ms', s.frame_times)
# Arithmetic mean of frame times.
- mean_frame_time_ms = 1000 * statistics.ArithmeticMean(
- s.total_time, len(s.frame_timestamps))
- results.Add('mean_frame_time', 'ms', round(mean_frame_time_ms, 3))
+ mean_frame_time = statistics.ArithmeticMean(s.frame_times,
+ len(s.frame_times))
+ results.Add('mean_frame_time', 'ms', round(mean_frame_time, 3))
# Absolute discrepancy of frame time stamps.
jank = statistics.FrameDiscrepancy(s.frame_timestamps)
@@ -114,7 +109,7 @@ def CalcResults(benchmark_stats, results):
# Are we hitting 60 fps for 95 percent of all frames? (Boolean value)
# We use 17ms as a slightly looser threshold, instead of 1000.0/60.0.
results.Add('mostly_smooth', '',
- statistics.Percentile(frame_times, 95.0) < 17.0)
+ statistics.Percentile(s.frame_times, 95.0) < 17.0)
class MissingTimelineMarker(page_measurement.MeasurementFailure):
@@ -123,13 +118,35 @@ class MissingTimelineMarker(page_measurement.MeasurementFailure):
'Timeline marker not found: ' + name)
-def FindTimelineMarker(timeline, name):
- """Find the timeline event with the given name.
+class OverlappingTimelineMarkers(page_measurement.MeasurementFailure):
+ def __init__(self):
+ super(OverlappingTimelineMarkers, self).__init__(
+ 'Overlapping timeline markers found')
+
+
+def FindTimelineMarkers(timeline, timeline_marker_labels):
+ """Find the timeline events with the given names.
- If there is not exactly one such timeline event, raise an error.
+ If the number and order of events found does not match the labels,
+ raise an error.
"""
- events = [s for s in timeline.GetAllEventsOfName(name)
- if s.parent_slice == None]
- if len(events) != 1:
- raise MissingTimelineMarker(name)
- return events[0]
+ events = []
+ if not type(timeline_marker_labels) is list:
+ timeline_marker_labels = [timeline_marker_labels]
+ for label in timeline_marker_labels:
+ if not label:
+ continue
+ events.extend([s for s in timeline.GetAllEventsOfName(label)
+ if s.parent_slice == None])
+ events.sort(key=attrgetter('start'))
+
+ for (i, event) in enumerate(events):
+ if timeline_marker_labels[i] and event.name != timeline_marker_labels[i]:
+ raise MissingTimelineMarker(timeline_marker_labels[i])
+
+ for i in xrange(0, len(events)):
+ for j in xrange(i+1, len(events)):
+ if (events[j].start < events[i].start + events[i].duration):
+ raise OverlappingTimelineMarkers()
+
+ return events
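The non-overlap invariant deserves spelling out: markers may touch end-to-start, but may not interleave. A self-contained sketch of the pairwise check (FakeEvent is a hypothetical stand-in for a timeline slice):

class FakeEvent(object):
  def __init__(self, start, duration):
    self.start = start
    self.duration = duration

def HasOverlap(events):
  # Same pairwise test as FindTimelineMarkers, on events sorted by start.
  events = sorted(events, key=lambda e: e.start)
  for i in xrange(0, len(events)):
    for j in xrange(i + 1, len(events)):
      if events[j].start < events[i].start + events[i].duration:
        return True
  return False

assert not HasOverlap([FakeEvent(0, 10), FakeEvent(10, 5)])  # back-to-back: ok
assert HasOverlap([FakeEvent(0, 10), FakeEvent(5, 10)])      # would raise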
diff --git a/tools/perf/metrics/smoothness_unittest.py b/tools/perf/metrics/smoothness_unittest.py
index 5a69625..6a0008d 100644
--- a/tools/perf/metrics/smoothness_unittest.py
+++ b/tools/perf/metrics/smoothness_unittest.py
@@ -13,6 +13,8 @@ from telemetry.core.backends.chrome.trace_result import TraceResult
from telemetry.page import page
from telemetry.page.page_measurement_results import PageMeasurementResults
+SYNTHETIC_GESTURE_MARKER = 'SyntheticGestureController::running'
+
class MockTimer(object):
"""A mock timer class which can generate random durations.
@@ -97,13 +99,14 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
trace_events = []
total_time_seconds = 0.0
num_frames_sent = 0.0
+ first_frame = True
previous_frame_time = None
# This list represents time differences between frames in milliseconds.
expected_frame_times = []
# Append start trace events for the timeline marker and gesture marker,
# with some amount of time in between them.
- trace_events.append({'name': smoothness.TIMELINE_MARKER,
+ trace_events.append({'name': smoothness.RENDER_PROCESS_MARKER,
'tts': mock_timer.microseconds,
'args': {},
'pid': 20978,
@@ -113,7 +116,7 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
'ph': 'S', # Phase: start.
'id': '0x12345'})
mock_timer.Advance()
- trace_events.append({'name': smoothness.SYNTHETIC_GESTURE_MARKER,
+ trace_events.append({'name': SYNTHETIC_GESTURE_MARKER,
'tts': mock_timer.microseconds,
'args': {},
'pid': 20978,
@@ -128,9 +131,13 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
mock_frame = MockFrame(mock_timer)
mock_frame.AppendTraceEventForMainThreadStats(trace_events)
mock_frame.AppendTraceEventForImplThreadStats(trace_events)
- total_time_seconds += mock_frame.duration / 1e6
- num_frames_sent += mock_frame.main_stats['frame_count']
- num_frames_sent += mock_frame.impl_stats['frame_count']
+ # Exclude the first frame, because it may have started before the
+ # benchmark run.
+ if not first_frame:
+ total_time_seconds += mock_frame.duration / 1e6
+ num_frames_sent += mock_frame.main_stats['frame_count']
+ num_frames_sent += mock_frame.impl_stats['frame_count']
+ first_frame = False
current_frame_time = mock_timer.microseconds / 1000.0
if previous_frame_time:
difference = current_frame_time - previous_frame_time
@@ -140,7 +147,7 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
# Append finish trace events for the timeline and gesture markers, in the
# reverse order from how they were added, with some time in between.
- trace_events.append({'name': smoothness.SYNTHETIC_GESTURE_MARKER,
+ trace_events.append({'name': SYNTHETIC_GESTURE_MARKER,
'tts': mock_timer.microseconds,
'args': {},
'pid': 20978,
@@ -150,7 +157,7 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
'ph': 'F', # Phase: finish.
'id': '0xabcde'})
mock_timer.Advance()
- trace_events.append({'name': smoothness.TIMELINE_MARKER,
+ trace_events.append({'name': smoothness.RENDER_PROCESS_MARKER,
'tts': mock_timer.microseconds,
'args': {},
'pid': 20978,
@@ -167,12 +174,12 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
# Find the timeline marker and gesture marker in the timeline,
# and create a RenderingStats object.
- smoothness_marker = smoothness.FindTimelineMarker(
- timeline, smoothness.TIMELINE_MARKER)
- gesture_marker = smoothness.FindTimelineMarker(
- timeline, smoothness.SYNTHETIC_GESTURE_MARKER)
+ render_process_marker = smoothness.FindTimelineMarkers(
+ timeline, smoothness.RENDER_PROCESS_MARKER)
+ timeline_markers = smoothness.FindTimelineMarkers(
+ timeline, SYNTHETIC_GESTURE_MARKER)
stats = RenderingStats(
- smoothness_marker, gesture_marker, {}, True)
+ render_process_marker, timeline_markers, {}, True)
# Make a results object and add results to it from the smoothness metric.
res = PageMeasurementResults()
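For reference, console.time()/console.timeEnd() shows up in the trace as an async start/finish event pair, which is what the mock above emulates. The shape, with hypothetical field values and only the fields visible in the mock:

start_event = {'name': 'RenderProcessMarker',
               'tts': 0, 'args': {}, 'pid': 20978,
               'ph': 'S',  # Phase: start.
               'id': '0x12345'}
finish_event = dict(start_event, tts=1000,
                    ph='F')  # Phase: finish.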
diff --git a/tools/telemetry/telemetry/page/actions/page_action.py b/tools/telemetry/telemetry/page/actions/page_action.py
index c6da080..1485df5 100644
--- a/tools/telemetry/telemetry/page/actions/page_action.py
+++ b/tools/telemetry/telemetry/page/actions/page_action.py
@@ -59,3 +59,6 @@ class PageAction(object):
stop_js: JavaScript code that stops measurements.
"""
raise Exception('This action cannot be bound.')
+
+ def GetTimelineMarkerLabel(self):
+ return None
diff --git a/tools/telemetry/telemetry/page/actions/pinch.py b/tools/telemetry/telemetry/page/actions/pinch.py
index ff67594..d0e8f29 100644
--- a/tools/telemetry/telemetry/page/actions/pinch.py
+++ b/tools/telemetry/telemetry/page/actions/pinch.py
@@ -55,3 +55,6 @@ class PinchAction(page_action.PageAction):
window.__pinchAction.beginMeasuringHook = function() { %s };
window.__pinchAction.endMeasuringHook = function() { %s };
""" % (start_js, stop_js))
+
+ def GetTimelineMarkerLabel(self):
+ return 'SyntheticGestureController::running'
diff --git a/tools/telemetry/telemetry/page/actions/scroll.py b/tools/telemetry/telemetry/page/actions/scroll.py
index 8c9e105..e7f2680 100644
--- a/tools/telemetry/telemetry/page/actions/scroll.py
+++ b/tools/telemetry/telemetry/page/actions/scroll.py
@@ -84,3 +84,6 @@ class ScrollAction(page_action.PageAction):
window.__scrollAction.beginMeasuringHook = function() { %s };
window.__scrollAction.endMeasuringHook = function() { %s };
""" % (start_js, stop_js))
+
+ def GetTimelineMarkerLabel(self):
+ return 'SyntheticGestureController::running'
diff --git a/tools/telemetry/telemetry/page/actions/wait.py b/tools/telemetry/telemetry/page/actions/wait.py
index 861c916..c3f80fb 100644
--- a/tools/telemetry/telemetry/page/actions/wait.py
+++ b/tools/telemetry/telemetry/page/actions/wait.py
@@ -18,6 +18,9 @@ class WaitAction(page_action.PageAction):
getattr(self, 'condition', None) == 'href_change')
def RunAction(self, page, tab, previous_action):
+ tab.ExecuteJavaScript(
+ 'console.time("' + self.GetTimelineMarkerLabel() + '")')
+
if hasattr(self, 'seconds'):
time.sleep(self.seconds)
@@ -66,3 +69,9 @@ class WaitAction(page_action.PageAction):
self.timeout)
else:
raise page_action.PageActionFailed('No wait condition found')
+
+ tab.ExecuteJavaScript(
+ 'console.timeEnd("' + self.GetTimelineMarkerLabel() + '")')
+
+ def GetTimelineMarkerLabel(self):
+ return 'WaitAction::RunAction'
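WaitAction thus brackets its whole run in a console.time()/console.timeEnd() pair named after its own marker label, so the generic marker search can measure wait periods like any gesture. The JavaScript it emits, sketched:

label = 'WaitAction::RunAction'  # WaitAction.GetTimelineMarkerLabel()
start_js = 'console.time("' + label + '")'
end_js = 'console.timeEnd("' + label + '")'
# RunAction calls tab.ExecuteJavaScript(start_js) before waiting and
# tab.ExecuteJavaScript(end_js) once the wait condition is met.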