-rw-r--r-- | tools/perf/measurements/rasterize_and_record.py   | 59
-rw-r--r-- | tools/perf/measurements/smoothness.py              | 72
-rw-r--r-- | tools/perf/metrics/__init__.py                     | 5
-rw-r--r-- | tools/perf/metrics/rendering_stats.py              | 27
-rw-r--r-- | tools/perf/metrics/smoothness.py                   | 163
-rw-r--r-- | tools/perf/metrics/smoothness_unittest.py          | 35
-rw-r--r-- | tools/telemetry/telemetry/core/timeline/model.py   | 47
-rw-r--r-- | tools/telemetry/telemetry/page/actions/pinch.js    | 4
-rw-r--r-- | tools/telemetry/telemetry/page/actions/scroll.js   | 4
9 files changed, 148 insertions, 268 deletions
diff --git a/tools/perf/measurements/rasterize_and_record.py b/tools/perf/measurements/rasterize_and_record.py
index 5018155..e12d14f 100644
--- a/tools/perf/measurements/rasterize_and_record.py
+++ b/tools/perf/measurements/rasterize_and_record.py
@@ -4,10 +4,10 @@
 
 import time
 
-from metrics import smoothness
-from metrics.rendering_stats import RenderingStats
+from metrics import rendering_stats
 from telemetry.page import page_measurement
 
+
 class RasterizeAndRecord(page_measurement.PageMeasurement):
   def __init__(self):
     super(RasterizeAndRecord, self).__init__('', True)
@@ -29,11 +29,11 @@ class RasterizeAndRecord(page_measurement.PageMeasurement):
             '(must be long enough to render one frame)')
 
   def CustomizeBrowserOptions(self, options):
-    smoothness.SmoothnessMetrics.CustomizeBrowserOptions(options)
     # Run each raster task N times. This allows us to report the time for the
     # best run, effectively excluding cache effects and time when the thread is
     # de-scheduled.
     options.AppendExtraBrowserArgs([
+        '--enable-gpu-benchmarking',
         '--slow-down-raster-scale-factor=%d' % int(
             options.raster_record_repeat),
         # Enable impl-side-painting. Current version of benchmark only works for
@@ -44,8 +44,6 @@ class RasterizeAndRecord(page_measurement.PageMeasurement):
     ])
 
   def MeasurePage(self, page, tab, results):
-    self._metrics = smoothness.SmoothnessMetrics(tab)
-
     # Rasterize only what's visible.
     tab.ExecuteJavaScript(
         'chrome.gpuBenchmarking.setRasterizeOnlyVisibleContent();')
@@ -57,48 +55,41 @@ class RasterizeAndRecord(page_measurement.PageMeasurement):
     # Render one frame before we start gathering a trace. On some pages, the
     # first frame requested has more variance in the number of pixels
     # rasterized.
-    tab.ExecuteJavaScript("""
-        window.__rafFired = false;
-        window.webkitRequestAnimationFrame(function() {
-          chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();
-          window.__rafFired = true;
-        });
-    """)
+    tab.ExecuteJavaScript(
+        'window.__rafFired = false;'
+        'window.webkitRequestAnimationFrame(function() {'
+        'chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();'
+        'window.__rafFired = true;'
+        '});')
     time.sleep(float(self.options.stop_wait_time))
 
     tab.browser.StartTracing('webkit.console,benchmark', 60)
-    self._metrics.Start()
 
-    tab.ExecuteJavaScript("""
-        window.__rafFired = false;
-        window.webkitRequestAnimationFrame(function() {
-          chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();
-          console.time("measureNextFrame");
-          window.__rafFired = true;
-        });
-    """)
+    tab.ExecuteJavaScript(
+        'window.__rafFired = false;'
+        'window.webkitRequestAnimationFrame(function() {'
+        'chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();'
+        'console.time("' + rendering_stats.RENDER_PROCESS_MARKER + '");'
+        'window.__rafFired = true;'
+        '});')
 
     # Wait until the frame was drawn.
     # Needs to be adjusted for every device and for different
     # raster_record_repeat counts.
     # TODO(ernstm): replace by call-back.
     time.sleep(float(self.options.stop_wait_time))
-    tab.ExecuteJavaScript('console.timeEnd("measureNextFrame")')
+    tab.ExecuteJavaScript(
+        'console.timeEnd("' + rendering_stats.RENDER_PROCESS_MARKER + '")')
 
-    self._metrics.Stop()
-    rendering_stats_deltas = self._metrics.deltas
     timeline = tab.browser.StopTracing().AsTimelineModel()
-    timeline_markers = smoothness.FindTimelineMarkers(timeline,
-                                                      'measureNextFrame')
-    benchmark_stats = RenderingStats(timeline_markers,
-                                     timeline_markers,
-                                     rendering_stats_deltas,
-                                     self._metrics.is_using_gpu_benchmarking)
+    timeline_markers = timeline.FindTimelineMarkers(
+        rendering_stats.RENDER_PROCESS_MARKER)
+    stats = rendering_stats.RenderingStats(timeline_markers, timeline_markers)
 
     results.Add('rasterize_time', 'ms',
-                max(benchmark_stats.rasterize_time))
+                max(stats.rasterize_time))
     results.Add('record_time', 'ms',
-                max(benchmark_stats.record_time))
+                max(stats.record_time))
     results.Add('rasterized_pixels', 'pixels',
-                max(benchmark_stats.rasterized_pixel_count))
+                max(stats.rasterized_pixel_count))
     results.Add('recorded_pixels', 'pixels',
-                max(benchmark_stats.recorded_pixel_count))
+                max(stats.recorded_pixel_count))
diff --git a/tools/perf/measurements/smoothness.py b/tools/perf/measurements/smoothness.py
index 4fd0ad3..7d3e391 100644
--- a/tools/perf/measurements/smoothness.py
+++ b/tools/perf/measurements/smoothness.py
@@ -3,25 +3,27 @@
 # found in the LICENSE file.
 
 from metrics import smoothness
-from metrics.rendering_stats import RenderingStats
+from metrics import rendering_stats
 from telemetry.page import page_test
 from telemetry.page import page_measurement
+from telemetry.core.timeline.model import MarkerMismatchError
 
 
-class DidNotScrollException(page_measurement.MeasurementFailure):
+class NotEnoughFramesError(page_measurement.MeasurementFailure):
   def __init__(self):
-    super(DidNotScrollException, self).__init__('Page did not scroll')
+    super(NotEnoughFramesError, self).__init__(
+        'Page output less than two frames')
 
 
-class MissingDisplayFrameRate(page_measurement.MeasurementFailure):
+class MissingDisplayFrameRateError(page_measurement.MeasurementFailure):
  def __init__(self, name):
-    super(MissingDisplayFrameRate, self).__init__(
+    super(MissingDisplayFrameRateError, self).__init__(
        'Missing display frame rate metrics: ' + name)
 
 
-class NoSupportedActionException(page_measurement.MeasurementFailure):
+class NoSupportedActionError(page_measurement.MeasurementFailure):
   def __init__(self):
-    super(NoSupportedActionException, self).__init__(
+    super(NoSupportedActionError, self).__init__(
        'None of the actions is supported by smoothness measurement')
 
 
@@ -33,18 +35,17 @@ def GetTimelineMarkerLabelsFromAction(compound_action):
     if action.GetTimelineMarkerLabel():
       timeline_marker_labels.append(action.GetTimelineMarkerLabel())
   if not timeline_marker_labels:
-    raise NoSupportedActionException()
+    raise NoSupportedActionError()
   return timeline_marker_labels
 
 
 class Smoothness(page_measurement.PageMeasurement):
   def __init__(self):
     super(Smoothness, self).__init__('smoothness')
-    self._metrics = None
     self._trace_result = None
 
   def CustomizeBrowserOptions(self, options):
-    smoothness.SmoothnessMetrics.CustomizeBrowserOptions(options)
+    options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
 
   def CanRunForPage(self, page):
     return hasattr(page, 'smoothness')
@@ -55,57 +56,40 @@ class Smoothness(page_measurement.PageMeasurement):
     tab.browser.StartTracing('webkit,webkit.console,benchmark', 60)
     if tab.browser.platform.IsRawDisplayFrameRateSupported():
       tab.browser.platform.StartRawDisplayFrameRateMeasurement()
-    self._metrics = smoothness.SmoothnessMetrics(tab)
-    if action.CanBeBound():
-      self._metrics.BindToAction(action)
-    else:
-      self._metrics.Start()
-      tab.ExecuteJavaScript(
-          'console.time("' + smoothness.RENDER_PROCESS_MARKER + '")')
+    tab.ExecuteJavaScript(
+        'console.time("' + rendering_stats.RENDER_PROCESS_MARKER + '")')
 
   def DidRunAction(self, page, tab, action):
+    tab.ExecuteJavaScript(
+        'console.timeEnd("' + rendering_stats.RENDER_PROCESS_MARKER + '")')
     if tab.browser.platform.IsRawDisplayFrameRateSupported():
       tab.browser.platform.StopRawDisplayFrameRateMeasurement()
-    if not action.CanBeBound():
-      tab.ExecuteJavaScript(
-          'console.timeEnd("' + smoothness.RENDER_PROCESS_MARKER + '")')
-    self._metrics.Stop()
     self._trace_result = tab.browser.StopTracing()
 
   def MeasurePage(self, page, tab, results):
-    rendering_stats_deltas = self._metrics.deltas
-
-    # TODO(ernstm): remove numFramesSentToScreen when RenderingStats
-    # cleanup CL was picked up by the reference build.
-    if 'frameCount' in rendering_stats_deltas:
-      frame_count = rendering_stats_deltas.get('frameCount', 0)
-    else:
-      frame_count = rendering_stats_deltas.get('numFramesSentToScreen', 0)
-
-    if not (frame_count > 0):
-      raise DidNotScrollException()
-
     timeline = self._trace_result.AsTimelineModel()
-    render_process_marker = smoothness.FindTimelineMarkers(
-        timeline, smoothness.RENDER_PROCESS_MARKER)
+    render_process_marker = timeline.FindTimelineMarkers(
+        rendering_stats.RENDER_PROCESS_MARKER)
 
     compound_action = page_test.GetCompoundActionFromPage(
         page, self._action_name_to_run)
     timeline_marker_labels = GetTimelineMarkerLabelsFromAction(compound_action)
     # TODO(ernstm): remove try-except when the reference build was updated?
     try:
-      timeline_markers = smoothness.FindTimelineMarkers(
-          timeline, timeline_marker_labels)
-    except smoothness.TimelineMarkerMismatchException:
+      timeline_markers = timeline.FindTimelineMarkers(timeline_marker_labels)
+    except MarkerMismatchError:
       timeline_markers = render_process_marker
 
-    benchmark_stats = RenderingStats(render_process_marker,
-                                     timeline_markers,
-                                     rendering_stats_deltas,
-                                     self._metrics.is_using_gpu_benchmarking)
-    smoothness.CalcResults(benchmark_stats, results)
+    stats = rendering_stats.RenderingStats(
+        render_process_marker, timeline_markers)
+
+    if not stats.frame_times:
+      raise NotEnoughFramesError()
+
+    smoothness_metric = smoothness.SmoothnessMetric(stats)
+    smoothness_metric.AddResults(tab, results)
 
     if tab.browser.platform.IsRawDisplayFrameRateSupported():
       for r in tab.browser.platform.GetRawDisplayFrameRateMeasurements():
         if r.value is None:
-          raise MissingDisplayFrameRate(r.name)
+          raise MissingDisplayFrameRateError(r.name)
         results.Add(r.name, r.unit, r.value)
diff --git a/tools/perf/metrics/__init__.py b/tools/perf/metrics/__init__.py
index 79c47ab..93d13c6 100644
--- a/tools/perf/metrics/__init__.py
+++ b/tools/perf/metrics/__init__.py
@@ -25,11 +25,11 @@ class Metric(object):
 
   def Start(self, page, tab):
     """Start collecting data for this metric."""
-    raise NotImplementedError()
+    pass
 
   def Stop(self, page, tab):
     """Stop collecting data for this metric (if applicable)."""
-    raise NotImplementedError()
+    pass
 
   def AddResults(self, tab, results):
     """Add the data collected into the results object for a measurement.
@@ -39,4 +39,3 @@ class Metric(object):
     results should be added with results.Add(trace_name, unit, value).
""" raise NotImplementedError() - diff --git a/tools/perf/metrics/rendering_stats.py b/tools/perf/metrics/rendering_stats.py index da1713d..9bde896 100644 --- a/tools/perf/metrics/rendering_stats.py +++ b/tools/perf/metrics/rendering_stats.py @@ -4,9 +4,11 @@ from operator import attrgetter +RENDER_PROCESS_MARKER = 'RenderProcessMarker' + + class RenderingStats(object): - def __init__(self, render_process_marker, timeline_markers, - rendering_stats_deltas, used_gpu_benchmarking): + def __init__(self, render_process_marker, timeline_markers): """ Utility class for extracting rendering statistics from the timeline (or other loggin facilities), and providing them in a common format to classes @@ -34,22 +36,11 @@ class RenderingStats(object): self.rasterize_time = [] self.rasterized_pixel_count = [] - if used_gpu_benchmarking: - for marker in timeline_markers: - self.initMainThreadStatsFromTimeline(marker.start, - marker.start+marker.duration) - self.initImplThreadStatsFromTimeline(marker.start, - marker.start+marker.duration) - else: - self.initFrameCountsFromRenderingStats(rendering_stats_deltas) - - def initFrameCountsFromRenderingStats(self, rs): - # TODO(ernstm): remove numFramesSentToScreen when RenderingStats - # cleanup CL was picked up by the reference build. - if 'frameCount' in rs: - self.frame_count = rs.get('frameCount', 0) - else: - self.frame_count = rs.get('numFramesSentToScreen', 0) + for marker in timeline_markers: + self.initMainThreadStatsFromTimeline(marker.start, + marker.start+marker.duration) + self.initImplThreadStatsFromTimeline(marker.start, + marker.start+marker.duration) def initMainThreadStatsFromTimeline(self, start, end): # TODO(ernstm): Remove when CL with new event names was rolled into diff --git a/tools/perf/metrics/smoothness.py b/tools/perf/metrics/smoothness.py index dce093d..d78d909 100644 --- a/tools/perf/metrics/smoothness.py +++ b/tools/perf/metrics/smoothness.py @@ -2,154 +2,29 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -import os -from operator import attrgetter - from metrics import statistics -from telemetry.core import util -from telemetry.page import page_measurement - -RENDER_PROCESS_MARKER = 'RenderProcessMarker' - - -class SmoothnessMetrics(object): - def __init__(self, tab): - self._tab = tab - with open( - os.path.join(os.path.dirname(__file__), - 'smoothness.js')) as f: - js = f.read() - tab.ExecuteJavaScript(js) - - @classmethod - def CustomizeBrowserOptions(cls, options): - options.AppendExtraBrowserArgs('--enable-gpu-benchmarking') - - def Start(self): - self._tab.ExecuteJavaScript( - 'window.__renderingStats = new __RenderingStats();' - 'window.__renderingStats.start()') - - def SetNeedsDisplayOnAllLayersAndStart(self): - self._tab.ExecuteJavaScript( - 'chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();' - 'window.__renderingStats = new __RenderingStats();' - 'window.__renderingStats.start()') - - def Stop(self): - self._tab.ExecuteJavaScript('window.__renderingStats.stop()') - - def BindToAction(self, action): - # Make the scroll test start and stop measurement automatically. 
-    self._tab.ExecuteJavaScript(
-        'window.__renderingStats = new __RenderingStats();')
-    action.BindMeasurementJavaScript(
-        self._tab,
-        'window.__renderingStats.start(); ' +
-        'console.time("' + RENDER_PROCESS_MARKER + '")',
-        'window.__renderingStats.stop(); ' +
-        'console.timeEnd("' + RENDER_PROCESS_MARKER + '")')
-
-  @property
-  def is_using_gpu_benchmarking(self):
-    return self._tab.EvaluateJavaScript(
-        'window.__renderingStats.isUsingGpuBenchmarking()')
-
-  @property
-  def start_values(self):
-    return self._tab.EvaluateJavaScript(
-        'window.__renderingStats.getStartValues()')
-
-  @property
-  def end_values(self):
-    return self._tab.EvaluateJavaScript(
-        'window.__renderingStats.getEndValues()')
-
-  @property
-  def deltas(self):
-    return self._tab.EvaluateJavaScript(
-        'window.__renderingStats.getDeltas()')
-
-
-def CalcFirstPaintTimeResults(results, tab):
-  if tab.browser.is_content_shell:
-    results.Add('first_paint', 'ms', 'unsupported')
-    return
-
-  tab.ExecuteJavaScript("""
-      window.__rafFired = false;
-      window.webkitRequestAnimationFrame(function() {
-        window.__rafFired = true;
-      });
-  """)
-  util.WaitFor(lambda: tab.EvaluateJavaScript('window.__rafFired'), 60)
-
-  first_paint_secs = tab.EvaluateJavaScript(
-      'window.chrome.loadTimes().firstPaintTime - ' +
-      'window.chrome.loadTimes().startLoadTime')
-
-  results.Add('first_paint', 'ms', round(first_paint_secs * 1000, 1))
-
-
-def CalcResults(benchmark_stats, results):
-  s = benchmark_stats
-
-  # List of raw frame times.
-  results.Add('frame_times', 'ms', s.frame_times)
-
-  # Arithmetic mean of frame times.
-  mean_frame_time = statistics.ArithmeticMean(s.frame_times,
-                                              len(s.frame_times))
-  results.Add('mean_frame_time', 'ms', round(mean_frame_time, 3))
-
-  # Absolute discrepancy of frame time stamps.
-  jank = statistics.FrameDiscrepancy(s.frame_timestamps)
-  results.Add('jank', '', round(jank, 4))
-
-  # Are we hitting 60 fps for 95 percent of all frames? (Boolean value)
-  # We use 17ms as a slightly looser threshold, instead of 1000.0/60.0.
-  results.Add('mostly_smooth', '',
-              statistics.Percentile(s.frame_times, 95.0) < 17.0)
-
-
-class TimelineMarkerMismatchException(page_measurement.MeasurementFailure):
-  def __init__(self):
-    super(TimelineMarkerMismatchException, self).__init__(
-        'Number or order of timeline markers does not match provided labels')
-
-
-class TimelineMarkerOverlapException(page_measurement.MeasurementFailure):
-  def __init__(self):
-    super(TimelineMarkerOverlapException, self).__init__(
-        'Overlapping timeline markers found')
-
+from metrics import Metric
 
-def FindTimelineMarkers(timeline, timeline_marker_labels):
-  """Find the timeline events with the given names.
-  If the number and order of events found does not match the labels,
-  raise an error.
-  """
-  events = []
-  if not type(timeline_marker_labels) is list:
-    timeline_marker_labels = [timeline_marker_labels]
-  for label in timeline_marker_labels:
-    if not label:
-      continue
-    events = [s for s in timeline.GetAllEventsOfName(label)
-              if s.parent_slice == None]
-  events.sort(key=attrgetter('start'))
+class SmoothnessMetric(Metric):
+  def __init__(self, rendering_stats):
+    super(SmoothnessMetric, self).__init__()
+    self.stats_ = rendering_stats
 
-  if len(events) != len(timeline_marker_labels):
-    raise TimelineMarkerMismatchException()
+  def AddResults(self, tab, results):
+    # List of raw frame times.
+    results.Add('frame_times', 'ms', self.stats_.frame_times)
 
-  for (i, event) in enumerate(events):
-    if timeline_marker_labels[i] and event.name != timeline_marker_labels[i]:
-      raise TimelineMarkerMismatchException()
+    # Arithmetic mean of frame times.
+    mean_frame_time = statistics.ArithmeticMean(self.stats_.frame_times,
+                                                len(self.stats_.frame_times))
+    results.Add('mean_frame_time', 'ms', round(mean_frame_time, 3))
 
-  for i in xrange(0, len(events)):
-    for j in xrange(i+1, len(events)):
-      if (events[j].start < events[i].start + events[i].duration):
-        raise TimelineMarkerOverlapException()
+    # Absolute discrepancy of frame time stamps.
+    jank = statistics.FrameDiscrepancy(self.stats_.frame_timestamps)
+    results.Add('jank', '', round(jank, 4))
 
-  return events
+    # Are we hitting 60 fps for 95 percent of all frames? (Boolean value)
+    # We use 17ms as a slightly looser threshold, instead of 1000.0/60.0.
+    results.Add('mostly_smooth', '',
+                statistics.Percentile(self.stats_.frame_times, 95.0) < 17.0)
diff --git a/tools/perf/metrics/smoothness_unittest.py b/tools/perf/metrics/smoothness_unittest.py
index 6a0008d..6dab9e3 100644
--- a/tools/perf/metrics/smoothness_unittest.py
+++ b/tools/perf/metrics/smoothness_unittest.py
@@ -7,7 +7,7 @@ import unittest
 
 from metrics import smoothness
 from metrics import statistics
-from metrics.rendering_stats import RenderingStats
+from metrics import rendering_stats
 from telemetry.core.backends.chrome.tracing_backend import RawTraceResultImpl
 from telemetry.core.backends.chrome.trace_result import TraceResult
 from telemetry.page import page
@@ -106,7 +106,7 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
 
     # Append start trace events for the timeline marker and gesture marker,
    # with some amount of time in between them.
-    trace_events.append({'name': smoothness.RENDER_PROCESS_MARKER,
+    trace_events.append({'name': rendering_stats.RENDER_PROCESS_MARKER,
                          'tts': mock_timer.microseconds,
                          'args': {},
                          'pid': 20978,
@@ -157,7 +157,7 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
                          'ph': 'F',  # Phase: finish.
                          'id': '0xabcde'})
     mock_timer.Advance()
-    trace_events.append({'name': smoothness.RENDER_PROCESS_MARKER,
+    trace_events.append({'name': rendering_stats.RENDER_PROCESS_MARKER,
                          'tts': mock_timer.microseconds,
                          'args': {},
                          'pid': 20978,
@@ -174,32 +174,33 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
 
     # Find the timeline marker and gesture marker in the timeline,
     # and create a RenderingStats object.
-    render_process_marker = smoothness.FindTimelineMarkers(
-        timeline, smoothness.RENDER_PROCESS_MARKER)
-    timeline_markers = smoothness.FindTimelineMarkers(
-        timeline, SYNTHETIC_GESTURE_MARKER)
-    stats = RenderingStats(
-        render_process_marker, timeline_markers, {}, True)
+    render_process_marker = timeline.FindTimelineMarkers(
+        rendering_stats.RENDER_PROCESS_MARKER)
+    timeline_markers = timeline.FindTimelineMarkers(
+        SYNTHETIC_GESTURE_MARKER)
+    stats = rendering_stats.RenderingStats(
+        render_process_marker, timeline_markers)
 
     # Make a results object and add results to it from the smoothness metric.
-    res = PageMeasurementResults()
-    res.WillMeasurePage(page.Page('http://foo.com/', None))
-    smoothness.CalcResults(stats, res)
-    res.DidMeasurePage()
+    results = PageMeasurementResults()
+    results.WillMeasurePage(page.Page('http://foo.com/', None))
+    smoothness_metric = smoothness.SmoothnessMetric(stats)
+    smoothness_metric.AddResults(None, results)
+    results.DidMeasurePage()
 
     self.assertEquals(
         expected_frame_times,
-        res.page_results[0]['frame_times'].value)
+        results.page_results[0]['frame_times'].value)
     self.assertAlmostEquals(
         1000.0 * (total_time_seconds / num_frames_sent),
-        res.page_results[0]['mean_frame_time'].value,
+        results.page_results[0]['mean_frame_time'].value,
         places=2)
     # We don't verify the correctness of the discrepancy computation itself,
     # because we have a separate unit test for that purpose.
     self.assertAlmostEquals(
         statistics.FrameDiscrepancy(stats.frame_timestamps, True),
-        res.page_results[0]['jank'].value,
+        results.page_results[0]['jank'].value,
         places=4)
     # We do not verify the correctness of Percentile here; Percentile should
@@ -208,4 +209,4 @@ class SmoothnessMetricUnitTest(unittest.TestCase):
     # in the smoothness metric.
     self.assertEquals(
         statistics.Percentile(expected_frame_times, 95.0) < 17.0,
-        res.page_results[0]['mostly_smooth'].value)
+        results.page_results[0]['mostly_smooth'].value)
diff --git a/tools/telemetry/telemetry/core/timeline/model.py b/tools/telemetry/telemetry/core/timeline/model.py
index aae0199..5efddd3 100644
--- a/tools/telemetry/telemetry/core/timeline/model.py
+++ b/tools/telemetry/telemetry/core/timeline/model.py
@@ -7,6 +7,8 @@ trace_viewer project:
 https://code.google.com/p/trace-viewer/
 '''
 
+from operator import attrgetter
+
 import telemetry.core.timeline.process as tracing_process
 
 # Register importers for data
@@ -19,6 +21,19 @@ _IMPORTERS = [
   trace_event_importer.TraceEventTimelineImporter
 ]
 
+
+class MarkerMismatchError(Exception):
+  def __init__(self):
+    super(MarkerMismatchError, self).__init__(
+        'Number or order of timeline markers does not match provided labels')
+
+
+class MarkerOverlapError(Exception):
+  def __init__(self):
+    super(MarkerOverlapError, self).__init__(
+        'Overlapping timeline markers found')
+
+
 class TimelineModel(object):
   def __init__(self, event_data=None, shift_world_to_zero=True):
     self._bounds = bounds.Bounds()
@@ -121,6 +136,38 @@ class TimelineModel(object):
       self._processes[pid] = tracing_process.Process(self, pid)
     return self._processes[pid]
 
+  def FindTimelineMarkers(self, timeline_marker_labels):
+    """Find the timeline events with the given names.
+
+    If the number and order of events found does not match the labels,
+    raise an error.
+    """
+    # Make sure labels are in a list and remove all None labels
+    if not isinstance(timeline_marker_labels, list):
+      timeline_marker_labels = [timeline_marker_labels]
+    labels = [x for x in timeline_marker_labels if x is not None]
+
+    # Gather all events that match the labels and sort them.
+    events = []
+    for label in labels:
+      events.extend([s for s in self.GetAllEventsOfName(label)
+                     if s.parent_slice == None])
+    events.sort(key=attrgetter('start'))
+
+    # Check if the number and order of events matches the provided labels,
+    # and that the events don't overlap.
+    if len(events) != len(labels):
+      raise MarkerMismatchError()
+    for (i, event) in enumerate(events):
+      if event.name != labels[i]:
+        raise MarkerMismatchError()
+    for i in xrange(0, len(events)):
+      for j in xrange(i+1, len(events)):
+        if (events[j].start < events[i].start + events[i].duration):
+          raise MarkerOverlapError()
+
+    return events
+
   def _CreateImporter(self, event_data):
     for importer_class in _IMPORTERS:
       if importer_class.CanImport(event_data):
diff --git a/tools/telemetry/telemetry/page/actions/pinch.js b/tools/telemetry/telemetry/page/actions/pinch.js
index f0dccdb..581b8b2 100644
--- a/tools/telemetry/telemetry/page/actions/pinch.js
+++ b/tools/telemetry/telemetry/page/actions/pinch.js
@@ -66,10 +66,6 @@
         this.onGestureComplete_.bind(this));
   };
 
-  PinchAction.prototype.getResults = function() {
-    return this.renderingStats_;
-  };
-
   PinchAction.prototype.onGestureComplete_ = function() {
     this.endMeasuringHook();
diff --git a/tools/telemetry/telemetry/page/actions/scroll.js b/tools/telemetry/telemetry/page/actions/scroll.js
index 2addddf..e68d91a 100644
--- a/tools/telemetry/telemetry/page/actions/scroll.js
+++ b/tools/telemetry/telemetry/page/actions/scroll.js
@@ -126,10 +126,6 @@
         this.onGestureComplete_.bind(this));
   };
 
-  ScrollAction.prototype.getResults = function() {
-    return this.renderingStats_;
-  }
-
   ScrollAction.prototype.onGestureComplete_ = function() {
     this.endMeasuringHook();