-rw-r--r-- | tools/perf/measurements/smoothness.py                          | 51 |
-rw-r--r-- | tools/perf/measurements/smoothness_unittest.py                 | 45 |
-rw-r--r-- | tools/perf/measurements/timeline_based_measurement.py          | 25 |
-rw-r--r-- | tools/perf/measurements/timeline_based_measurement_unittest.py | 20 |
-rw-r--r-- | tools/perf/metrics/smoothness.py                               | 96 |
-rw-r--r-- | tools/perf/metrics/smoothness_unittest.py                      | 57 |
-rw-r--r-- | tools/perf/metrics/timeline.py                                 |  7 |
-rw-r--r-- | tools/perf/metrics/timeline_based_metric.py                    |  1 |
-rw-r--r-- | tools/perf/metrics/timeline_interaction_record.py              | 24 |
-rw-r--r-- | tools/perf/metrics/timeline_interaction_record_unittest.py     |  2 |
-rw-r--r-- | tools/telemetry/telemetry/page/page.py                         | 13 |
11 files changed, 186 insertions, 155 deletions
diff --git a/tools/perf/measurements/smoothness.py b/tools/perf/measurements/smoothness.py
index a84a4ac..11628cc 100644
--- a/tools/perf/measurements/smoothness.py
+++ b/tools/perf/measurements/smoothness.py
@@ -4,14 +4,26 @@
 from metrics import power
 from metrics import smoothness
+from metrics import timeline_interaction_record as tir_module
+from telemetry.core.timeline.model import TimelineModel
 from telemetry.page import page_measurement
+import telemetry.core.timeline.bounds as timeline_bounds
+
+
+class MissingDisplayFrameRateError(page_measurement.MeasurementFailure):
+  def __init__(self, name):
+    super(MissingDisplayFrameRateError, self).__init__(
+        'Missing display frame rate metrics: ' + name)
+
 
 class Smoothness(page_measurement.PageMeasurement):
   def __init__(self):
     super(Smoothness, self).__init__('smoothness')
     self._smoothness_metric = None
     self._power_metric = None
+    self._timeline_model = None
+    self._actions = []
 
   def CustomizeBrowserOptions(self, options):
     options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
@@ -24,15 +36,46 @@ class Smoothness(page_measurement.PageMeasurement):
     self._power_metric = power.PowerMetric()
     self._power_metric.Start(page, tab)
     self._smoothness_metric = smoothness.SmoothnessMetric()
-    self._smoothness_metric.Start(page, tab)
+    # Start tracing for the smoothness metric.
+    custom_categories = ['webkit.console', 'benchmark']
+    custom_categories += page.GetSyntheticDelayCategories()
+    tab.browser.StartTracing(','.join(custom_categories), 60)
+    if tab.browser.platform.IsRawDisplayFrameRateSupported():
+      tab.browser.platform.StartRawDisplayFrameRateMeasurement()
 
   def DidRunAction(self, page, tab, action):
-    self._smoothness_metric.AddActionToIncludeInMetric(action)
+    self._actions.append(action)
 
   def DidRunActions(self, page, tab):
     self._power_metric.Stop(page, tab)
-    self._smoothness_metric.Stop(page, tab)
+    # Stop tracing for the smoothness metric.
+    if tab.browser.platform.IsRawDisplayFrameRateSupported():
+      tab.browser.platform.StopRawDisplayFrameRateMeasurement()
+    tracing_timeline_data = tab.browser.StopTracing()
+    self._timeline_model = TimelineModel(timeline_data=tracing_timeline_data)
 
   def MeasurePage(self, page, tab, results):
     self._power_metric.AddResults(tab, results)
-    self._smoothness_metric.AddResults(tab, results)
+    # Add results of the smoothness metric. This computes the smoothness
+    # metric for the time range between when the first action starts and the
+    # last action ends. To get a measurement for each action, use
+    # measurement.TimelineBasedMeasurement instead.
+    time_bounds = timeline_bounds.Bounds()
+    for action in self._actions:
+      time_bounds.AddBounds(
+          action.GetActiveRangeOnTimeline(self._timeline_model))
+    # Create an interaction_record for this legacy measurement. Since we
+    # don't wrap the results that are sent to the smoothness metric, the
+    # logical_name will not be used.
+    interaction_record = tir_module.TimelineInteractionRecord(
+        'smoothness_interaction', time_bounds.min, time_bounds.max)
+    renderer_thread = self._timeline_model.GetRendererThreadFromTab(tab)
+    self._smoothness_metric.AddResults(self._timeline_model,
+                                       renderer_thread,
+                                       interaction_record,
+                                       results)
+    if tab.browser.platform.IsRawDisplayFrameRateSupported():
+      for r in tab.browser.platform.GetRawDisplayFrameRateMeasurements():
+        if r.value is None:
+          raise MissingDisplayFrameRateError(r.name)
+        results.Add(r.name, r.unit, r.value)
diff --git a/tools/perf/measurements/smoothness_unittest.py b/tools/perf/measurements/smoothness_unittest.py
index deacd37..def8141 100644
--- a/tools/perf/measurements/smoothness_unittest.py
+++ b/tools/perf/measurements/smoothness_unittest.py
@@ -3,9 +3,31 @@
 # found in the LICENSE file.
 from measurements import smoothness
 from telemetry.core import wpr_modes
+from telemetry.page import page
 from telemetry.page import page_measurement_unittest_base
 from telemetry.unittest import options_for_unittests
 
 
+class FakePlatform(object):
+  def IsRawDisplayFrameRateSupported(self):
+    return False
+
+
+class FakeBrowser(object):
+  def __init__(self):
+    self.platform = FakePlatform()
+    self.category_filter = None
+
+  def StartTracing(self, category_filter, _):
+    self.category_filter = category_filter
+
+
+class FakeTab(object):
+  def __init__(self):
+    self.browser = FakeBrowser()
+
+  def ExecuteJavaScript(self, js):
+    pass
+
+
 class SmoothnessUnitTest(
     page_measurement_unittest_base.PageMeasurementUnitTestBase):
   """Smoke test for smoothness measurement
@@ -14,8 +36,31 @@ class SmoothnessUnitTest(
      that all metrics were added to the results. The test is purely functional,
      i.e. it only checks if the metrics are present and non-zero.
   """
+  def testSyntheticDelayConfiguration(self):
+    attributes = {
+      'synthetic_delays': {
+        'cc.BeginMainFrame': { 'target_duration': 0.012 },
+        'cc.DrawAndSwap': { 'target_duration': 0.012, 'mode': 'alternating' },
+        'gpu.SwapBuffers': { 'target_duration': 0.012 }
+      }
+    }
+    test_page = page.Page('http://dummy', None, attributes=attributes)
+    tab = FakeTab()
+    measurement = smoothness.Smoothness()
+    measurement.WillRunActions(test_page, tab)
+
+    expected_category_filter = [
+      'DELAY(cc.BeginMainFrame;0.012000;static)',
+      'DELAY(cc.DrawAndSwap;0.012000;alternating)',
+      'DELAY(gpu.SwapBuffers;0.012000;static)',
+      'benchmark',
+      'webkit.console'
+    ]
+    self.assertEquals(expected_category_filter,
+                      sorted(tab.browser.category_filter.split(',')))
+
   def setUp(self):
     self._options = options_for_unittests.GetCopy()
     self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF
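Note: the category filter that WillRunActions assembles is plain string concatenation, so its shape can be checked without a browser, which is what the unit test above does. Below is a minimal, self-contained sketch of that construction; synthetic_delay_categories is a stand-in that mirrors Page.GetSyntheticDelayCategories added later in this CL, and the dict mirrors the test's attributes.

    # Sketch: how the smoothness measurement's tracing filter is built.
    synthetic_delays = {
        'cc.BeginMainFrame': {'target_duration': 0.012},
        'cc.DrawAndSwap': {'target_duration': 0.012, 'mode': 'alternating'},
        'gpu.SwapBuffers': {'target_duration': 0.012},
    }

    def synthetic_delay_categories(delays):
      # Same DELAY(name;duration;mode) format as GetSyntheticDelayCategories.
      return ['DELAY(%s;%f;%s)' % (name,
                                   options.get('target_duration', 0),
                                   options.get('mode', 'static'))
              for name, options in delays.items()]

    custom_categories = ['webkit.console', 'benchmark']
    custom_categories += synthetic_delay_categories(synthetic_delays)
    # The joined string is what gets passed to tab.browser.StartTracing(...),
    # e.g. 'webkit.console,benchmark,DELAY(cc.BeginMainFrame;0.012000;static),...'
    print(','.join(custom_categories))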
diff --git a/tools/perf/measurements/timeline_based_measurement.py b/tools/perf/measurements/timeline_based_measurement.py
index a9e1ecc..f1af12d 100644
--- a/tools/perf/measurements/timeline_based_measurement.py
+++ b/tools/perf/measurements/timeline_based_measurement.py
@@ -4,11 +4,11 @@
 from metrics import timeline as timeline_module
 from metrics import timeline_interaction_record as tir_module
+from metrics import smoothness
 from telemetry.page import page_measurement
 from telemetry.core.timeline import model as model_module
-
 # TimelineBasedMeasurement considers all instrumentation as producing a single
 # timeline. But, depending on the amount of instrumentation that is enabled,
 # overhead increases. The user of the measurement must therefore choose between
@@ -24,6 +24,21 @@ ALL_OVERHEAD_LEVELS = [
 ]
 
 
+class _ResultsWrapper(object):
+  def __init__(self, results, interaction_record):
+    self._results = results
+    self._interaction_record = interaction_record
+
+  def Add(self, trace_name, units, value, chart_name=None, data_type='default'):
+    trace_name = self._interaction_record.GetResultNameFor(trace_name)
+    self._results.Add(trace_name, units, value, chart_name, data_type)
+
+  def AddSummary(self, trace_name, units, value, chart_name=None,
+                 data_type='default'):
+    trace_name = self._interaction_record.GetResultNameFor(trace_name)
+    self._results.AddSummary(trace_name, units, value, chart_name, data_type)
+
+
 class _TimelineBasedMetrics(object):
   def __init__(self, model, renderer_thread):
     self._model = model
@@ -31,14 +46,14 @@ class _TimelineBasedMetrics(object):
   def FindTimelineInteractionRecords(self):
     # TODO(nduca): Add support for page-load interaction record.
-    return [tir_module.TimelineInteractionRecord(event) for
+    return [tir_module.TimelineInteractionRecord.FromEvent(event) for
             event in self._renderer_thread.IterAllAsyncSlices()
             if tir_module.IsTimelineInteractionRecord(event.name)]
 
   def CreateMetricsForTimelineInteractionRecord(self, interaction):
     res = []
     if interaction.is_smooth:
-      pass # TODO(nduca): res.append smoothness metric instance.
+      res.append(smoothness.SmoothnessMetric())
     return res
 
   def AddResults(self, results):
@@ -47,9 +62,10 @@ class _TimelineBasedMetrics(object):
       raise Exception('Expected at least one Interaction on the page')
     for interaction in interactions:
       metrics = self.CreateMetricsForTimelineInteractionRecord(interaction)
+      wrapped_results = _ResultsWrapper(results, interaction)
      for m in metrics:
         m.AddResults(self._model, self._renderer_thread,
-                     interaction, results)
+                     interaction, wrapped_results)
 
 
 class TimelineBasedMeasurement(page_measurement.PageMeasurement):
@@ -99,6 +115,7 @@ class TimelineBasedMeasurement(page_measurement.PageMeasurement):
       categories = ''
     else:
       categories = '*,disabled-by-default-cc.debug'
+    categories = ','.join([categories] + page.GetSyntheticDelayCategories())
     tab.browser.StartTracing(categories)
 
   def MeasurePage(self, page, tab, results):
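The _ResultsWrapper shim above only rewrites trace names before delegating; everything else passes through untouched. A minimal sketch of that effect, using stand-in results and interaction objects (not the real PageMeasurementResults or TimelineInteractionRecord APIs):

    # Stand-ins for illustration; only the name-prefixing behavior matters.
    class FakeResults(object):
      def __init__(self):
        self.values = {}

      def Add(self, trace_name, units, value, chart_name=None,
              data_type='default'):
        self.values[trace_name] = (units, value)

    class FakeInteraction(object):
      def GetResultNameFor(self, result_name):
        return 'CenterAnimation-%s' % result_name

    class ResultsWrapper(object):  # same shape as _ResultsWrapper above
      def __init__(self, results, interaction_record):
        self._results = results
        self._interaction_record = interaction_record

      def Add(self, trace_name, units, value, chart_name=None,
              data_type='default'):
        trace_name = self._interaction_record.GetResultNameFor(trace_name)
        self._results.Add(trace_name, units, value, chart_name, data_type)

    results = FakeResults()
    wrapped = ResultsWrapper(results, FakeInteraction())
    wrapped.Add('jank', 'ms', 12.5)
    assert results.values == {'CenterAnimation-jank': ('ms', 12.5)}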
diff --git a/tools/perf/measurements/timeline_based_measurement_unittest.py b/tools/perf/measurements/timeline_based_measurement_unittest.py
index 8c79593..694e0a8 100644
--- a/tools/perf/measurements/timeline_based_measurement_unittest.py
+++ b/tools/perf/measurements/timeline_based_measurement_unittest.py
@@ -57,15 +57,13 @@ class TimelineBasedMetricsTests(unittest.TestCase):
     class FakeSmoothMetric(timeline_based_metric.TimelineBasedMetric):
       def AddResults(self, model, renderer_thread,
                      interaction_record, results):
-        results.Add(
-            interaction_record.GetResultNameFor('FakeSmoothMetric'), 'ms', 1)
+        results.Add('FakeSmoothMetric', 'ms', 1)
 
     class FakeLoadingMetric(timeline_based_metric.TimelineBasedMetric):
       def AddResults(self, model, renderer_thread,
                      interaction_record, results):
         assert interaction_record.logical_name == 'LogicalName2'
-        results.Add(
-            interaction_record.GetResultNameFor('FakeLoadingMetric'), 'ms', 2)
+        results.Add('FakeLoadingMetric', 'ms', 2)
 
     class TimelineBasedMetricsWithFakeMetricHandler(
         tbm_module._TimelineBasedMetrics):  # pylint: disable=W0212
@@ -90,9 +88,9 @@ class TimelineBasedMetricsTests(unittest.TestCase):
     metric.AddResults(results)
     results.DidMeasurePage()
 
-    v = results.FindAllPageSpecificValuesNamed('LogicalName1/FakeSmoothMetric')
+    v = results.FindAllPageSpecificValuesNamed('LogicalName1-FakeSmoothMetric')
     self.assertEquals(len(v), 1)
-    v = results.FindAllPageSpecificValuesNamed('LogicalName2/FakeLoadingMetric')
+    v = results.FindAllPageSpecificValuesNamed('LogicalName2-FakeLoadingMetric')
     self.assertEquals(len(v), 1)
 
@@ -105,9 +103,15 @@ class TimelineBasedMeasurementTest(
   def testTimelineBasedForSmoke(self):
     ps = self.CreatePageSetFromFileInUnittestDataDir(
         'interaction_enabled_page.html')
-    setattr(ps.pages[0], 'smoothness', {'action': 'wait',
-                                        'javascript': 'window.animationDone'})
+    setattr(ps.pages[0], 'smoothness', [{'action': 'scroll'},
+                                        {'action': 'wait',
+                                         'javascript': 'window.animationDone'}])
     measurement = tbm_module.TimelineBasedMeasurement()
     results = self.RunMeasurement(measurement, ps, options=self._options)
     self.assertEquals(0, len(results.failures))
+    v = results.FindAllPageSpecificValuesNamed('CenterAnimation-jank')
+    self.assertEquals(len(v), 2)
+    v = results.FindAllPageSpecificValuesNamed('DrawerAnimation-jank')
+    self.assertEquals(len(v), 2)
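The fake metrics in this test show the entire contract a timeline-based metric has to satisfy: a single AddResults(model, renderer_thread, interaction_record, results) method. For instance, a hypothetical metric that reports interaction duration (not part of this CL) would be as small as:

    # DurationMetric is hypothetical, shown only to illustrate the contract.
    from metrics import timeline_based_metric

    class DurationMetric(timeline_based_metric.TimelineBasedMetric):
      def AddResults(self, model, renderer_thread, interaction_record, results):
        # The _ResultsWrapper created per interaction prefixes this name,
        # so it surfaces as, e.g., 'CenterAnimation-duration'.
        results.Add('duration', 'ms',
                    interaction_record.end - interaction_record.start)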
diff --git a/tools/perf/metrics/smoothness.py b/tools/perf/metrics/smoothness.py
index 4041900..50756e9 100644
--- a/tools/perf/metrics/smoothness.py
+++ b/tools/perf/metrics/smoothness.py
@@ -2,20 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from metrics import Metric
+from metrics import timeline_based_metric
 from metrics import rendering_stats
 from metrics import statistics
+from telemetry.core.timeline import bounds
 from telemetry.page import page_measurement
 from telemetry.page.perf_tests_helper import FlattenList
-from telemetry.core.timeline.model import TimelineModel
 
-TIMELINE_MARKER = 'Smoothness'
-
-
-class MissingDisplayFrameRateError(page_measurement.MeasurementFailure):
-  def __init__(self, name):
-    super(MissingDisplayFrameRateError, self).__init__(
-        'Missing display frame rate metrics: ' + name)
 
 class NotEnoughFramesError(page_measurement.MeasurementFailure):
   def __init__(self):
@@ -28,87 +21,52 @@ class NoSupportedActionError(page_measurement.MeasurementFailure):
     super(NoSupportedActionError, self).__init__(
         'None of the actions is supported by smoothness measurement')
 
-
-def _GetSyntheticDelayCategoriesFromPage(page):
-  if not hasattr(page, 'synthetic_delays'):
-    return []
-  result = []
-  for delay, options in page.synthetic_delays.items():
-    options = '%f;%s' % (options.get('target_duration', 0),
-                         options.get('mode', 'static'))
-    result.append('DELAY(%s;%s)' % (delay, options))
-  return result
-
-
-class SmoothnessMetric(Metric):
+class SmoothnessMetric(timeline_based_metric.TimelineBasedMetric):
   def __init__(self):
     super(SmoothnessMetric, self).__init__()
-    self._stats = None
-    self._actions = []
-
-  def AddActionToIncludeInMetric(self, action):
-    self._actions.append(action)
 
-  def Start(self, page, tab):
-    custom_categories = ['webkit.console', 'benchmark']
-    custom_categories += _GetSyntheticDelayCategoriesFromPage(page)
-    tab.browser.StartTracing(','.join(custom_categories), 60)
-    tab.ExecuteJavaScript('console.time("' + TIMELINE_MARKER + '")')
-    if tab.browser.platform.IsRawDisplayFrameRateSupported():
-      tab.browser.platform.StartRawDisplayFrameRateMeasurement()
-
-  def Stop(self, page, tab):
-    if tab.browser.platform.IsRawDisplayFrameRateSupported():
-      tab.browser.platform.StopRawDisplayFrameRateMeasurement()
-    tab.ExecuteJavaScript('console.timeEnd("' + TIMELINE_MARKER + '")')
-    tracing_timeline_data = tab.browser.StopTracing()
-    timeline_model = TimelineModel(timeline_data=tracing_timeline_data)
-    timeline_ranges = [ action.GetActiveRangeOnTimeline(timeline_model)
-                        for action in self._actions ]
-
-    renderer_process = timeline_model.GetRendererProcessFromTab(tab)
-    self._stats = rendering_stats.RenderingStats(
-        renderer_process, timeline_model.browser_process, timeline_ranges)
-
-    if not self._stats.frame_times:
-      raise NotEnoughFramesError()
-
-  def AddResults(self, tab, results):
-    if self._stats.mouse_wheel_scroll_latency:
+  def AddResults(self, model, renderer_thread, interaction_record, results):
+    renderer_process = renderer_thread.parent
+    time_bounds = bounds.Bounds()
+    time_bounds.AddValue(interaction_record.start)
+    time_bounds.AddValue(interaction_record.end)
+    stats = rendering_stats.RenderingStats(
+        renderer_process, model.browser_process, [time_bounds])
+    if stats.mouse_wheel_scroll_latency:
       mean_mouse_wheel_scroll_latency = statistics.ArithmeticMean(
-          self._stats.mouse_wheel_scroll_latency,
-          len(self._stats.mouse_wheel_scroll_latency))
+          stats.mouse_wheel_scroll_latency,
+          len(stats.mouse_wheel_scroll_latency))
       mouse_wheel_scroll_latency_discrepancy = statistics.DurationsDiscrepancy(
-          self._stats.mouse_wheel_scroll_latency)
+          stats.mouse_wheel_scroll_latency)
       results.Add('mean_mouse_wheel_scroll_latency', 'ms',
                   round(mean_mouse_wheel_scroll_latency, 3))
       results.Add('mouse_wheel_scroll_latency_discrepancy', '',
                   round(mouse_wheel_scroll_latency_discrepancy, 4))
-    if self._stats.touch_scroll_latency:
+    if stats.touch_scroll_latency:
       mean_touch_scroll_latency = statistics.ArithmeticMean(
-          self._stats.touch_scroll_latency,
-          len(self._stats.touch_scroll_latency))
+          stats.touch_scroll_latency,
+          len(stats.touch_scroll_latency))
       touch_scroll_latency_discrepancy = statistics.DurationsDiscrepancy(
-          self._stats.touch_scroll_latency)
+          stats.touch_scroll_latency)
       results.Add('mean_touch_scroll_latency', 'ms',
                   round(mean_touch_scroll_latency, 3))
       results.Add('touch_scroll_latency_discrepancy', '',
                   round(touch_scroll_latency_discrepancy, 4))
-    if self._stats.js_touch_scroll_latency:
+    if stats.js_touch_scroll_latency:
       mean_js_touch_scroll_latency = statistics.ArithmeticMean(
-          self._stats.js_touch_scroll_latency,
-          len(self._stats.js_touch_scroll_latency))
+          stats.js_touch_scroll_latency,
+          len(stats.js_touch_scroll_latency))
       js_touch_scroll_latency_discrepancy = statistics.DurationsDiscrepancy(
-          self._stats.js_touch_scroll_latency)
+          stats.js_touch_scroll_latency)
       results.Add('mean_js_touch_scroll_latency', 'ms',
                   round(mean_js_touch_scroll_latency, 3))
       results.Add('js_touch_scroll_latency_discrepancy', '',
                   round(js_touch_scroll_latency_discrepancy, 4))
 
     # List of raw frame times.
-    frame_times = FlattenList(self._stats.frame_times)
+    frame_times = FlattenList(stats.frame_times)
     results.Add('frame_times', 'ms', frame_times)
 
     # Arithmetic mean of frame times.
@@ -118,16 +76,10 @@ class SmoothnessMetric(Metric):
 
     # Absolute discrepancy of frame time stamps.
     frame_discrepancy = statistics.TimestampsDiscrepancy(
-        self._stats.frame_timestamps)
+        stats.frame_timestamps)
     results.Add('jank', 'ms', round(frame_discrepancy, 4))
 
     # Are we hitting 60 fps for 95 percent of all frames?
     # We use 19ms as a somewhat looser threshold, instead of 1000.0/60.0.
     percentile_95 = statistics.Percentile(frame_times, 95.0)
     results.Add('mostly_smooth', 'score',
                 1.0 if percentile_95 < 19.0 else 0.0)
-
-    if tab.browser.platform.IsRawDisplayFrameRateSupported():
-      for r in tab.browser.platform.GetRawDisplayFrameRateMeasurements():
-        if r.value is None:
-          raise MissingDisplayFrameRateError(r.name)
-        results.Add(r.name, r.unit, r.value)
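The frame-based results at the end of AddResults are simple enough to work through by hand. A sketch with plain-Python stand-ins (the real statistics.TimestampsDiscrepancy and statistics.Percentile are considerably more involved than these one-liners):

    # Illustrative frame times in ms; one janky 35 ms frame among 16.7 ms ones.
    frame_times = [16.7, 16.7, 35.0, 16.7, 16.7]

    mean_frame_time = round(sum(frame_times) / len(frame_times), 3)

    def percentile(values, p):
      # Nearest-rank stand-in for statistics.Percentile.
      ordered = sorted(values)
      return ordered[int(round((p / 100.0) * (len(ordered) - 1)))]

    # 'mostly_smooth' is 1.0 only when the 95th percentile of frame times
    # beats the 19 ms threshold (slightly looser than 1000.0 / 60.0 = 16.67 ms).
    mostly_smooth = 1.0 if percentile(frame_times, 95.0) < 19.0 else 0.0

    print(mean_frame_time)  # 20.36
    print(mostly_smooth)    # 0.0 -- the 35 ms frame blows the budget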
diff --git a/tools/perf/metrics/smoothness_unittest.py b/tools/perf/metrics/smoothness_unittest.py
deleted file mode 100644
index afcaed7..0000000
--- a/tools/perf/metrics/smoothness_unittest.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import unittest
-
-from metrics import smoothness
-
-from telemetry.page import page
-
-
-class FakePlatform(object):
-  def IsRawDisplayFrameRateSupported(self):
-    return False
-
-
-class FakeBrowser(object):
-  def __init__(self):
-    self.platform = FakePlatform()
-    self.category_filter = None
-
-  def StartTracing(self, category_filter, _):
-    self.category_filter = category_filter
-
-
-class FakeTab(object):
-  def __init__(self):
-    self.browser = FakeBrowser()
-
-  def ExecuteJavaScript(self, js):
-    pass
-
-
-class SmoothnessMetricUnitTest(unittest.TestCase):
-  def testSyntheticDelayConfiguration(self):
-    attributes = {
-      'synthetic_delays': {
-        'cc.BeginMainFrame': { 'target_duration': 0.012 },
-        'cc.DrawAndSwap': { 'target_duration': 0.012, 'mode': 'alternating' },
-        'gpu.SwapBuffers': { 'target_duration': 0.012 }
-      }
-    }
-    test_page = page.Page('http://dummy', None, attributes=attributes)
-
-    tab = FakeTab()
-    smoothness_metric = smoothness.SmoothnessMetric()
-    smoothness_metric.Start(test_page, tab)
-
-    expected_category_filter = [
-      'DELAY(cc.BeginMainFrame;0.012000;static)',
-      'DELAY(cc.DrawAndSwap;0.012000;alternating)',
-      'DELAY(gpu.SwapBuffers;0.012000;static)',
-      'benchmark',
-      'webkit.console'
-    ]
-    self.assertEquals(expected_category_filter,
-                      sorted(tab.browser.category_filter.split(',')))
diff --git a/tools/perf/metrics/timeline.py b/tools/perf/metrics/timeline.py
index 9313d99..89b1b7f 100644
--- a/tools/perf/metrics/timeline.py
+++ b/tools/perf/metrics/timeline.py
@@ -51,7 +51,12 @@ class TimelineMetric(Metric):
     if self._mode == TRACING_MODE:
       if not tab.browser.supports_tracing:
         raise Exception('Not supported')
-      tab.browser.StartTracing(self.trace_categories)
+      if self.trace_categories:
+        categories = [self.trace_categories] + \
+            page.GetSyntheticDelayCategories()
+      else:
+        categories = page.GetSyntheticDelayCategories()
+      tab.browser.StartTracing(','.join(categories))
     else:
       assert self._mode == TIMELINE_MODE
       tab.StartTimelineRecording()
diff --git a/tools/perf/metrics/timeline_based_metric.py b/tools/perf/metrics/timeline_based_metric.py
index 781c61f..ecfc343 100644
--- a/tools/perf/metrics/timeline_based_metric.py
+++ b/tools/perf/metrics/timeline_based_metric.py
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+
 class TimelineBasedMetric(object):
   def __init__(self):
     """Computes metrics from a telemetry.core.timeline Model and a range
diff --git a/tools/perf/metrics/timeline_interaction_record.py b/tools/perf/metrics/timeline_interaction_record.py
index 87d575d..c2da4ad 100644
--- a/tools/perf/metrics/timeline_interaction_record.py
+++ b/tools/perf/metrics/timeline_interaction_record.py
@@ -46,13 +46,19 @@ class TimelineInteractionRecord(object):
   time-range.
   """
 
-  def __init__(self, event):
-    self.start = event.start
-    self.end = event.end
+  def __init__(self, logical_name, start, end):
+    assert logical_name
+    self.logical_name = logical_name
+    self.start = start
+    self.end = end
+    self.is_smooth = False
+    self.is_loading_resources = False
 
+  @staticmethod
+  def FromEvent(event):
     m = re.match('Interaction\.(.+)\/(.+)', event.name)
     if m:
-      self.logical_name = m.group(1)
+      logical_name = m.group(1)
       if m.group(1) != '':
         flags = m.group(2).split(',')
       else:
@@ -60,15 +66,17 @@ class TimelineInteractionRecord(object):
     else:
       m = re.match('Interaction\.(.+)', event.name)
       assert m
-      self.logical_name = m.group(1)
+      logical_name = m.group(1)
       flags = []
 
+    record = TimelineInteractionRecord(logical_name, event.start, event.end)
     for f in flags:
       if not f in ('is_smooth', 'is_loading_resources'):
         raise Exception(
             'Unrecognized flag in timeline Interaction record: %s' % f)
-    self.is_smooth = 'is_smooth' in flags
-    self.is_loading_resources = 'is_loading_resources' in flags
+    record.is_smooth = 'is_smooth' in flags
+    record.is_loading_resources = 'is_loading_resources' in flags
+    return record
 
   def GetResultNameFor(self, result_name):
-    return "%s/%s" % (self.logical_name, result_name)
+    return "%s-%s" % (self.logical_name, result_name)
diff --git a/tools/perf/metrics/timeline_interaction_record_unittest.py b/tools/perf/metrics/timeline_interaction_record_unittest.py
index 99cf017..8bc80bd 100644
--- a/tools/perf/metrics/timeline_interaction_record_unittest.py
+++ b/tools/perf/metrics/timeline_interaction_record_unittest.py
@@ -21,7 +21,7 @@ class ParseTests(unittest.TestCase):
     s = async_slice.AsyncSlice(
         'cat', event_name,
         timestamp=1, duration=2)
-    return timeline_interaction_record.TimelineInteractionRecord(s)
+    return timeline_interaction_record.TimelineInteractionRecord.FromEvent(s)
 
   def testCreate(self):
     r = self.CreateRecord('Interaction.LogicalName')
diff --git a/tools/telemetry/telemetry/page/page.py b/tools/telemetry/telemetry/page/page.py
index 337b11c..b0e3129 100644
--- a/tools/telemetry/telemetry/page/page.py
+++ b/tools/telemetry/telemetry/page/page.py
@@ -6,6 +6,8 @@
 import os
 import re
 import urlparse
 
+from telemetry import decorators
+
 
 class Page(object):
   def __init__(self, url, page_set, attributes=None, base_dir=None):
@@ -33,6 +35,17 @@ class Page(object):
     raise AttributeError(
         '%r object has no attribute %r' % (self.__class__, name))
 
+  @decorators.Cache
+  def GetSyntheticDelayCategories(self):
+    if not hasattr(self, 'synthetic_delays'):
+      return []
+    result = []
+    for delay, options in self.synthetic_delays.items():
+      options = '%f;%s' % (options.get('target_duration', 0),
+                           options.get('mode', 'static'))
+      result.append('DELAY(%s;%s)' % (delay, options))
+    return result
+
   def __lt__(self, other):
     return self.url < other.url
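Finally, the new FromEvent factory keeps the old parsing logic: async trace events named 'Interaction.&lt;logical_name&gt;/&lt;flags&gt;' become records. A stand-alone sketch of that parsing, with FakeEvent standing in for the real async slice:

    import re

    class FakeEvent(object):  # stand-in for telemetry's async slice
      def __init__(self, name, start, end):
        self.name, self.start, self.end = name, start, end

    event = FakeEvent('Interaction.DrawerAnimation/is_smooth', 10, 30)
    m = re.match(r'Interaction\.(.+)\/(.+)', event.name)
    logical_name, flags = m.group(1), m.group(2).split(',')
    assert logical_name == 'DrawerAnimation'
    assert flags == ['is_smooth']
    # With the '-' separator introduced above, GetResultNameFor('jank') on
    # such a record yields 'DrawerAnimation-jank', matching the unit tests.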