author    qyearsley@chromium.org <qyearsley@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-10-03 17:43:24 +0000
committer qyearsley@chromium.org <qyearsley@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-10-03 17:43:24 +0000
commit    753be54288e18fcf418910c821b054601325aa81 (patch)
tree      d4532e72cf737cb8594e5a7ec87e03fe0fdfed39 /tools
parent    06b88eea33121483727117b1d7cf7027a4210baa (diff)
Fix smoothness metric unit test
BUG=
Review URL: https://codereview.chromium.org/24480002
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@226785 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'tools')
-rw-r--r--  tools/perf/measurements/smoothness.py     |  20
-rw-r--r--  tools/perf/metrics/smoothness.py          |  45
-rw-r--r--  tools/perf/metrics/smoothness_unittest.py | 342
3 files changed, 166 insertions(+), 241 deletions(-)
diff --git a/tools/perf/measurements/smoothness.py b/tools/perf/measurements/smoothness.py
index 93b61b2..1d42ad1 100644
--- a/tools/perf/measurements/smoothness.py
+++ b/tools/perf/measurements/smoothness.py
@@ -19,12 +19,6 @@ class MissingDisplayFrameRate(page_measurement.MeasurementFailure):
'Missing display frame rate metrics: ' + name)
-class MissingTimelineMarker(page_measurement.MeasurementFailure):
- def __init__(self, name):
- super(MissingTimelineMarker, self).__init__(
- 'Timeline marker not found: ' + name)
-
-
class Smoothness(page_measurement.PageMeasurement):
def __init__(self):
super(Smoothness, self).__init__('smoothness')
@@ -64,14 +58,6 @@ class Smoothness(page_measurement.PageMeasurement):
self._metrics.Stop()
self._trace_result = tab.browser.StopTracing()
- def FindTimelineMarker(self, timeline, name):
- events = [s for
- s in timeline.GetAllEventsOfName(name)
- if s.parent_slice == None]
- if len(events) != 1:
- raise MissingTimelineMarker(name)
- return events[0]
-
def MeasurePage(self, page, tab, results):
rendering_stats_deltas = self._metrics.deltas
@@ -79,14 +65,14 @@ class Smoothness(page_measurement.PageMeasurement):
raise DidNotScrollException()
timeline = self._trace_result.AsTimelineModel()
- smoothness_marker = self.FindTimelineMarker(timeline,
+ smoothness_marker = smoothness.FindTimelineMarker(timeline,
smoothness.TIMELINE_MARKER)
# TODO(dominikg): remove try..except once CL 23532057 has been rolled to the
# reference builds?
try:
- gesture_marker = self.FindTimelineMarker(timeline,
+ gesture_marker = smoothness.FindTimelineMarker(timeline,
smoothness.SYNTHETIC_GESTURE_MARKER)
- except MissingTimelineMarker:
+ except smoothness.MissingTimelineMarker:
logging.warning(
'No gesture marker found in timeline; using smoothness marker instead.')
gesture_marker = smoothness_marker
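
The two blocks removed above now live in tools/perf/metrics/smoothness.py (added at the end of the next file's diff). A minimal sketch of the new call pattern, assuming a timeline obtained from trace_result.AsTimelineModel() as in MeasurePage (the helper name GetMarkers is ours):

import logging

from metrics import smoothness

def GetMarkers(timeline):
  # Mirrors Smoothness.MeasurePage: look up both top-level marker events.
  smoothness_marker = smoothness.FindTimelineMarker(
      timeline, smoothness.TIMELINE_MARKER)
  try:
    gesture_marker = smoothness.FindTimelineMarker(
        timeline, smoothness.SYNTHETIC_GESTURE_MARKER)
  except smoothness.MissingTimelineMarker:
    # Fallback kept until CL 23532057 reaches the reference builds.
    logging.warning('No gesture marker found in timeline; '
                    'using smoothness marker instead.')
    gesture_marker = smoothness_marker
  return smoothness_marker, gesture_marker
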
diff --git a/tools/perf/metrics/smoothness.py b/tools/perf/metrics/smoothness.py
index 40b9e43..ed89f8b 100644
--- a/tools/perf/metrics/smoothness.py
+++ b/tools/perf/metrics/smoothness.py
@@ -4,8 +4,9 @@
import os
import math
-from telemetry.core import util
from metrics import discrepancy
+from telemetry.core import util
+from telemetry.page import page_measurement
TIMELINE_MARKER = 'smoothness_scroll'
SYNTHETIC_GESTURE_MARKER = 'SyntheticGestureController::running'
@@ -68,6 +69,7 @@ class SmoothnessMetrics(object):
return self._tab.EvaluateJavaScript(
'window.__renderingStats.getDeltas()')
+
def Total(data):
if type(data) == float:
total = data
@@ -79,7 +81,8 @@ def Total(data):
raise TypeError
return total
-def Average(numerator, denominator, scale = None, precision = None):
+
+def Average(numerator, denominator, scale=None, precision=None):
numerator_total = Total(numerator)
denominator_total = Total(denominator)
if denominator_total == 0:
@@ -95,14 +98,16 @@ def Average(numerator, denominator, scale = None, precision = None):
avg = round(avg, precision)
return avg
+
def DivideIfPossibleOrZero(numerator, denominator):
if not denominator:
return 0.0
else:
return numerator / denominator
+
def GeneralizedMean(values, exponent):
- ''' http://en.wikipedia.org/wiki/Generalized_mean '''
+ """See http://en.wikipedia.org/wiki/Generalized_mean"""
if not values:
return 0.0
sum_of_powers = 0.0
@@ -110,6 +115,7 @@ def GeneralizedMean(values, exponent):
sum_of_powers += v ** exponent
return (sum_of_powers / len(values)) ** (1.0/exponent)
+
def Median(values):
if not values:
return 0.0
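
Two quick sanity checks for GeneralizedMean as completed above (examples are ours): exponent 1.0 reduces it to the arithmetic mean, and exponent -1.0 to the harmonic mean.

assert GeneralizedMean([2.0, 8.0], 1.0) == 5.0               # arithmetic mean
assert abs(GeneralizedMean([2.0, 8.0], -1.0) - 3.2) < 1e-9   # harmonic mean
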
@@ -121,8 +127,9 @@ def Median(values):
median = 0.5 * (sorted_values[n/2] + sorted_values[n/2 - 1])
return median
+
def Percentile(values, percentile):
- ''' Computed using linear interpolation between closest ranks. '''
+ """Computed using linear interpolation between closest ranks."""
if not values:
return 0.0
sorted_values = sorted(values)
@@ -139,6 +146,7 @@ def Percentile(values, percentile):
alpha = n * percentile - 0.5 - floor_index
return floor_value + alpha * (ceil_value - floor_value)
+
def CalcFirstPaintTimeResults(results, tab):
if tab.browser.is_content_shell:
results.Add('first_paint', 'ms', 'unsupported')
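
The hunks above show only the head and tail of Percentile; the middle is elided by the diff. A sketch of the linear-interpolation scheme the visible lines imply (the /100.0 scaling and the boundary clamps are our assumptions, consistent with the Percentile(frame_times, 95.0) call in CalcResults below):

import math

def PercentileSketch(values, percentile):
  # Hedged reconstruction, not necessarily the committed implementation.
  if not values:
    return 0.0
  sorted_values = sorted(values)
  n = len(values)
  fraction = percentile / 100.0   # assumed: callers pass e.g. 95.0
  if fraction <= 0.5 / n:         # below the first rank: clamp to minimum
    return sorted_values[0]
  if fraction >= (n - 0.5) / n:   # above the last rank: clamp to maximum
    return sorted_values[-1]
  floor_index = int(math.floor(n * fraction - 0.5))
  floor_value = sorted_values[floor_index]
  ceil_value = sorted_values[floor_index + 1]
  alpha = n * fraction - 0.5 - floor_index
  return floor_value + alpha * (ceil_value - floor_value)

For example, PercentileSketch([1.0, 2.0, 3.0, 4.0], 50.0) interpolates halfway between the two middle ranks and returns 2.5, matching Median above.
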
@@ -158,6 +166,7 @@ def CalcFirstPaintTimeResults(results, tab):
results.Add('first_paint', 'ms', round(first_paint_secs * 1000, 1))
+
def CalcResults(benchmark_stats, results):
s = benchmark_stats
@@ -166,16 +175,38 @@ def CalcResults(benchmark_stats, results):
frame_times.append(
round(s.screen_frame_timestamps[i] - s.screen_frame_timestamps[i-1], 2))
- # Scroll Results
+ # List of raw frame times.
results.Add('frame_times', 'ms', frame_times)
- # Arithmetic mean of frame times.
+
+ # Arithmetic mean of frame times. Not the generalized mean.
results.Add('mean_frame_time', 'ms',
Average(s.total_time, s.screen_frame_count, 1000, 3))
+
# Absolute discrepancy of frame time stamps.
results.Add('jank', '',
round(discrepancy.FrameDiscrepancy(s.screen_frame_timestamps,
True), 4))
- # Are we hitting 60 fps for 95 percent of all frames?
+
+ # Are we hitting 60 fps for 95 percent of all frames? (Boolean value)
# We use 17ms as a slightly looser threshold, instead of 1000.0/60.0.
results.Add('mostly_smooth', '',
Percentile(frame_times, 95.0) < 17.0)
+
+
+class MissingTimelineMarker(page_measurement.MeasurementFailure):
+ def __init__(self, name):
+ super(MissingTimelineMarker, self).__init__(
+ 'Timeline marker not found: ' + name)
+
+
+def FindTimelineMarker(timeline, name):
+ """Find the timeline event with the given name.
+
+ If there is not exactly one such timeline event, raise an error.
+ """
+ events = [s for s in timeline.GetAllEventsOfName(name)
+ if s.parent_slice == None]
+ if len(events) != 1:
+ raise MissingTimelineMarker(name)
+ return events[0]
+
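
To make the mostly_smooth computation above concrete: 60 fps allows 1000.0 / 60.0 ≈ 16.67 ms per frame, which the metric relaxes to a 17 ms budget. A small usage sketch with hypothetical frame times:

from metrics import smoothness

frame_times = [16.5, 16.8, 17.2, 16.4, 16.6]  # hypothetical values, in ms
# True when the 95th percentile of frame times fits the relaxed 60 fps budget.
mostly_smooth = smoothness.Percentile(frame_times, 95.0) < 17.0
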
diff --git a/tools/perf/metrics/smoothness_unittest.py b/tools/perf/metrics/smoothness_unittest.py
index 3b9960f..4bfc6a9 100644
--- a/tools/perf/metrics/smoothness_unittest.py
+++ b/tools/perf/metrics/smoothness_unittest.py
@@ -1,34 +1,36 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import unittest
+
import random
+import unittest
from metrics import discrepancy
from metrics import smoothness
from metrics.gpu_rendering_stats import GpuRenderingStats
-from telemetry.page import page
-from telemetry.page.page_measurement_results import PageMeasurementResults
from telemetry.core.backends.chrome.tracing_backend import RawTraceResultImpl
from telemetry.core.backends.chrome.trace_result import TraceResult
+from telemetry.page import page
+from telemetry.page.page_measurement_results import PageMeasurementResults
+
class MockTimer(object):
- """ An instance of this class is used as a global timer to generate
- random durations for stats and consistent timestamps for all mock trace
- events.
+ """A mock timer class which can generate random durations.
+
+ An instance of this class is used as a global timer to generate random
+ durations for stats and consistent timestamps for all mock trace events.
"""
def __init__(self):
self.microseconds = 0
- def Advance(self, low = 0, high = 100000):
+ def Advance(self, low=0, high=100000):
duration = random.randint(low, high)
self.microseconds += duration
return duration
+
class MockFrame(object):
- """ This class mocks rendering stats, texture stats and latency stats for
- a single frame
- """
+ """Mocks rendering, texture and latency stats for a single frame."""
def __init__(self, mock_timer):
""" Initialize the stats to random values """
self.start = mock_timer.microseconds
@@ -49,8 +51,8 @@ class MockFrame(object):
self.impl_stats['screen_frame_count'] = 1
self.impl_stats['dropped_frame_count'] = random.randint(0, 4)
self.impl_stats['rasterize_time'] = mock_timer.Advance()
- self.impl_stats['rasterize_time_for_now_bins_on_pending_tree'] = \
- mock_timer.Advance()
+ self.impl_stats['rasterize_time_for_now_bins_on_pending_tree'] = (
+ mock_timer.Advance())
self.impl_stats['best_rasterize_time'] = mock_timer.Advance()
self.impl_stats['rasterized_pixel_count'] = random.randint(0, 2000000)
self.impl_stats['impl_thread_scroll_count'] = random.randint(0, 4)
@@ -63,63 +65,23 @@ class MockFrame(object):
self.impl_stats['solid_color_tile_analysis_count'] = random.randint(0, 1000)
self.impl_stats['deferred_image_decode_time'] = mock_timer.Advance()
self.impl_stats['tile_analysis_time'] = mock_timer.Advance()
- self.texture_stats['texture_upload_count'] = random.randint(0, 1000)
- self.texture_stats['texture_upload_time'] = mock_timer.Advance()
- self.latency_stats['input_event_count'] = random.randint(0, 20)
- self.latency_stats['input_event_latency'] = mock_timer.Advance()
- self.latency_stats['touch_ui_count'] = random.randint(0, 20)
- self.latency_stats['touch_ui_latency'] = mock_timer.Advance()
- self.latency_stats['touch_acked_count'] = random.randint(0, 20)
- self.latency_stats['touch_acked_latency'] = mock_timer.Advance()
- self.latency_stats['scroll_update_count'] = random.randint(0, 20)
- self.latency_stats['scroll_update_latency'] = mock_timer.Advance()
self.end = mock_timer.microseconds
self.duration = self.end - self.start
- def AddToRenderingStats(self, rendering_stats):
- """ Add the stats for this frame to a mock RenderingStats object (i.e. a
- dictionary)
+ def AppendTraceEventForMainThreadStats(self, trace_events):
+ """Appends a trace event with the main thread stats.
+
+ The trace event is a dict with the following keys:
+ 'name',
+ 'tts' (thread timestamp),
+ 'pid' (process id),
+ 'ts' (timestamp),
+ 'cat' (category),
+ 'tid' (thread id),
+ 'ph' (phase),
+ 'args' (a dict with the key 'data').
+ This is related to src/base/debug/trace_event.h.
"""
- rs = rendering_stats
- rs['totalTimeInSeconds'] += self.duration / 1e6
- rs['numFramesSentToScreen'] += (self.main_stats['screen_frame_count'] +
- self.impl_stats['screen_frame_count'])
- rs['droppedFrameCount'] += self.impl_stats['dropped_frame_count']
- rs['numImplThreadScrolls'] += self.impl_stats['impl_thread_scroll_count']
- rs['numMainThreadScrolls'] += self.impl_stats['main_thread_scroll_count']
- rs['numLayersDrawn'] += self.impl_stats['drawn_layer_count']
- rs['numMissingTiles'] += self.impl_stats['missing_tile_count']
- rs['textureUploadCount'] += self.texture_stats['texture_upload_count']
- rs['totalTextureUploadTimeInSeconds'] += \
- self.texture_stats['texture_upload_time']
- rs['totalCommitCount'] += self.main_stats['commit_count']
- rs['totalCommitTimeInSeconds'] += self.main_stats['commit_time']
- rs['totalDeferredImageDecodeCount'] += self.impl_stats[
- 'deferred_image_decode_count']
- rs['totalDeferredImageDecodeTimeInSeconds'] += self.impl_stats[
- 'deferred_image_decode_time']
- rs['totalDeferredImageCacheHitCount'] += self.impl_stats[
- 'deferred_image_cache_hit_count']
- rs['totalImageGatheringCount'] += self.main_stats['image_gathering_count']
- rs['totalImageGatheringTimeInSeconds'] += self.main_stats[
- 'image_gathering_time']
- rs['totalTilesAnalyzed'] += self.impl_stats['tile_analysis_count']
- rs['totalTileAnalysisTimeInSeconds'] += self.impl_stats[
- 'tile_analysis_time']
- rs['solidColorTilesAnalyzed'] += self.impl_stats[
- 'solid_color_tile_analysis_count']
- rs['inputEventCount'] += self.latency_stats['input_event_count']
- rs['totalInputLatency'] += self.latency_stats['input_event_latency']
- rs['touchUICount'] += self.latency_stats['touch_ui_count']
- rs['totalTouchUILatency'] += self.latency_stats['touch_ui_latency']
- rs['touchAckedCount'] += self.latency_stats['touch_acked_count']
- rs['totalTouchAckedLatency'] += self.latency_stats['touch_acked_latency']
- rs['scrollUpdateCount'] += self.latency_stats['scroll_update_count']
- rs['totalScrollUpdateLatency'] += self.latency_stats[
- 'scroll_update_latency']
-
- def AppendTraceEventForMainThreadStats(self, trace):
- """ Append a trace event with the main thread stats to trace """
event = {'name': 'MainThreadRenderingStats::IssueTraceEvent',
'tts': self.end,
'pid': 20978,
@@ -129,10 +91,10 @@ class MockFrame(object):
'tid': 11,
'ph': 'i',
'args': {'data': self.main_stats}}
- trace.append(event)
+ trace_events.append(event)
- def AppendTraceEventForImplThreadStats(self, trace):
- """ Append a trace event with the impl thread stat to trace """
+ def AppendTraceEventForImplThreadStats(self, trace_events):
+ """Appends a trace event with the impl thread stat."""
event = {'name': 'ImplThreadRenderingStats::IssueTraceEvent',
'tts': self.end,
'pid': 20978,
@@ -142,175 +104,121 @@ class MockFrame(object):
'tid': 11,
'ph': 'i',
'args': {'data': self.impl_stats}}
- trace.append(event)
+ trace_events.append(event)
-class SmoothnessMetricsUnitTest(unittest.TestCase):
- def FindTimelineMarker(self, timeline):
- events = [s for
- s in timeline.GetAllEventsOfName(
- smoothness.TIMELINE_MARKER)
- if s.parent_slice == None]
- if len(events) != 1:
- raise LookupError, 'timeline marker not found'
- return events[0]
+class SmoothnessMetricUnitTest(unittest.TestCase):
def testCalcResultsTraceEvents(self):
# Make the test repeatable by seeding the random number generator
+ # (which is used by the mock timer) with a constant number.
random.seed(1234567)
mock_timer = MockTimer()
- trace = []
- rendering_stats = {
- 'totalTimeInSeconds': 0.0,
- 'numFramesSentToScreen': 0.0,
- 'droppedFrameCount': 0.0,
- 'numImplThreadScrolls': 0.0,
- 'numMainThreadScrolls': 0.0,
- 'numLayersDrawn': 0.0,
- 'numMissingTiles': 0.0,
- 'textureUploadCount': 0.0,
- 'totalTextureUploadTimeInSeconds': 0.0,
- 'totalCommitCount': 0.0,
- 'totalCommitTimeInSeconds': 0.0,
- 'totalDeferredImageDecodeCount': 0.0,
- 'totalDeferredImageDecodeTimeInSeconds': 0.0,
- 'totalDeferredImageCacheHitCount': 0.0,
- 'totalImageGatheringCount': 0.0,
- 'totalImageGatheringTimeInSeconds': 0.0,
- 'totalTilesAnalyzed': 0.0,
- 'totalTileAnalysisTimeInSeconds': 0.0,
- 'solidColorTilesAnalyzed': 0.0,
- 'inputEventCount': 0.0,
- 'totalInputLatency': 0.0,
- 'touchUICount': 0.0,
- 'totalTouchUILatency': 0.0,
- 'touchAckedCount': 0.0,
- 'totalTouchAckedLatency': 0.0,
- 'scrollUpdateCount': 0.0,
- 'totalScrollUpdateLatency': 0.0}
-
- # Append a start trace event for the timeline marker
- trace.append({'name': smoothness.TIMELINE_MARKER,
- 'tts': mock_timer.microseconds,
- 'args': {},
- 'pid': 20978,
- 'ts': mock_timer.microseconds,
- 'cat': 'webkit',
- 'tid': 11,
- 'ph': 'S',
- 'id': '0xafb37737e249e055'})
- # Generate 100 random mock frames, append their trace events, and accumulate
- # stats in rendering_stats.
+ trace_events = []
+ total_time_seconds = 0.0
+ num_frames_sent = 0.0
+ previous_frame_time = None
+ # This list represents time differences between frames in milliseconds.
+ expected_frame_times = []
+
+ # Append start trace events for the timeline marker and gesture marker,
+ # with some amount of time in between them.
+ trace_events.append({'name': smoothness.TIMELINE_MARKER,
+ 'tts': mock_timer.microseconds,
+ 'args': {},
+ 'pid': 20978,
+ 'ts': mock_timer.microseconds,
+ 'cat': 'webkit',
+ 'tid': 11,
+ 'ph': 'S', # Phase: start.
+ 'id': '0x12345'})
+ mock_timer.Advance()
+ trace_events.append({'name': smoothness.SYNTHETIC_GESTURE_MARKER,
+ 'tts': mock_timer.microseconds,
+ 'args': {},
+ 'pid': 20978,
+ 'ts': mock_timer.microseconds,
+ 'cat': 'webkit',
+ 'tid': 11,
+ 'ph': 'S',
+ 'id': '0xabcde'})
+
+ # Generate 100 random mock frames and append their trace events.
for _ in xrange(0, 100):
mock_frame = MockFrame(mock_timer)
- mock_frame.AppendTraceEventForMainThreadStats(trace)
- mock_frame.AppendTraceEventForImplThreadStats(trace)
- mock_frame.AddToRenderingStats(rendering_stats)
- # Append finish trace event for timeline marker
- trace.append({'name': smoothness.TIMELINE_MARKER,
- 'tts': mock_timer.microseconds,
- 'args': {},
- 'pid': 20978,
- 'ts': mock_timer.microseconds,
- 'cat': 'webkit',
- 'tid': 11,
- 'ph': 'F',
- 'id': '0xafb37737e249e055'})
-
- # Create timeline object from the trace
- trace_impl = RawTraceResultImpl(trace)
+ mock_frame.AppendTraceEventForMainThreadStats(trace_events)
+ mock_frame.AppendTraceEventForImplThreadStats(trace_events)
+ total_time_seconds += mock_frame.duration / 1e6
+ num_frames_sent += mock_frame.main_stats['screen_frame_count']
+ num_frames_sent += mock_frame.impl_stats['screen_frame_count']
+ current_frame_time = mock_timer.microseconds / 1000.0
+ if previous_frame_time:
+ difference = current_frame_time - previous_frame_time
+ difference = round(difference, 2)
+ expected_frame_times.append(difference)
+ previous_frame_time = current_frame_time
+
+ # Append finish trace events for the timeline and gesture markers, in the
+ # reverse order from how they were added, with some time in between.
+ trace_events.append({'name': smoothness.SYNTHETIC_GESTURE_MARKER,
+ 'tts': mock_timer.microseconds,
+ 'args': {},
+ 'pid': 20978,
+ 'ts': mock_timer.microseconds,
+ 'cat': 'webkit',
+ 'tid': 11,
+ 'ph': 'F', # Phase: finish.
+ 'id': '0xabcde'})
+ mock_timer.Advance()
+ trace_events.append({'name': smoothness.TIMELINE_MARKER,
+ 'tts': mock_timer.microseconds,
+ 'args': {},
+ 'pid': 20978,
+ 'ts': mock_timer.microseconds,
+ 'cat': 'webkit',
+ 'tid': 11,
+ 'ph': 'F',
+ 'id': '0x12345'})
+
+ # Create a timeline object from the trace.
+ trace_impl = RawTraceResultImpl(trace_events)
trace_result = TraceResult(trace_impl)
timeline = trace_result.AsTimelineModel()
+ # Find the timeline marker and gesture marker in the timeline,
+ # and create a GpuRenderingStats object.
+ smoothness_marker = smoothness.FindTimelineMarker(
+ timeline, smoothness.TIMELINE_MARKER)
+ gesture_marker = smoothness.FindTimelineMarker(
+ timeline, smoothness.SYNTHETIC_GESTURE_MARKER)
+ stats = GpuRenderingStats(
+ smoothness_marker, gesture_marker, {}, True)
- timeline_marker = self.FindTimelineMarker(timeline)
- stats = GpuRenderingStats(timeline_marker,
- rendering_stats,
- True)
-
+ # Make a results object and add results to it from the smoothness metric.
res = PageMeasurementResults()
res.WillMeasurePage(page.Page('http://foo.com/', None))
smoothness.CalcResults(stats, res)
res.DidMeasurePage()
- rs = rendering_stats
-
- # Scroll Results
- self.assertAlmostEquals(
- round(rs['totalTimeInSeconds'] / rs['numFramesSentToScreen'] * 1000.0,
- 3),
- res.page_results[0]['mean_frame_time'].value, 2)
- # We don't verify the correctness of the discrepancy computation
- # itself, because we have a separate unit test for that purpose.
self.assertEquals(
- round(discrepancy.FrameDiscrepancy(stats.screen_frame_timestamps,
- True), 4),
- res.page_results[0]['experimental_jank'].value)
+ expected_frame_times,
+ res.page_results[0]['frame_times'].value)
self.assertAlmostEquals(
- round(rs['droppedFrameCount'] / (rs['numFramesSentToScreen'] +
- rs['droppedFrameCount']) * 100.0, 1),
- res.page_results[0]['dropped_percent'].value)
- self.assertAlmostEquals(
- round(rs['numImplThreadScrolls'] / (rs['numImplThreadScrolls'] +
- rs['numMainThreadScrolls']) * 100.0,
- 1),
- res.page_results[0]['percent_impl_scrolled'].value)
- self.assertAlmostEquals(
- round(rs['numLayersDrawn'] / rs['numFramesSentToScreen'], 1),
- res.page_results[0]['average_num_layers_drawn'].value)
- self.assertAlmostEquals(
- round(rs['numMissingTiles'] / rs['numFramesSentToScreen'], 1),
- res.page_results[0]['average_num_missing_tiles'].value)
+ 1000.0 * (total_time_seconds / num_frames_sent),
+ res.page_results[0]['mean_frame_time'].value,
+ places=2)
- # Texture Upload Results
- self.assertAlmostEquals(
- round(rs['totalCommitTimeInSeconds'] / rs['totalCommitCount'] * 1000.0,
- 3),
- res.page_results[0]['average_commit_time'].value)
- self.assertEquals(
- rs['textureUploadCount'],
- res.page_results[0]['texture_upload_count'].value)
- self.assertEquals(
- rs['totalTextureUploadTimeInSeconds'],
- res.page_results[0]['total_texture_upload_time'].value)
-
- # Image Decoding Results
- self.assertEquals(
- rs['totalDeferredImageDecodeCount'],
- res.page_results[0]['total_deferred_image_decode_count'].value)
- self.assertEquals(
- rs['totalDeferredImageCacheHitCount'],
- res.page_results[0]['total_image_cache_hit_count'].value)
+ # We don't verify the correctness of the discrepancy computation itself,
+ # because we have a separate unit test for that purpose.
self.assertAlmostEquals(
- round(rs['totalImageGatheringTimeInSeconds'] /
- rs['totalImageGatheringCount'] * 1000.0, 3),
- res.page_results[0]['average_image_gathering_time'].value)
+ discrepancy.FrameDiscrepancy(stats.screen_frame_timestamps, True),
+ res.page_results[0]['jank'].value,
+ places=4)
+
+ # We do not verify the correctness of Percentile here; Percentile should
+ # have its own test.
+ # The 17 here represents a threshold of 17 ms; this should match the value
+ # in the smoothness metric.
self.assertEquals(
- rs['totalDeferredImageDecodeTimeInSeconds'],
- res.page_results[0]['total_deferred_image_decoding_time'].value)
+ smoothness.Percentile(expected_frame_times, 95.0) < 17.0,
+ res.page_results[0]['mostly_smooth'].value)
- # Tile Analysis Results
- self.assertEquals(
- rs['totalTilesAnalyzed'],
- res.page_results[0]['total_tiles_analyzed'].value)
- self.assertEquals(
- rs['solidColorTilesAnalyzed'],
- res.page_results[0]['solid_color_tiles_analyzed'].value)
- self.assertAlmostEquals(
- round(rs['totalTileAnalysisTimeInSeconds'] /
- rs['totalTilesAnalyzed'] * 1000.0, 3),
- res.page_results[0]['average_tile_analysis_time'].value)
-
- # Latency Results
- self.assertAlmostEquals(
- round(rs['totalInputLatency'] / rs['inputEventCount'] * 1000.0, 3),
- res.page_results[0]['average_latency'].value)
- self.assertAlmostEquals(
- round(rs['totalTouchUILatency'] / rs['touchUICount'] * 1000.0, 3),
- res.page_results[0]['average_touch_ui_latency'].value)
- self.assertAlmostEquals(
- round(rs['totalTouchAckedLatency'] / rs['touchAckedCount'] * 1000.0, 3),
- res.page_results[0]['average_touch_acked_latency'].value)
- self.assertAlmostEquals(
- round(rs['totalScrollUpdateLatency'] / rs['scrollUpdateCount'] * 1000.0,
- 3),
- res.page_results[0]['average_scroll_update_latency'].value)