author     chrisgao@chromium.org <chrisgao@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-05-10 21:29:51 +0000
committer  chrisgao@chromium.org <chrisgao@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-05-10 21:29:51 +0000
commit  7a5121f28527625ede1d1b09bef98ea2c65d0660 (patch)
tree    2a05f031872351bab30e2707f57a928676976737
parent  d2414bbe51266d1cced96e0114ff3b76f69cacab (diff)
Convert dom_perf into a telemetry test.
BUG=230435
NOTRY=true

Review URL: https://chromiumcodereview.appspot.com/15065003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@199545 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--  tools/perf/page_sets/dom_perf.json  15
-rw-r--r--  tools/perf/perf_tools/dom_perf.py   61
2 files changed, 76 insertions(+), 0 deletions(-)
diff --git a/tools/perf/page_sets/dom_perf.json b/tools/perf/page_sets/dom_perf.json
new file mode 100644
index 0000000..ef35d28
--- /dev/null
+++ b/tools/perf/page_sets/dom_perf.json
@@ -0,0 +1,15 @@
+{
+ "description": "Dom Perf benchmark",
+ "pages": [
+ { "url": "file:///../../../data/dom_perf/run.html?run=Accessors&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=CloneNodes&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=CreateNodes&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=DOMDivWalk&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=DOMTable&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=DOMWalk&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=Events&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=Get+Elements&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=GridSort&reportInJS=1" },
+ { "url": "file:///../../../data/dom_perf/run.html?run=Template&reportInJS=1" }
+ ]
+}
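
For reference, every entry in the page set above points at the same run.html harness and selects a single DOM Perf suite through the run= query parameter, with spaces encoded as '+' (as in Get+Elements). A minimal sketch, not part of this change, that would regenerate the same JSON from the suite names:

import json

# Suite names taken from the page set above. 'Get Elements' contains a
# space, which is encoded as '+' in the query string.
SUITES = ['Accessors', 'CloneNodes', 'CreateNodes', 'DOMDivWalk',
          'DOMTable', 'DOMWalk', 'Events', 'Get Elements', 'GridSort',
          'Template']

def BuildDomPerfPageSet():
  # Hypothetical helper name; illustrates the URL pattern only.
  base_url = 'file:///../../../data/dom_perf/run.html?run=%s&reportInJS=1'
  pages = [{'url': base_url % name.replace(' ', '+')} for name in SUITES]
  return json.dumps({'description': 'Dom Perf benchmark', 'pages': pages})

print(BuildDomPerfPageSet())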
diff --git a/tools/perf/perf_tools/dom_perf.py b/tools/perf/perf_tools/dom_perf.py
new file mode 100644
index 0000000..cbe3243
--- /dev/null
+++ b/tools/perf/perf_tools/dom_perf.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import math
+
+from telemetry.core import util
+from telemetry.page import page_measurement
+
+
+def _GeometricMean(values):
+  """Computes the rounded geometric mean of an array of values."""
+  if not values:
+    return None
+  # Clamp values to at least 0.001 so math.log never sees a non-positive input.
+  new_values = []
+  for value in values:
+    if value > 0.001:
+      new_values.append(value)
+    else:
+      new_values.append(0.001)
+  # Compute the sum of the log of the values.
+  log_sum = sum(map(math.log, new_values))
+  # Raise e to the mean of the logs; this is the geometric mean.
+  mean = math.pow(math.e, (log_sum / len(new_values)))
+  # Return the rounded mean.
+  return int(round(mean))
+
+
+SCORE_UNIT = 'score (bigger is better)'
+SCORE_TRACE_NAME = 'score'
+
+
+class DomPerf(page_measurement.PageMeasurement):
+  @property
+  def results_are_the_same_on_every_page(self):
+    return False
+
+  def WillNavigateToPage(self, page, tab):
+    tab.EvaluateJavaScript('document.cookie = "__domperf_finished=0"')
+
+  def MeasurePage(self, page, tab, results):
+    def _IsDone():
+      return tab.GetCookieByName('__domperf_finished') == '1'
+    util.WaitFor(_IsDone, 600, poll_interval=5)
+
+    data = json.loads(tab.EvaluateJavaScript('__domperf_result'))
+    for suite in data['BenchmarkSuites']:
+      # Skip benchmarks that we didn't actually run this time around.
+      if len(suite['Benchmarks']) or suite['score']:
+        results.Add(SCORE_TRACE_NAME, SCORE_UNIT,
+                    suite['score'], suite['name'], 'unimportant')
+
+  def DidRunPageSet(self, tab, results):
+    # Report the geometric mean of the per-suite scores as the combined total.
+    scores = []
+    for result in results.page_results:
+      scores.append(result[SCORE_TRACE_NAME].output_value)
+    total = _GeometricMean(scores)
+    results.AddSummary(SCORE_TRACE_NAME, SCORE_UNIT, total, 'Total')
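
As a sanity check on _GeometricMean above: exponentiating the mean of the logs is the same as taking the n-th root of the product of the n values. A standalone sketch with a worked example (independent of telemetry; the helper name is illustrative):

import math

def geometric_mean(values):
  # Mirrors _GeometricMean: clamp to 0.001 so log() never sees a
  # non-positive value, then exponentiate the mean of the logs.
  clamped = [max(v, 0.001) for v in values]
  log_sum = sum(math.log(v) for v in clamped)
  return int(round(math.exp(log_sum / len(clamped))))

# Worked example: 100 * 200 * 400 = 8,000,000, and the cube root of
# 8,000,000 is exactly 200, so the rounded geometric mean is 200.
assert geometric_mean([100, 200, 400]) == 200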