path: root/build/android
author     simonhatch <simonhatch@chromium.org>   2015-05-13 15:00:47 -0700
committer  Commit bot <commit-bot@chromium.org>   2015-05-13 22:01:33 +0000
commit     8a9efca9b5451c75da244f62f073206c97ef6dd4 (patch)
tree       e83722b0e0c994ced42d6668412df23adf26e4a2 /build/android
parent     9a6cbec6b716f5fc55d1037bf5ae9b7ebd68ac45 (diff)
Surface test times of Android tests.
Right now we have no easy visibility into the time taken by individual
tests, since everything runs under one massive "Sharded Perf Tests" step.
We're trying to balance the tests across devices to improve cycle time,
so we need a way to see how long each test is actually taking.

BUG=466101

Review URL: https://codereview.chromium.org/1140783002

Cr-Commit-Position: refs/heads/master@{#329722}
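With this change, each entry in the step list written by OutputJsonList carries a total_time field (read from the test's persisted result) next to the existing device_affinity, so the output looks roughly like the following; the test name and timing are made-up values for illustration:

  [{"test": "sunspider", "device_affinity": 0, "total_time": 42.5}]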
Diffstat (limited to 'build/android')
-rw-r--r--  build/android/pylib/perf/test_runner.py  30
1 file changed, 22 insertions(+), 8 deletions(-)
diff --git a/build/android/pylib/perf/test_runner.py b/build/android/pylib/perf/test_runner.py
index 9d1f437..3bee602 100644
--- a/build/android/pylib/perf/test_runner.py
+++ b/build/android/pylib/perf/test_runner.py
@@ -66,11 +66,29 @@ from pylib.base import base_test_runner
 from pylib.device import device_errors
 
 
+def GetPersistedResult(test_name):
+  file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+  if not os.path.exists(file_name):
+    logging.error('File not found %s', file_name)
+    return None
+
+  with file(file_name, 'r') as f:
+    return pickle.loads(f.read())
+
+
 def OutputJsonList(json_input, json_output):
   with file(json_input, 'r') as i:
     all_steps = json.load(i)
-  step_values = [{'test': k, 'device_affinity': v['device_affinity']}
-                 for k, v in all_steps['steps'].iteritems()]
+
+  step_values = []
+  for k, v in all_steps['steps'].iteritems():
+    data = {'test': k, 'device_affinity': v['device_affinity']}
+
+    persisted_result = GetPersistedResult(k)
+    if persisted_result:
+      data['total_time'] = persisted_result['total_time']
+    step_values.append(data)
+
   with file(json_output, 'w') as o:
     o.write(json.dumps(step_values))
   return 0
@@ -86,13 +104,9 @@ def PrintTestOutput(test_name, json_file_name=None):
   Returns:
     exit code generated by the test step.
   """
-  file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
-  if not os.path.exists(file_name):
-    logging.error('File not found %s', file_name)
+  persisted_result = GetPersistedResult(test_name)
+  if not persisted_result:
     return 1
-
-  with file(file_name, 'r') as f:
-    persisted_result = pickle.loads(f.read())
   logging.info('*' * 80)
   logging.info('Output from:')
   logging.info(persisted_result['cmd'])
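The persisted files that GetPersistedResult loads are pickled dicts, one per test, stored under constants.PERF_OUTPUT_DIR; the hunks shown here read only their 'total_time' key (and PrintTestOutput logs 'cmd'). A minimal standalone sketch of the round trip, written in Python 3 for brevity; the directory, test name, timing and helper names are illustrative assumptions, not part of the patch:

import json
import os
import pickle

PERF_OUTPUT_DIR = '/tmp/perf-output'  # stand-in for constants.PERF_OUTPUT_DIR

def save_result(test_name, result):
  # Hypothetical write side: persist one pickled dict per test, as the runner
  # does before GetPersistedResult reads it back.
  if not os.path.isdir(PERF_OUTPUT_DIR):
    os.makedirs(PERF_OUTPUT_DIR)
  with open(os.path.join(PERF_OUTPUT_DIR, test_name), 'wb') as f:
    f.write(pickle.dumps(result))

def get_persisted_result(test_name):
  # Mirrors GetPersistedResult from the patch above.
  file_name = os.path.join(PERF_OUTPUT_DIR, test_name)
  if not os.path.exists(file_name):
    return None
  with open(file_name, 'rb') as f:
    return pickle.loads(f.read())

# Persist a fake result, then rebuild the step list the same way the patched
# OutputJsonList does, attaching total_time to each step entry.
save_result('sunspider', {'cmd': 'run sunspider', 'total_time': 42.5})
steps = {'steps': {'sunspider': {'device_affinity': 0}}}

step_values = []
for name, step in steps['steps'].items():
  data = {'test': name, 'device_affinity': step['device_affinity']}
  persisted = get_persisted_result(name)
  if persisted:
    data['total_time'] = persisted['total_time']
  step_values.append(data)

print(json.dumps(step_values))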