author | tonyg@chromium.org <tonyg@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2013-07-24 00:28:00 +0000
---|---|---
committer | tonyg@chromium.org <tonyg@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2013-07-24 00:28:00 +0000
commit | ba06a5f479c95a5bfda26547bc2cf5c5f118572e (patch) |
tree | 606fb391d75ad57f9b206905491a10c0b82c14b7 | /tools/perf
parent | dfd41778d45ceb852a5ee1b99411d4673a67bca3 (diff) |
Update loading_measurement_analyzer to display CPU vs Network breakdowns.
BUG=None
NOTRY=True
Review URL: https://chromiumcodereview.appspot.com/19925002
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@213293 0039d316-1c4b-4281-b951-d872f2087c98
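For context, the breakdown the patch reports works like this: every per-category '(ms)' total in a CSV row is summed as CPU time, and whatever remains of the page's load time is attributed to network. Below is a minimal sketch of that arithmetic with hypothetical category names and sample numbers; only the 'load_time (ms)' column and the load-minus-CPU calculation come from the patch itself.

```python
# Sketch of the CPU vs. network split added by this patch.
# Category names and values are made-up examples, not real measurement output.
load_time_ms = 3200.0                   # from the 'load_time (ms)' column
cpu_totals_ms = {                       # hypothetical per-category totals
    'Parse (ms)': 400.0,
    'Script (ms)': 900.0,
    'Layout (ms)': 300.0,
}

cpu_time_ms = sum(cpu_totals_ms.values())            # 1600.0
network_time_ms = load_time_ms - cpu_time_ms         # 1600.0
network_fraction = network_time_ms / load_time_ms    # 0.5

print('CPU: %dms, Network: %dms (%.1f%% of load time)'
      % (cpu_time_ms, network_time_ms, 100 * network_fraction))
```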
Diffstat (limited to 'tools/perf')
-rwxr-xr-x | tools/perf/measurements/loading_measurement_analyzer.py | 47
1 files changed, 40 insertions, 7 deletions
diff --git a/tools/perf/measurements/loading_measurement_analyzer.py b/tools/perf/measurements/loading_measurement_analyzer.py
index a5abe11..395b61d 100755
--- a/tools/perf/measurements/loading_measurement_analyzer.py
+++ b/tools/perf/measurements/loading_measurement_analyzer.py
@@ -29,6 +29,9 @@ class LoadingMeasurementAnalyzer(object):
     self.totals = collections.defaultdict(list)
     self.maxes = collections.defaultdict(list)
     self.avgs = collections.defaultdict(list)
+    self.load_times = []
+    self.cpu_times = []
+    self.network_percents = []
     self.num_rows_parsed = 0
     self.num_slowest_urls = options.num_slowest_urls
     if options.rank_csv_file:
@@ -42,17 +45,30 @@ class LoadingMeasurementAnalyzer(object):
       if (options.rank_limit and
           self._GetRank(row['url']) > options.rank_limit):
         continue
+      cpu_time = 0
+      load_time = float(row['load_time (ms)'])
+      if load_time < 0:
+        print 'Skipping %s due to negative load time' % row['url']
+        continue
       for key, value in row.iteritems():
-        if key in ('url', 'dom_content_loaded_time (ms)', 'load_time (ms)'):
+        if key in ('url', 'load_time (ms)', 'dom_content_loaded_time (ms)'):
           continue
         if not value or value == '-':
           continue
+        value = float(value)
         if '_avg' in key:
-          self.avgs[key].append((float(value), row['url']))
+          self.avgs[key].append((value, row['url']))
         elif '_max' in key:
-          self.maxes[key].append((float(value), row['url']))
+          self.maxes[key].append((value, row['url']))
         else:
-          self.totals[key].append((float(value), row['url']))
+          self.totals[key].append((value, row['url']))
+          cpu_time += value
+      self.load_times.append((load_time, row['url']))
+      self.cpu_times.append((cpu_time, row['url']))
+      if options.show_network:
+        network_time = load_time - cpu_time
+        self.totals['Network (ms)'].append((network_time, row['url']))
+        self.network_percents.append((network_time / load_time, row['url']))
       self.num_rows_parsed += 1
       if options.max_rows and self.num_rows_parsed == int(options.max_rows):
         break
@@ -73,17 +89,23 @@ class LoadingMeasurementAnalyzer(object):
     sum_totals = {}
     for key, values in self.totals.iteritems():
       sum_totals[key] = sum([v[0] for v in values])

-    total_time = sum(sum_totals.values())
+    total_cpu_time = sum([v[0] for v in self.cpu_times])
+    total_page_load_time = sum([v[0] for v in self.load_times])
     print
     print 'Total URLs: ', self.num_rows_parsed
-    print 'Total time: %ds' % int(round(total_time / 1000))
+    print 'Total CPU time: %ds' % int(round(total_cpu_time / 1000))
+    print 'Total page load time: %ds' % int(round(total_page_load_time / 1000))
+    print 'Average CPU time: %dms' % int(round(
+        total_cpu_time / self.num_rows_parsed))
+    print 'Average page load time: %dms' % int(round(
+        total_page_load_time / self.num_rows_parsed))
     print
     for key, value in sorted(sum_totals.iteritems(), reverse=True,
                              key=lambda i: i[1]):
       output_key = '%30s: ' % key.replace(' (ms)', '')
       output_value = '%10ds ' % (value / 1000)
-      output_percent = '%.1f%%' % (100 * value / total_time)
+      output_percent = '%.1f%%' % (100 * value / total_page_load_time)
       print output_key, output_value, output_percent

     if not self.num_slowest_urls:
@@ -98,6 +120,15 @@ class LoadingMeasurementAnalyzer(object):
       for value, url in slowest:
         print '\t', '%dms\t' % value, url, '(#%s)' % self._GetRank(url)

+    if self.network_percents:
+      print
+      print 'Top %d highest network to CPU time ratios:' % self.num_slowest_urls
+      for percent, url in sorted(
+          self.network_percents, reverse=True)[:self.num_slowest_urls]:
+        percent *= 100
+        print '\t', '%.1f%%' % percent, url, '(#%s)' % self._GetRank(url)
+
+
 def main(argv):
   prog_desc = 'Parses CSV output from the loading_measurement'
   parser = optparse.OptionParser(usage=('%prog [options]' + '\n\n' + prog_desc))
@@ -109,6 +140,8 @@ def main(argv):
   parser.add_option('--rank-csv-file', help='A CSV file of <rank,url>')
   parser.add_option('--rank-limit', type='int',
                     help='Only process pages higher than this rank')
+  parser.add_option('--show-network', action='store_true',
+                    help='Whether to display Network as a category')
   options, args = parser.parse_args(argv[1:])
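A note on usage: the new behavior is opt-in. Without --show-network the report is unchanged; with it, a synthetic 'Network (ms)' category (load time minus summed CPU time) joins the totals breakdown and a "Top N highest network to CPU time ratios" list is printed. A hypothetical invocation might look like `loading_measurement_analyzer.py --show-network results.csv`, assuming the measurement CSV is passed the same way as before this patch; the file name is made up and the argument handling itself is outside the visible hunks.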