diff options
author | tonyg@chromium.org <tonyg@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-01-27 02:48:29 +0000 |
---|---|---|
committer | tonyg@chromium.org <tonyg@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-01-27 02:48:29 +0000 |
commit | 036ed29329b70aff503a2e8731ee452ea27ddba0 (patch) | |
tree | 536d8f848116e682fb0f98b62c58e1adf8307bcc | |
parent | 17ef3970feba55f1f77d7bcc8ec0352edf3f5147 (diff) | |
download | chromium_src-036ed29329b70aff503a2e8731ee452ea27ddba0.zip chromium_src-036ed29329b70aff503a2e8731ee452ea27ddba0.tar.gz chromium_src-036ed29329b70aff503a2e8731ee452ea27ddba0.tar.bz2 |
Re-Reland: [Telemetry] Properly handle retries after browser crash.
Previously it was easy for the results to get into a bad state and trigger
assertion failures in a variety of ways.
This ensures we ignore the partial results in the case of a crash.
This reland is tested with GPU functional tests. The main fix was to make all
result types shallow copyable.
BUG=
TBR=tonyg@chromium.org
Review URL: https://codereview.chromium.org/147853002
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@247184 0039d316-1c4b-4281-b951-d872f2087c98
12 files changed, 115 insertions, 67 deletions
diff --git a/tools/telemetry/telemetry/page/block_page_measurement_results.py b/tools/telemetry/telemetry/page/block_page_measurement_results.py index a3f0972..d92ae86 100644 --- a/tools/telemetry/telemetry/page/block_page_measurement_results.py +++ b/tools/telemetry/telemetry/page/block_page_measurement_results.py @@ -7,28 +7,25 @@ from telemetry.page import page_measurement_results class BlockPageMeasurementResults( page_measurement_results.PageMeasurementResults): - def __init__(self, output_file): - super(BlockPageMeasurementResults, self).__init__() - self._output_file = output_file + def __init__(self, output_stream): + super(BlockPageMeasurementResults, self).__init__(output_stream) def DidMeasurePage(self): - values = self.page_specific_values_for_current_page - if not values: - # Do not output if no results were added on this page. + try: + values = self.page_specific_values_for_current_page + if not values: + # Do not output if no results were added on this page. + return + lines = ['name: %s' % values[0].page.display_name] + for value in sorted(values, key=lambda x: x.name): + lines.append('%s (%s): %s' % + (value.name, + value.units, + value.GetRepresentativeString())) + for line in lines: + self._output_stream.write(line) + self._output_stream.write(os.linesep) + self._output_stream.write(os.linesep) + self._output_stream.flush() + finally: super(BlockPageMeasurementResults, self).DidMeasurePage() - return - lines = ['name: %s' % - values[0].page.display_name] - - for value in sorted(values, key=lambda x: x.name): - lines.append('%s (%s): %s' % - (value.name, - value.units, - value.GetRepresentativeString())) - for line in lines: - self._output_file.write(line) - self._output_file.write(os.linesep) - self._output_file.write(os.linesep) - self._output_file.flush() - - super(BlockPageMeasurementResults, self).DidMeasurePage() diff --git a/tools/telemetry/telemetry/page/buildbot_page_measurement_results.py 
b/tools/telemetry/telemetry/page/buildbot_page_measurement_results.py index f28afbd..59cab64 100644 --- a/tools/telemetry/telemetry/page/buildbot_page_measurement_results.py +++ b/tools/telemetry/telemetry/page/buildbot_page_measurement_results.py @@ -12,14 +12,16 @@ from telemetry.value import merge_values class BuildbotPageMeasurementResults( page_measurement_results.PageMeasurementResults): - def __init__(self, trace_tag=''): - super(BuildbotPageMeasurementResults, self).__init__() + def __init__(self, output_stream, trace_tag=''): + super(BuildbotPageMeasurementResults, self).__init__(output_stream) self._trace_tag = trace_tag def _PrintPerfResult(self, measurement, trace, v, units, result_type='default'): - perf_tests_helper.PrintPerfResult( - measurement, trace, v, units, result_type) + output = perf_tests_helper.PrintPerfResult( + measurement, trace, v, units, result_type, print_to_stdout=False) + self._output_stream.write(output + '\n') + self._output_stream.flush() def PrintSummary(self): """Print summary data in a format expected by buildbot for perf dashboards. @@ -29,7 +31,7 @@ class BuildbotPageMeasurementResults( """ # Print out the list of unique pages. 
perf_tests_helper.PrintPages( - [page.display_name for page in self.pages_that_succeeded]) + [page.display_name for page in self.pages_that_succeeded]) self._PrintPerPageResults() self._PrintOverallResults() diff --git a/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py b/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py index 82ce337..c11ab0c 100644 --- a/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py +++ b/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py @@ -22,7 +22,8 @@ def _MakePageSet(): class SummarySavingPageMeasurementResults( buildbot_page_measurement_results.BuildbotPageMeasurementResults): def __init__(self, trace_tag=''): - super(SummarySavingPageMeasurementResults, self).__init__(trace_tag) + super(SummarySavingPageMeasurementResults, self).__init__( + None, trace_tag=trace_tag) self.results = [] def _PrintPerfResult(self, *args): diff --git a/tools/telemetry/telemetry/page/csv_page_measurement_results.py b/tools/telemetry/telemetry/page/csv_page_measurement_results.py index f431176..fdbb812 100644 --- a/tools/telemetry/telemetry/page/csv_page_measurement_results.py +++ b/tools/telemetry/telemetry/page/csv_page_measurement_results.py @@ -9,8 +9,7 @@ from telemetry.value import merge_values class CsvPageMeasurementResults( page_measurement_results.PageMeasurementResults): def __init__(self, output_stream, output_after_every_page=None): - super(CsvPageMeasurementResults, self).__init__() - self._output_stream = output_stream + super(CsvPageMeasurementResults, self).__init__(output_stream) self._results_writer = csv.writer(self._output_stream) self._did_output_header = False self._header_names_written_to_writer = None diff --git a/tools/telemetry/telemetry/page/gtest_test_results.py b/tools/telemetry/telemetry/page/gtest_test_results.py index 69b0002..f70866c 100644 --- a/tools/telemetry/telemetry/page/gtest_test_results.py +++ 
b/tools/telemetry/telemetry/page/gtest_test_results.py @@ -11,8 +11,7 @@ from telemetry.page import page_test_results class GTestTestResults(page_test_results.PageTestResults): def __init__(self, output_stream): - super(GTestTestResults, self).__init__() - self._output_stream = output_stream + super(GTestTestResults, self).__init__(output_stream) self._timestamp = None def _GetMs(self): diff --git a/tools/telemetry/telemetry/page/html_page_measurement_results.py b/tools/telemetry/telemetry/page/html_page_measurement_results.py index bf058eb..c3f76c0 100644 --- a/tools/telemetry/telemetry/page/html_page_measurement_results.py +++ b/tools/telemetry/telemetry/page/html_page_measurement_results.py @@ -7,6 +7,7 @@ import json import logging import os import re +import sys from telemetry.core import util from telemetry.page import buildbot_page_measurement_results @@ -30,12 +31,11 @@ class HtmlPageMeasurementResults( buildbot_page_measurement_results.BuildbotPageMeasurementResults): def __init__(self, output_stream, test_name, reset_results, upload_results, browser_type, results_label=None, trace_tag=''): - super(HtmlPageMeasurementResults, self).__init__(trace_tag) - - self._output_stream = output_stream + super(HtmlPageMeasurementResults, self).__init__(sys.stdout, trace_tag) self._test_name = test_name self._reset_results = reset_results self._upload_results = upload_results + self._html_output_stream = output_stream self._existing_results = self._ReadExistingResults(output_stream) self._result = { 'buildTime': self._GetBuildTime(), @@ -78,9 +78,9 @@ class HtmlPageMeasurementResults( return json.loads(m.group(1))[:512] def _SaveResults(self, results): - self._output_stream.seek(0) - self._output_stream.write(results) - self._output_stream.truncate() + self._html_output_stream.seek(0) + self._html_output_stream.write(results) + self._html_output_stream.truncate() def _PrintPerfResult(self, measurement, trace, values, units, result_type='default'): @@ -115,7 +115,7 @@ 
class HtmlPageMeasurementResults( self._SaveResults(html) if self._upload_results: - file_path = os.path.abspath(self._output_stream.name) + file_path = os.path.abspath(self._html_output_stream.name) file_name = 'html-results/results-%s' % datetime.datetime.now().strftime( '%Y-%m-%d_%H-%M-%S') cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET, file_name, file_path) @@ -123,4 +123,5 @@ class HtmlPageMeasurementResults( print ('View online at ' 'http://storage.googleapis.com/chromium-telemetry/%s' % file_name) print - print 'View result at file://%s' % os.path.abspath(self._output_stream.name) + print 'View result at file://%s' % os.path.abspath( + self._html_output_stream.name) diff --git a/tools/telemetry/telemetry/page/page_measurement_results.py b/tools/telemetry/telemetry/page/page_measurement_results.py index ef8208b..81af5cb 100644 --- a/tools/telemetry/telemetry/page/page_measurement_results.py +++ b/tools/telemetry/telemetry/page/page_measurement_results.py @@ -7,8 +7,8 @@ from telemetry.page import page_test_results from telemetry.value import value_backcompat class PageMeasurementResults(page_test_results.PageTestResults): - def __init__(self, trace_tag=''): - super(PageMeasurementResults, self).__init__() + def __init__(self, output_stream=None, trace_tag=''): + super(PageMeasurementResults, self).__init__(output_stream) self._done = False self._trace_tag = trace_tag diff --git a/tools/telemetry/telemetry/page/page_measurement_results_unittest.py b/tools/telemetry/telemetry/page/page_measurement_results_unittest.py index 2e48e6a..3b33c86 100644 --- a/tools/telemetry/telemetry/page/page_measurement_results_unittest.py +++ b/tools/telemetry/telemetry/page/page_measurement_results_unittest.py @@ -126,13 +126,15 @@ class PageMeasurementResultsTest(unittest.TestCase): results.PrintSummary() self.assertEquals(results.results, []) - def test_get_succesful_page_values_merged_no_failures(self): + def test_get_successful_page_values_merged_no_failures(self): results 
= SummarySavingPageMeasurementResults() results.WillMeasurePage(self.pages[0]) results.Add('a', 'seconds', 3) self.assertEquals(1, len(results.page_specific_values_for_current_page)) results.DidMeasurePage() - self.assertRaises(lambda: results.page_specific_values_for_current_page) + self.assertRaises( + AssertionError, + lambda: results.page_specific_values_for_current_page) def test_get_all_values_for_successful_pages(self): results = SummarySavingPageMeasurementResults() diff --git a/tools/telemetry/telemetry/page/page_runner.py b/tools/telemetry/telemetry/page/page_runner.py index c870a3e..4b9da6d 100644 --- a/tools/telemetry/telemetry/page/page_runner.py +++ b/tools/telemetry/telemetry/page/page_runner.py @@ -2,6 +2,7 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import collections +import copy import glob import logging import os @@ -18,7 +19,6 @@ from telemetry.core import util from telemetry.core import wpr_modes from telemetry.core.platform.profiler import profiler_finder from telemetry.page import page_filter as page_filter_module -from telemetry.page import page_measurement_results from telemetry.page import page_runner_repeat from telemetry.page import page_test from telemetry.page import results_options @@ -107,9 +107,6 @@ class _RunState(object): if started_browser: self.browser.tabs[0].WaitForDocumentReadyStateToBeComplete() - if self.first_page[page]: - self.first_page[page] = False - def StopBrowser(self): if self.browser: self.browser.Close() @@ -205,21 +202,19 @@ def _PrepareAndRunPage(test, page_set, expectations, finder_options, wpr_modes.WPR_REPLAY if page.archive_path and os.path.isfile(page.archive_path) else wpr_modes.WPR_OFF) - results_for_current_run = results - if state.first_page[page] and test.discard_first_result: - # If discarding results, substitute a dummy object. 
- results_for_current_run = page_measurement_results.PageMeasurementResults() - results_for_current_run.StartTest(page) + tries = 3 while tries: tries -= 1 try: + results_for_current_run = copy.copy(results) + results_for_current_run.StartTest(page) if test.RestartBrowserBeforeEachPage(): state.StopBrowser() # If we are restarting the browser for each page customize the per page # options for just the current page before starting the browser. state.StartBrowserIfNeeded(test, page_set, page, possible_browser, - credentials_path, page.archive_path) + credentials_path, page.archive_path) expectation = expectations.GetExpectationForPage(state.browser, page) @@ -247,7 +242,13 @@ def _PrepareAndRunPage(test, page_set, expectations, finder_options, if (test.StopBrowserAfterPage(state.browser, page)): state.StopBrowser() - break + results_for_current_run.StopTest(page) + + if state.first_page[page]: + state.first_page[page] = False + if test.discard_first_result: + return results + return results_for_current_run except exceptions.BrowserGoneException: _LogStackTrace('Browser crashed', state.browser) logging.warning('Lost connection to browser. Retrying.') @@ -259,7 +260,6 @@ def _PrepareAndRunPage(test, page_set, expectations, finder_options, logging.error( 'Lost connection to browser during multi-tab test. 
Failing.') raise - results_for_current_run.StopTest(page) def Run(test, page_set, expectations, finder_options): @@ -334,10 +334,9 @@ def Run(test, page_set, expectations, finder_options): state.repeat_state.WillRunPage() test.WillRunPageRepeats(page) while state.repeat_state.ShouldRepeatPage(): - # execute test on page - _PrepareAndRunPage(test, page_set, expectations, finder_options, - browser_options, page, credentials_path, - possible_browser, results, state) + results = _PrepareAndRunPage( + test, page_set, expectations, finder_options, browser_options, + page, credentials_path, possible_browser, results, state) state.repeat_state.DidRunPage() test.DidRunPageRepeats(page) if test.IsExiting(): diff --git a/tools/telemetry/telemetry/page/page_runner_unittest.py b/tools/telemetry/telemetry/page/page_runner_unittest.py index d652cb7..30c71bf 100644 --- a/tools/telemetry/telemetry/page/page_runner_unittest.py +++ b/tools/telemetry/telemetry/page/page_runner_unittest.py @@ -14,8 +14,8 @@ from telemetry.page import page_measurement from telemetry.page import page_set from telemetry.page import page_test from telemetry.page import page_runner -from telemetry.unittest import options_for_unittests from telemetry.page import test_expectations +from telemetry.unittest import options_for_unittests SIMPLE_CREDENTIALS_STRING = """ { @@ -131,7 +131,7 @@ class PageRunnerTests(unittest.TestCase): self.assertEquals(1, len(results.successes)) self.assertEquals(0, len(results.failures)) - self.assertEquals(1, len(results.errors)) + self.assertEquals(0, len(results.errors)) def testDiscardFirstResult(self): ps = page_set.PageSet() @@ -179,6 +179,43 @@ class PageRunnerTests(unittest.TestCase): self.assertEquals(0, len(results.successes)) self.assertEquals(0, len(results.failures)) + def testPagesetRepeat(self): + ps = page_set.PageSet() + expectations = test_expectations.TestExpectations() + ps.pages.append(page_module.Page( + 'file://blank.html', ps, 
base_dir=util.GetUnittestDataDir())) + ps.pages.append(page_module.Page( + 'file://green_rect.html', ps, base_dir=util.GetUnittestDataDir())) + + class Measurement(page_measurement.PageMeasurement): + i = 0 + def MeasurePage(self, _, __, results): + self.i += 1 + results.Add('metric', 'unit', self.i) + + output_file = tempfile.NamedTemporaryFile(delete=False).name + try: + options = options_for_unittests.GetCopy() + options.output_format = 'buildbot' + options.output_file = output_file + options.reset_results = None + options.upload_results = None + options.results_label = None + + options.repeat_options.page_repeat_iters = 1 + options.repeat_options.pageset_repeat_iters = 2 + results = page_runner.Run(Measurement(), ps, expectations, options) + results.PrintSummary() + self.assertEquals(4, len(results.successes)) + self.assertEquals(0, len(results.failures)) + stdout = open(output_file).read() + self.assertIn('RESULT metric_by_url: blank.html= [1,3] unit', stdout) + self.assertIn('RESULT metric_by_url: green_rect.html= [2,4] unit', stdout) + self.assertIn('*RESULT metric: metric= [1,2,3,4] unit', stdout) + finally: + results._output_stream.close() # pylint: disable=W0212 + os.remove(output_file) + def testCredentialsWhenLoginFails(self): credentials_backend = StubCredentialsBackend(login_return_value=False) did_run = self.runCredentialsTest(credentials_backend) diff --git a/tools/telemetry/telemetry/page/page_test_results.py b/tools/telemetry/telemetry/page/page_test_results.py index 8c763fc..2910b1e 100644 --- a/tools/telemetry/telemetry/page/page_test_results.py +++ b/tools/telemetry/telemetry/page/page_test_results.py @@ -2,19 +2,31 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+import collections +import copy import logging import sys import traceback import unittest class PageTestResults(unittest.TestResult): - def __init__(self): + def __init__(self, output_stream=None): super(PageTestResults, self).__init__() + self._output_stream = output_stream self.pages_that_had_errors = set() self.pages_that_had_failures = set() self.successes = [] self.skipped = [] + def __copy__(self): + cls = self.__class__ + result = cls.__new__(cls) + for k, v in self.__dict__.items(): + if isinstance(v, collections.Container): + v = copy.copy(v) + setattr(result, k, v) + return result + @property def pages_that_had_errors_or_failures(self): return self.pages_that_had_errors.union( diff --git a/tools/telemetry/telemetry/page/results_options.py b/tools/telemetry/telemetry/page/results_options.py index 775b9e3..19dcd99 100644 --- a/tools/telemetry/telemetry/page/results_options.py +++ b/tools/telemetry/telemetry/page/results_options.py @@ -65,17 +65,16 @@ def PrepareResults(test, options): if options.output_format == 'none': return page_measurement_results.PageMeasurementResults( - trace_tag=options.output_trace_tag) + output_stream, trace_tag=options.output_trace_tag) elif options.output_format == 'csv': return csv_page_measurement_results.CsvPageMeasurementResults( - output_stream, - test.results_are_the_same_on_every_page) + output_stream, test.results_are_the_same_on_every_page) elif options.output_format == 'block': return block_page_measurement_results.BlockPageMeasurementResults( output_stream) elif options.output_format == 'buildbot': return buildbot_page_measurement_results.BuildbotPageMeasurementResults( - trace_tag=options.output_trace_tag) + output_stream, trace_tag=options.output_trace_tag) elif options.output_format == 'gtest': return gtest_test_results.GTestTestResults(output_stream) elif options.output_format == 'html': |