| author | bolian@chromium.org <bolian@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-03-27 11:54:00 +0000 |
|---|---|---|
| committer | bolian@chromium.org <bolian@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-03-27 11:54:00 +0000 |
| commit | 28e203300ff945e49b7b1c37376d6380f75744e5 | |
| tree | 424decb4aa92c9b9497c87d7a530f99846567c4f | |
| parent | 9aa2bf89a2fd0a1b7e80eaa2f8cbe0acae7b52d4 | |
Added telemetry test metric network.py.
To be used by https://codereview.chromium.org/191383003/.
BUG=320748
Review URL: https://codereview.chromium.org/211133004
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@259837 0039d316-1c4b-4281-b951-d872f2087c98
| -rw-r--r-- | tools/perf/metrics/network.py | 191 |
| -rw-r--r-- | tools/perf/metrics/network_unittest.py | 174 |
| -rw-r--r-- | tools/perf/metrics/test_page_measurement_results.py | 30 |
| -rw-r--r-- | tools/perf/metrics/timeline_unittest.py | 32 |
4 files changed, 399 insertions, 28 deletions
diff --git a/tools/perf/metrics/network.py b/tools/perf/metrics/network.py
new file mode 100644
index 0000000..bd84c4f
--- /dev/null
+++ b/tools/perf/metrics/network.py
@@ -0,0 +1,191 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import gzip
+import hashlib
+import io
+import logging
+import zlib
+
+from metrics import Metric
+from telemetry.page import page_measurement
+# All network metrics are Chrome only for now.
+from telemetry.core.backends.chrome import inspector_network
+from telemetry.core.timeline import recording_options
+
+
+class NetworkMetricException(page_measurement.MeasurementFailure):
+  pass
+
+
+class HTTPResponse(object):
+  """Represents an HTTP response from a timeline event."""
+  def __init__(self, event):
+    self._response = (
+        inspector_network.InspectorNetworkResponseData.FromTimelineEvent(event))
+    self._content_length = None
+
+  @property
+  def response(self):
+    return self._response
+
+  @property
+  def url_signature(self):
+    return hashlib.md5(self.response.url).hexdigest()
+
+  @property
+  def content_length(self):
+    if self._content_length is None:
+      self._content_length = self.GetContentLength()
+    return self._content_length
+
+  @property
+  def has_original_content_length(self):
+    return 'X-Original-Content-Length' in self.response.headers
+
+  @property
+  def original_content_length(self):
+    if self.has_original_content_length:
+      return int(self.response.GetHeader('X-Original-Content-Length'))
+    return 0
+
+  @property
+  def data_saving_rate(self):
+    if (self.response.served_from_cache or
+        not self.has_original_content_length or
+        self.original_content_length <= 0):
+      return 0.0
+    return (float(self.original_content_length - self.content_length) /
+            self.original_content_length)
+
+  def GetContentLengthFromBody(self):
+    resp = self.response
+    body, base64_encoded = resp.GetBody()
+    if not body:
+      return 0
+    # Binary data such as images is base64-encoded. Decode it to get the
+    # actual content length.
+    if base64_encoded:
+      decoded = base64.b64decode(body)
+      return len(decoded)
+
+    encoding = resp.GetHeader('Content-Encoding')
+    if not encoding:
+      return len(body)
+    # The response body returned from a timeline event is always decompressed.
+    # So, we need to compress it to get the actual content length if headers
+    # say so.
+    encoding = encoding.lower()
+    if encoding == 'gzip':
+      return self.GetGizppedBodyLength(body)
+    elif encoding == 'deflate':
+      return len(zlib.compress(body, 9))
+    else:
+      raise NetworkMetricException, (
+          'Unknown Content-Encoding %s for %s' % (encoding, resp.url))
+
+  def GetContentLength(self):
+    cl = 0
+    try:
+      cl = self.GetContentLengthFromBody()
+    except Exception, e:
+      resp = self.response
+      logging.warning('Failed to get content length for %s from body: %s',
+                      resp.url[:100], e)
+      cl_header = resp.GetHeader('Content-Length')
+      if cl_header:
+        cl = int(cl_header)
+      else:
+        body, _ = resp.GetBody()
+        if body:
+          cl = len(body)
+    return cl
+
+  @staticmethod
+  def GetGizppedBodyLength(body):
+    if not body:
+      return 0
+    bio = io.BytesIO()
+    try:
+      with gzip.GzipFile(fileobj=bio, mode="wb", compresslevel=9) as f:
+        f.write(body.encode('utf-8'))
+    except Exception, e:
+      logging.warning('Failed to gzip response body: %s', e)
+      raise e
+    return len(bio.getvalue())
+
+
+class NetworkMetric(Metric):
+  """A network metric based on timeline events."""
+
+  def __init__(self):
+    super(NetworkMetric, self).__init__()
+
+    # Whether to add detailed results for each sub-resource in a page.
+    self.add_result_for_resource = False
+    self.compute_data_saving = False
+    self._events = None
+
+  def Start(self, page, tab):
+    self._events = None
+    opts = recording_options.TimelineRecordingOptions()
+    opts.record_network = True
+    tab.StartTimelineRecording(opts)
+
+  def Stop(self, page, tab):
+    assert self._events is None
+    tab.StopTimelineRecording()
+
+  def IterResponses(self, tab):
+    if self._events is None:
+      self._events = tab.timeline_model.GetAllEventsOfName('HTTPResponse')
+    if len(self._events) == 0:
+      return
+    for e in self._events:
+      yield self.ResponseFromEvent(e)
+
+  def ResponseFromEvent(self, event):
+    return HTTPResponse(event)
+
+  def AddResults(self, tab, results):
+    content_length = 0
+    original_content_length = 0
+
+    for resp in self.IterResponses(tab):
+      # Ignore content length calculation for cache hits.
+      if resp.response.served_from_cache:
+        continue
+
+      resource = resp.response.url
+      resource_signature = resp.url_signature
+      cl = resp.content_length
+      if resp.has_original_content_length:
+        ocl = resp.original_content_length
+        if ocl < cl:
+          logging.warning('original content length (%d) is less than content '
+                          'length (%d) for resource %s', ocl, cl, resource)
+        if self.add_result_for_resource:
+          results.Add('resource_data_saving_' + resource_signature,
+                      'percent', resp.data_saving_rate * 100)
+          results.Add('resource_original_content_length_' + resource_signature,
+                      'bytes', ocl)
+        original_content_length += ocl
+      else:
+        original_content_length += cl
+      if self.add_result_for_resource:
+        results.Add(
+            'resource_content_length_' + resource_signature, 'bytes', cl)
+      content_length += cl
+
+    results.Add('content_length', 'bytes', content_length)
+    results.Add('original_content_length', 'bytes', original_content_length)
+    if self.compute_data_saving:
+      if (original_content_length > 0 and
+          original_content_length >= content_length):
+        saving = (float(original_content_length - content_length) * 100 /
+                  original_content_length)
+        results.Add('data_saving', 'percent', saving)
+      else:
+        results.Add('data_saving', 'percent', 0.0)
diff --git a/tools/perf/metrics/network_unittest.py b/tools/perf/metrics/network_unittest.py
new file mode 100644
index 0000000..316f7c0
--- /dev/null
+++ b/tools/perf/metrics/network_unittest.py
@@ -0,0 +1,174 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+
+from metrics import test_page_measurement_results
+from metrics import network
+from telemetry.core.timeline import event
+
+
+HTML_BODY = """<!DOCTYPE HTML>
+<html>
+  <head> </head>
+  <body>
+    <div id="test"> TEST HTML</div>
+  </body>
+</html>"""
+IMAGE_BODY = """fake image data"""
+GZIPPED_HTML_LEN = network.HTTPResponse.GetGizppedBodyLength(HTML_BODY)
+# Make up original content length for the image.
+IMAGE_OCL = 3 * len(IMAGE_BODY)
+
+
+class NetworkMetricTest(unittest.TestCase):
+  @staticmethod
+  def MakeNetworkTimelineEvent(
+      url, response_headers, body=None, base64_encoded_body=False,
+      served_from_cache=False, request_headers=None, status=200):
+    if not request_headers:
+      request_headers = {}
+    e = event.TimelineEvent('network', 'HTTPResponse', 0, 0)
+    e.args = {}
+    e.args['requestId'] = 0
+    e.args['response'] = {
+        'status': status,
+        'url': url,
+        'headers': response_headers,
+        'requestHeaders': request_headers,
+    }
+    e.args['body'] = body
+    e.args['base64_encoded_body'] = base64_encoded_body
+    e.args['served_from_cache'] = served_from_cache
+    return e
+
+  def testHTTPResponse(self):
+    url = 'http://test.url'
+    self.assertLess(GZIPPED_HTML_LEN, len(HTML_BODY))
+
+    # A plain text HTML response
+    resp = network.HTTPResponse(self.MakeNetworkTimelineEvent(
+        url=url,
+        response_headers={
+            'Content-Type': 'text/html',
+            'Content-Length': str(len(HTML_BODY)),
+        },
+        body=HTML_BODY))
+    self.assertEqual(url, resp.response.url)
+    body, base64_encoded = resp.response.GetBody()
+    self.assertEqual(HTML_BODY, body)
+    self.assertFalse(base64_encoded)
+    self.assertEqual('text/html', resp.response.GetHeader('Content-Type'))
+
+    self.assertEqual(len(HTML_BODY), resp.content_length)
+    self.assertEqual(None, resp.response.GetHeader('Content-Encoding'))
+    self.assertFalse(resp.has_original_content_length)
+    self.assertEqual(0.0, resp.data_saving_rate)
+
+    # A gzipped HTML response
+    resp = network.HTTPResponse(self.MakeNetworkTimelineEvent(
+        url=url,
+        response_headers={
+            'Content-Type': 'text/html',
+            'Content-Encoding': 'gzip',
+            'X-Original-Content-Length': str(len(HTML_BODY)),
+        },
+        body=HTML_BODY))
+    body, base64_encoded = resp.response.GetBody()
+    self.assertFalse(base64_encoded)
+    self.assertEqual(GZIPPED_HTML_LEN, resp.content_length)
+    self.assertEqual('gzip', resp.response.GetHeader('Content-Encoding'))
+    self.assertTrue(resp.has_original_content_length)
+    self.assertEqual(len(HTML_BODY), resp.original_content_length)
+    self.assertEqual(
+        float(len(HTML_BODY) - GZIPPED_HTML_LEN) / len(HTML_BODY),
+        resp.data_saving_rate)
+
+    # A JPEG image response.
+    resp = network.HTTPResponse(self.MakeNetworkTimelineEvent(
+        url='http://test.image',
+        response_headers={
+            'Content-Type': 'image/jpeg',
+            'Content-Encoding': 'gzip',
+            'X-Original-Content-Length': str(IMAGE_OCL),
+        },
+        body=base64.b64encode(IMAGE_BODY),
+        base64_encoded_body=True))
+    body, base64_encoded = resp.response.GetBody()
+    self.assertTrue(base64_encoded)
+    self.assertEqual(IMAGE_BODY, base64.b64decode(body))
+    self.assertEqual(len(IMAGE_BODY), resp.content_length)
+    self.assertTrue(resp.has_original_content_length)
+    self.assertEqual(IMAGE_OCL, resp.original_content_length)
+    self.assertFalse(resp.response.served_from_cache)
+    self.assertEqual(float(IMAGE_OCL - len(IMAGE_BODY)) / IMAGE_OCL,
+                     resp.data_saving_rate)
+
+    # A JPEG image response from cache.
+    resp = network.HTTPResponse(self.MakeNetworkTimelineEvent(
+        url='http://test.image',
+        response_headers={
+            'Content-Type': 'image/jpeg',
+            'Content-Encoding': 'gzip',
+            'X-Original-Content-Length': str(IMAGE_OCL),
+        },
+        body=base64.b64encode(IMAGE_BODY),
+        base64_encoded_body=True,
+        served_from_cache=True))
+    self.assertEqual(len(IMAGE_BODY), resp.content_length)
+    self.assertTrue(resp.has_original_content_length)
+    self.assertEqual(IMAGE_OCL, resp.original_content_length)
+    # Cached resource has zero saving.
+    self.assertTrue(resp.response.served_from_cache)
+    self.assertEqual(0.0, resp.data_saving_rate)
+
+  def testNetworkMetricResults(self):
+    events = [
+        # A plain text HTML.
+        self.MakeNetworkTimelineEvent(
+            url='http://test.html1',
+            response_headers={
+                'Content-Type': 'text/html',
+                'Content-Length': str(len(HTML_BODY)),
+            },
+            body=HTML_BODY),
+        # A compressed HTML.
+        self.MakeNetworkTimelineEvent(
+            url='http://test.html2',
+            response_headers={
+                'Content-Type': 'text/html',
+                'Content-Encoding': 'gzip',
+                'X-Original-Content-Length': str(len(HTML_BODY)),
+            },
+            body=HTML_BODY),
+        # A base64 encoded image.
+        self.MakeNetworkTimelineEvent(
+            url='http://test.image',
+            response_headers={
+                'Content-Type': 'image/jpeg',
+                'Content-Encoding': 'gzip',
+                'X-Original-Content-Length': str(IMAGE_OCL),
+            },
+            body=base64.b64encode(IMAGE_BODY),
+            base64_encoded_body=True),
+    ]
+    metric = network.NetworkMetric()
+    metric._events = events
+    metric.compute_data_saving = True
+
+    self.assertEqual(len(events), len(list(metric.IterResponses(None))))
+    results = test_page_measurement_results.TestPageMeasurementResults(self)
+    metric.AddResults(None, results)
+
+    cl = len(HTML_BODY) + GZIPPED_HTML_LEN + len(IMAGE_BODY)
+    results.AssertHasPageSpecificScalarValue('content_length', 'bytes', cl)
+
+    ocl = len(HTML_BODY) + len(HTML_BODY) + IMAGE_OCL
+    results.AssertHasPageSpecificScalarValue(
+        'original_content_length', 'bytes', ocl)
+
+    saving_percent = float(ocl - cl) * 100 / ocl
+    results.AssertHasPageSpecificScalarValue(
+        'data_saving', 'percent', saving_percent)
diff --git a/tools/perf/metrics/test_page_measurement_results.py b/tools/perf/metrics/test_page_measurement_results.py
new file mode 100644
index 0000000..e1df8bd
--- /dev/null
+++ b/tools/perf/metrics/test_page_measurement_results.py
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page_measurement_results
+from telemetry.page import page as page_module
+from telemetry.value import scalar
+
+class TestPageMeasurementResults(
+    page_measurement_results.PageMeasurementResults):
+  def __init__(self, test):
+    super(TestPageMeasurementResults, self).__init__()
+    self.test = test
+    page = page_module.Page("http://www.google.com", {})
+    self.WillMeasurePage(page)
+
+  def GetPageSpecificValueNamed(self, name):
+    values = [value for value in self.all_page_specific_values
+              if value.name == name]
+    assert len(values) == 1, 'Could not find value named %s' % name
+    return values[0]
+
+  def AssertHasPageSpecificScalarValue(self, name, units, expected_value):
+    value = self.GetPageSpecificValueNamed(name)
+    self.test.assertEquals(units, value.units)
+    self.test.assertTrue(isinstance(value, scalar.ScalarValue))
+    self.test.assertEquals(expected_value, value.value)
+
+  def __str__(self):
+    return '\n'.join([repr(x) for x in self.all_page_specific_values])
diff --git a/tools/perf/metrics/timeline_unittest.py b/tools/perf/metrics/timeline_unittest.py
index c6db833..6d19606 100644
--- a/tools/perf/metrics/timeline_unittest.py
+++ b/tools/perf/metrics/timeline_unittest.py
@@ -4,39 +4,15 @@
 
 import unittest
 
+from metrics import test_page_measurement_results
 from metrics import timeline
 from telemetry.core.timeline import bounds
 from telemetry.core.timeline import model as model_module
-from telemetry.page import page as page_module
-from telemetry.page import page_measurement_results
-from telemetry.value import scalar
-
-class TestPageMeasurementResults(
-    page_measurement_results.PageMeasurementResults):
-  def __init__(self, test):
-    super(TestPageMeasurementResults, self).__init__()
-    self.test = test
-    page = page_module.Page("http://www.google.com", {})
-    self.WillMeasurePage(page)
-
-  def GetPageSpecificValueNamed(self, name):
-    values = [value for value in self.all_page_specific_values
-              if value.name == name]
-    assert len(values) == 1, 'Could not find value named %s' % name
-    return values[0]
-
-  def AssertHasPageSpecificScalarValue(self, name, units, expected_value):
-    value = self.GetPageSpecificValueNamed(name)
-    self.test.assertEquals(units, value.units)
-    self.test.assertTrue(isinstance(value, scalar.ScalarValue))
-    self.test.assertEquals(expected_value, value.value)
-
-  def __str__(self):
-    return '\n'.join([repr(x) for x in self.all_page_specific_values])
+
 
 class LoadTimesTimelineMetric(unittest.TestCase):
   def GetResults(self, metric):
-    results = TestPageMeasurementResults(self)
+    results = test_page_measurement_results.TestPageMeasurementResults(self)
     tab = None
    metric.AddResults(tab, results)
     return results
@@ -82,7 +58,7 @@ class LoadTimesTimelineMetric(unittest.TestCase):
 
 class ThreadTimesTimelineMetricUnittest(unittest.TestCase):
   def GetResults(self, metric):
-    results = TestPageMeasurementResults(self)
+    results = test_page_measurement_results.TestPageMeasurementResults(self)
     tab = None
     metric.AddResults(tab, results)
     return results
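
For context, below is a minimal usage sketch (not part of this commit) showing how a page measurement might drive the new NetworkMetric through the Start/Stop/AddResults flow defined in network.py above. It assumes the telemetry PageMeasurement API of this era (WillNavigateToPage and MeasurePage hooks); the class name NetworkMeasurementSketch is made up for illustration.

```python
# Minimal usage sketch, not part of this change. Assumes telemetry's
# PageMeasurement API with WillNavigateToPage/MeasurePage hooks; the class
# name is hypothetical.
from telemetry.page import page_measurement

from metrics import network


class NetworkMeasurementSketch(page_measurement.PageMeasurement):
  """Reports NetworkMetric results (content length, data saving) per page."""

  def __init__(self):
    super(NetworkMeasurementSketch, self).__init__()
    self._metric = network.NetworkMetric()
    # Also emit the aggregate 'data_saving' percentage.
    self._metric.compute_data_saving = True

  def WillNavigateToPage(self, page, tab):
    # Start timeline recording with network events enabled.
    self._metric.Start(page, tab)

  def MeasurePage(self, page, tab, results):
    # Stop recording, then add content_length, original_content_length and
    # data_saving values for this page.
    self._metric.Stop(page, tab)
    self._metric.AddResults(tab, results)
```

Run against a page set, such a measurement would emit the same content_length, original_content_length, and data_saving values exercised by testNetworkMetricResults above.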
