author    mtytel@chromium.org <mtytel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2012-06-11 23:58:23 +0000
committer mtytel@chromium.org <mtytel@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2012-06-11 23:58:23 +0000
commit    ce560c323c954ff1e199abe53ef634e5bf2d39e4 (patch)
tree      42244fa1b19ea1367bf44818e12b5510962b96f5 /chrome/browser/performance_monitor
parent    ff2758f5f089dee85a030cb70bea2b3bad38c8fa (diff)
download  chromium_src-ce560c323c954ff1e199abe53ef634e5bf2d39e4.zip
          chromium_src-ce560c323c954ff1e199abe53ef634e5bf2d39e4.tar.gz
          chromium_src-ce560c323c954ff1e199abe53ef634e5bf2d39e4.tar.bz2
Chrome Performance Monitor: Metric Aggregation
We need metric data at different resolutions. This adds a utility that
re-samples metric data to a given resolution. Unit tests included.

BUG=130212
TEST=
Review URL: https://chromiumcodereview.appspot.com/10536099

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@141567 0039d316-1c4b-4281-b951-d872f2087c98
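For orientation, here is a minimal standalone sketch of the re-sampling approach added by this change, with plain doubles standing in for base::Time and base::TimeDelta; the names Sample and Aggregate are illustrative only and are not part of the change itself:

// Minimal standalone sketch of the step-function re-sampling (not Chromium
// code): seconds-as-doubles replace base::Time/base::TimeDelta.
#include <cstddef>
#include <cstdio>
#include <vector>

struct Sample {
  double time;   // seconds since some epoch
  double value;
};

std::vector<Sample> Aggregate(const std::vector<Sample>& samples,
                              double start, double resolution) {
  std::vector<Sample> results;
  std::size_t i = 0;
  // Ignore all the points before the aggregation start.
  while (i < samples.size() && samples[i].time < start)
    ++i;
  while (i < samples.size()) {
    // Each window is aligned to an integer multiple of |resolution| from
    // |start|; windows that contain no samples produce no output point.
    int offset = static_cast<int>((samples[i].time - start) / resolution);
    double window_start = start + offset * resolution;
    double window_end = window_start + resolution;
    double last_time = window_start;
    double integrated = 0.0;
    double value = 0.0;
    // Integrate the step function defined by the samples inside the window.
    while (i < samples.size() && samples[i].time <= window_end) {
      value = samples[i].value;
      integrated += value * (samples[i].time - last_time);
      last_time = samples[i].time;
      ++i;
    }
    if (i < samples.size())
      value = samples[i].value;
    // Extend the last step to the window boundary, then average the window.
    integrated += value * (window_end - last_time);
    results.push_back({window_end, integrated / resolution});
  }
  return results;
}

int main() {
  // Sparse samples at t = 20, 40, 60 s, re-sampled to 2 s windows from
  // t = 19 s. Prints t=21 value=4.71, t=41 value=7.85, t=61 value=9.42,
  // matching the expectations in the sparse unit test below.
  std::vector<Sample> metric = {{20, 3.14}, {40, 6.28}, {60, 9.42}};
  for (const Sample& s : Aggregate(metric, 19.0, 2.0))
    std::printf("t=%.0f value=%.2f\n", s.time, s.value);
  return 0;
}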
Diffstat (limited to 'chrome/browser/performance_monitor')
-rw-r--r--  chrome/browser/performance_monitor/performance_monitor_util.cc            38
-rw-r--r--  chrome/browser/performance_monitor/performance_monitor_util.h             12
-rw-r--r--  chrome/browser/performance_monitor/performance_monitor_util_unittest.cc   99
3 files changed, 149 insertions(+), 0 deletions(-)
diff --git a/chrome/browser/performance_monitor/performance_monitor_util.cc b/chrome/browser/performance_monitor/performance_monitor_util.cc
index 18cd15a..be8cbe5 100644
--- a/chrome/browser/performance_monitor/performance_monitor_util.cc
+++ b/chrome/browser/performance_monitor/performance_monitor_util.cc
@@ -7,11 +7,49 @@
#include "base/json/json_writer.h"
#include "base/memory/scoped_ptr.h"
#include "base/string_number_conversions.h"
+#include "base/time.h"
#include "chrome/browser/performance_monitor/events.h"
namespace performance_monitor {
namespace util {
+std::vector<MetricInfo> AggregateMetric(
+ const std::vector<MetricInfo>& metric_infos,
+ const base::Time& start,
+ const base::TimeDelta& resolution) {
+ std::vector<MetricInfo> results;
+ // Ignore all the points before the aggregation start.
+ std::vector<MetricInfo>::const_iterator it = metric_infos.begin();
+ for (; it != metric_infos.end() && it->time < start; ++it) { }
+
+ while (it != metric_infos.end()) {
+ // Finds the beginning of the next aggregation window.
+ int64 window_offset = (it->time - start) / resolution;
+ base::Time window_start = start + (window_offset * resolution);
+ base::Time window_end = window_start + resolution;
+ base::Time last_sample_time = window_start;
+ double integrated = 0.0;
+ double metric_value = 0.0;
+
+ // Aggregate the step function defined by the MetricInfos in |metric_infos|.
+ while (it != metric_infos.end() && it->time <= window_end) {
+ metric_value = it->value;
+ integrated += metric_value * (it->time - last_sample_time).InSecondsF();
+ last_sample_time = it->time;
+ ++it;
+ }
+ if (it != metric_infos.end())
+ metric_value = it->value;
+
+ // If the window splits an area of the step function, split the aggregation
+ // at the end of the window.
+ integrated += metric_value * (window_end - last_sample_time).InSecondsF();
+ double average = integrated / resolution.InSecondsF();
+ results.push_back(MetricInfo(window_end, average));
+ }
+ return results;
+}
+
scoped_ptr<Event> CreateExtensionInstallEvent(
const base::Time& time,
const std::string& id,
diff --git a/chrome/browser/performance_monitor/performance_monitor_util.h b/chrome/browser/performance_monitor/performance_monitor_util.h
index a2c1d3a..e9d53b9 100644
--- a/chrome/browser/performance_monitor/performance_monitor_util.h
+++ b/chrome/browser/performance_monitor/performance_monitor_util.h
@@ -7,11 +7,23 @@
#include "base/time.h"
#include "chrome/browser/performance_monitor/event.h"
+#include "chrome/browser/performance_monitor/metric_info.h"
#include "chrome/common/extensions/extension_constants.h"
namespace performance_monitor {
namespace util {
+// Metric data can be either dense or sporadic, so AggregateMetric() normalizes
+// the metric data in time. |metric_infos| must be sorted in increasing time.
+// Put concisely, AggregateMetric() does sample rate conversion from irregular
+// metric data points to a sample period of |resolution| beginning at |start|.
+// Each sampling window starts and ends at an integer multiple of |resolution|
+// away from |start|, and output points are omitted for windows with no data.
+std::vector<MetricInfo> AggregateMetric(
+ const std::vector<MetricInfo>& metric_infos,
+ const base::Time& start,
+ const base::TimeDelta& resolution);
+
// These are a collection of methods designed to create an event to store the
// pertinent information, given all the fields. Please use these methods to
// create any PerformanceMonitor events, as this will ensure strong-typing
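Based on the declaration above and the constructors exercised in the unit tests below, a call site would look roughly like the following fragment (a sketch only, assuming the includes already pulled in by performance_monitor_util.h):

// |metric_infos| must already be sorted by increasing time.
std::vector<MetricInfo> metric_infos;
metric_infos.push_back(MetricInfo(base::Time::FromDoubleT(20), 3.14));
metric_infos.push_back(MetricInfo(base::Time::FromDoubleT(40), 6.28));

std::vector<MetricInfo> resampled = performance_monitor::util::AggregateMetric(
    metric_infos,
    base::Time::FromDoubleT(19),        // aggregation start
    base::TimeDelta::FromSeconds(2));   // output sample period
// Each element of |resampled| holds the time-weighted average of the metric
// over one window, stamped with that window's end time.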
diff --git a/chrome/browser/performance_monitor/performance_monitor_util_unittest.cc b/chrome/browser/performance_monitor/performance_monitor_util_unittest.cc
new file mode 100644
index 0000000..712dd11
--- /dev/null
+++ b/chrome/browser/performance_monitor/performance_monitor_util_unittest.cc
@@ -0,0 +1,99 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+
+#include "base/time.h"
+#include "base/string_number_conversions.h"
+#include "chrome/browser/performance_monitor/metric_info.h"
+#include "chrome/browser/performance_monitor/performance_monitor_util.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace performance_monitor {
+
+class PerformanceMonitorUtilTest : public ::testing::Test {
+
+};
+
+TEST(PerformanceMonitorUtilTest, AggregateMetricEmptyTest) {
+ std::vector<MetricInfo> metric;
+ const base::Time data_time = base::Time::FromDoubleT(1);
+ metric.push_back(MetricInfo(data_time, 1));
+
+ const base::Time results_time = base::Time::FromDoubleT(3);
+ const base::TimeDelta resolution = base::TimeDelta::FromSeconds(1);
+ const std::vector<MetricInfo> aggregated_metric =
+ util::AggregateMetric(metric, results_time, resolution);
+ ASSERT_EQ(0u, aggregated_metric.size());
+}
+
+TEST(PerformanceMonitorUtilTest, AggregateMetricSimpleTest) {
+ const base::Time data_time = base::Time::FromDoubleT(2);
+ const base::Time results_time = base::Time::FromDoubleT(1);
+ const base::TimeDelta results_resolution = base::TimeDelta::FromSeconds(2);
+
+ const double value = 3.14;
+ std::vector<MetricInfo> metric;
+ metric.push_back(MetricInfo(data_time, value));
+ const std::vector<MetricInfo> aggregated_metric =
+ util::AggregateMetric(metric, results_time, results_resolution);
+
+ ASSERT_EQ(1u, aggregated_metric.size());
+ ASSERT_EQ(results_time + results_resolution, aggregated_metric[0].time);
+ ASSERT_EQ(value, aggregated_metric[0].value);
+}
+
+TEST(PerformanceMonitorUtilTest, AggregateMetricDenseTest) {
+ base::Time current_data_time = base::Time::FromDoubleT(2);
+ const base::TimeDelta data_resolution = base::TimeDelta::FromSeconds(1);
+ const base::Time results_time = base::Time::FromDoubleT(6);
+ const base::TimeDelta results_resolution = base::TimeDelta::FromSeconds(4);
+ double current_value = 0;
+ int num_points = 12;
+ std::vector<MetricInfo> metric;
+
+ for (int i = 0; i < num_points; ++i) {
+ metric.push_back(MetricInfo(current_data_time, current_value));
+ current_value += 1;
+ current_data_time += data_resolution;
+ }
+ const std::vector<MetricInfo> aggregated_metric =
+ util::AggregateMetric(metric, results_time, results_resolution);
+ // The first 4 points get ignored because they are before the start time.
+ // The remaining 8 points are aggregated into two data points.
+ ASSERT_EQ(2u, aggregated_metric.size());
+ ASSERT_EQ(results_time + results_resolution, aggregated_metric[0].time);
+ ASSERT_EQ(results_time + (2 * results_resolution), aggregated_metric[1].time);
+}
+
+TEST(PerformanceMonitorUtilTest, AggregateMetricSparseTest) {
+ std::vector<MetricInfo> metric;
+
+ const base::Time data_time1 = base::Time::FromDoubleT(20);
+ const double value1 = 3.14;
+ metric.push_back(MetricInfo(data_time1, value1));
+ const base::Time data_time2 = base::Time::FromDoubleT(40);
+ const double value2 = 6.28;
+ metric.push_back(MetricInfo(data_time2, value2));
+ const base::Time data_time3 = base::Time::FromDoubleT(60);
+ const double value3 = 9.42;
+ metric.push_back(MetricInfo(data_time3, value3));
+
+ const base::Time results_time = base::Time::FromDoubleT(19);
+ const base::TimeDelta results_resolution = base::TimeDelta::FromSeconds(2);
+ const std::vector<MetricInfo> aggregated_metric =
+ util::AggregateMetric(metric, results_time, results_resolution);
+
+ // The first aggregation point is split between the first value and the second
+ // value. The second is split between the second and third. The third doesn't
+ // have any data after it so the aggregation is the same value.
+ ASSERT_EQ(3u, aggregated_metric.size());
+ ASSERT_EQ(results_time + 1 * results_resolution, aggregated_metric[0].time);
+ ASSERT_EQ((value1 + value2) / 2, aggregated_metric[0].value);
+ ASSERT_EQ(results_time + 11 * results_resolution, aggregated_metric[1].time);
+ ASSERT_EQ((value2 + value3) / 2, aggregated_metric[1].value);
+ ASSERT_EQ(results_time + 21 * results_resolution, aggregated_metric[2].time);
+ ASSERT_EQ(value3, aggregated_metric[2].value);
+}
+} // namespace performance_monitor
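Worked numbers for the sparse test above: the first populated window is [19 s, 21 s]. The step function contributes value1 * (20 - 19) + value2 * (21 - 20) = 3.14 + 6.28 = 9.42 value-seconds, and dividing by the 2-second resolution gives 4.71 = (value1 + value2) / 2, which is what the first value assertion checks. The middle window [39 s, 41 s] averages to (value2 + value3) / 2 = 7.85 the same way, and the last window [59 s, 61 s] contains only the final sample, so its average is simply value3.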