summary | refs | log | tree | commit | diff | stats
path: root/gpu
diff options
context:
space:
mode:
author    dcastagna <dcastagna@chromium.org>  2015-02-23 21:24:47 -0800
committer Commit bot <commit-bot@chromium.org>  2015-02-24 05:25:28 +0000
commit    7396d547d657957fe50469cfbb08619baed406b8 (patch)
tree      fa0b4589e2d62a22ac9bbddd4d9479a47baefbad /gpu
parent    5d5cf5d9155657b08940f23f791215292d68a066 (diff)
download  chromium_src-7396d547d657957fe50469cfbb08619baed406b8.zip
          chromium_src-7396d547d657957fe50469cfbb08619baed406b8.tar.gz
          chromium_src-7396d547d657957fe50469cfbb08619baed406b8.tar.bz2
gpu: Add support for gpu_perftests for android.
This CL adds a gpu_perftests_apk target to build an APK for Android. It also adds a check that detects if the timings are valid and discards them if they're not. The number of valid runs (where we could get timings) is printed out along with the other perf results as "sample_runs". BUG=423481 Review URL: https://codereview.chromium.org/944073005 Cr-Commit-Position: refs/heads/master@{#317756}
Diffstat (limited to 'gpu')
-rw-r--r--  gpu/gpu.gyp                               28
-rw-r--r--  gpu/perftests/texture_upload_perftest.cc  38
2 files changed, 53 insertions(+), 13 deletions(-)
diff --git a/gpu/gpu.gyp b/gpu/gpu.gyp
index 70e99c8..2bb7465 100644
--- a/gpu/gpu.gyp
+++ b/gpu/gpu.gyp
@@ -298,6 +298,23 @@
'perftests/run_all_tests.cc',
'perftests/texture_upload_perftest.cc',
],
+ 'conditions': [
+ ['OS == "android"',
+ {
+ 'dependencies': [
+ '../testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }
+ ],
+ # See http://crbug.com/162998#c4 for why this is needed.
+ ['OS=="linux" and use_allocator!="none"',
+ {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }
+ ],
+ ],
},
{
# GN version: //gpu:gl_tests
@@ -668,6 +685,17 @@
},
'includes': [ '../build/apk_test.gypi' ],
},
+ {
+ 'target_name': 'gpu_perftests_apk',
+ 'type': 'none',
+ 'dependencies': [
+ 'gpu_perftests',
+ ],
+ 'variables': {
+ 'test_suite_name': 'gpu_perftests',
+ },
+ 'includes': [ '../build/apk_test.gypi' ],
+ },
],
}],
['OS == "win"', {
diff --git a/gpu/perftests/texture_upload_perftest.cc b/gpu/perftests/texture_upload_perftest.cc
index 2e9ef6c..5f15658 100644
--- a/gpu/perftests/texture_upload_perftest.cc
+++ b/gpu/perftests/texture_upload_perftest.cc
@@ -12,6 +12,7 @@
#include "gpu/command_buffer/service/gpu_timing.h"
#include "gpu/perftests/measurements.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "testing/perf/perf_test.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_context.h"
@@ -232,10 +233,14 @@ class TextureUploadPerfTest : public testing::Test {
EXPECT_EQ(pixels, pixels_rendered);
std::vector<Measurement> measurements;
- measurements.push_back(total_timers.GetAsMeasurement("total"));
- measurements.push_back(tex_timers.GetAsMeasurement("teximage2d"));
- measurements.push_back(draw_timers.GetAsMeasurement("drawarrays"));
- measurements.push_back(finish_timers.GetAsMeasurement("finish"));
+ bool gpu_timer_errors =
+ gpu_timing_.IsAvailable() && gpu_timing_.CheckAndResetTimerErrors();
+ if (!gpu_timer_errors) {
+ measurements.push_back(total_timers.GetAsMeasurement("total"));
+ measurements.push_back(tex_timers.GetAsMeasurement("teximage2d"));
+ measurements.push_back(draw_timers.GetAsMeasurement("drawarrays"));
+ measurements.push_back(finish_timers.GetAsMeasurement("finish"));
+ }
return measurements;
}
@@ -259,22 +264,29 @@ TEST_F(TextureUploadPerfTest, glTexImage2d) {
std::vector<uint8> pixels;
base::SmallMap<std::map<std::string, Measurement>>
aggregates; // indexed by name
+ int successful_runs = 0;
for (int i = 0; i < kUploadPerfWarmupRuns + kUploadPerfTestRuns; ++i) {
GenerateTextureData(size_, i + 1, &pixels);
auto run = UploadAndDraw(pixels, GL_RGBA, GL_UNSIGNED_BYTE);
- if (i >= kUploadPerfWarmupRuns) {
- for (const Measurement& m : run) {
- auto& agg = aggregates[m.name];
- agg.name = m.name;
- agg.Increment(m);
- }
+ if (i < kUploadPerfWarmupRuns || !run.size()) {
+ continue;
+ }
+ successful_runs++;
+ for (const Measurement& measurement : run) {
+ auto& aggregate = aggregates[measurement.name];
+ aggregate.name = measurement.name;
+ aggregate.Increment(measurement);
}
}
- for (const auto& entry : aggregates) {
- const auto m = entry.second.Divide(kUploadPerfTestRuns);
- m.PrintResult();
+ if (successful_runs) {
+ for (const auto& entry : aggregates) {
+ const auto m = entry.second.Divide(successful_runs);
+ m.PrintResult();
+ }
}
+ perf_test::PrintResult("sample_runs", "", "",
+ static_cast<size_t>(successful_runs), "laps", true);
}
} // namespace