summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorliaoyuke <liaoyuke@chromium.org>2015-07-13 16:25:39 -0700
committerCommit bot <commit-bot@chromium.org>2015-07-13 23:26:15 +0000
commit55bcb47addbc50f753487a73fb13ed5e370ae169 (patch)
tree77a31fc3413e13752058379923cc3eb9b9809a76
parent3a8d7ac6bef0ac0609ba5552fe1cf393230466dd (diff)
downloadchromium_src-55bcb47addbc50f753487a73fb13ed5e370ae169.zip
chromium_src-55bcb47addbc50f753487a73fb13ed5e370ae169.tar.gz
chromium_src-55bcb47addbc50f753487a73fb13ed5e370ae169.tar.bz2
Added image pattern comparison logic including unit tests.
The Image pattern comparison logic is: calculate the average color of the expected rect region on the current frame and compare it to the expected color to see if it matches. BUG= Review URL: https://codereview.chromium.org/1219923011 Cr-Commit-Position: refs/heads/master@{#338588}
-rw-r--r--remoting/test/app_remoting_connected_client_fixture.cc1
-rw-r--r--remoting/test/app_remoting_connection_helper.cc7
-rw-r--r--remoting/test/app_remoting_connection_helper.h3
-rw-r--r--remoting/test/app_remoting_latency_test_fixture.cc9
-rw-r--r--remoting/test/app_remoting_latency_test_fixture.h6
-rw-r--r--remoting/test/test_video_renderer.cc147
-rw-r--r--remoting/test/test_video_renderer.h14
-rw-r--r--remoting/test/test_video_renderer_unittest.cc284
8 files changed, 392 insertions, 79 deletions
diff --git a/remoting/test/app_remoting_connected_client_fixture.cc b/remoting/test/app_remoting_connected_client_fixture.cc
index f624f92..adc22d3 100644
--- a/remoting/test/app_remoting_connected_client_fixture.cc
+++ b/remoting/test/app_remoting_connected_client_fixture.cc
@@ -87,7 +87,6 @@ bool AppRemotingConnectedClientFixture::VerifyResponseForSimpleHostMessage(
run_loop_->Run();
timer_->Stop();
- connection_helper_->ResetHostMessageReceivedCallback();
return message_received;
}
diff --git a/remoting/test/app_remoting_connection_helper.cc b/remoting/test/app_remoting_connection_helper.cc
index 14bbde2..83b92ca 100644
--- a/remoting/test/app_remoting_connection_helper.cc
+++ b/remoting/test/app_remoting_connection_helper.cc
@@ -4,6 +4,7 @@
#include "remoting/test/app_remoting_connection_helper.h"
+#include "base/callback_helpers.h"
#include "base/json/json_reader.h"
#include "base/logging.h"
#include "base/run_loop.h"
@@ -53,10 +54,6 @@ void AppRemotingConnectionHelper::SetHostMessageReceivedCallback(
host_message_received_callback_ = host_message_received_callback;
}
-void AppRemotingConnectionHelper::ResetHostMessageReceivedCallback() {
- host_message_received_callback_.Reset();
-}
-
bool AppRemotingConnectionHelper::StartConnection() {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(client_);
@@ -148,7 +145,7 @@ void AppRemotingConnectionHelper::HostMessageReceived(
// If a callback is not registered, then the message is passed to a default
// handler for the class based on the message type.
if (!host_message_received_callback_.is_null()) {
- host_message_received_callback_.Run(message);
+ base::ResetAndReturn(&host_message_received_callback_).Run(message);
} else if (message.type() == "onWindowAdded") {
HandleOnWindowAddedMessage(message);
} else {
diff --git a/remoting/test/app_remoting_connection_helper.h b/remoting/test/app_remoting_connection_helper.h
index 6f39e82..dd6dad3 100644
--- a/remoting/test/app_remoting_connection_helper.h
+++ b/remoting/test/app_remoting_connection_helper.h
@@ -55,9 +55,6 @@ class AppRemotingConnectionHelper
void SetHostMessageReceivedCallback(
HostMessageReceivedCallback host_message_received_callback);
- // Reset |host_message_received_callback_ to null|.
- void ResetHostMessageReceivedCallback();
-
private:
// RemoteConnectionObserver interface.
void ConnectionStateChanged(protocol::ConnectionToHost::State state,
diff --git a/remoting/test/app_remoting_latency_test_fixture.cc b/remoting/test/app_remoting_latency_test_fixture.cc
index 775a114..8723535 100644
--- a/remoting/test/app_remoting_latency_test_fixture.cc
+++ b/remoting/test/app_remoting_latency_test_fixture.cc
@@ -51,16 +51,15 @@ void AppRemotingLatencyTestFixture::TearDown() {
void AppRemotingLatencyTestFixture::SetExpectedImagePattern(
const webrtc::DesktopRect& expected_rect,
- const RgbaColor& expected_color) {
+ uint32_t expected_color) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!run_loop_ || !run_loop_->running());
run_loop_.reset(new base::RunLoop());
base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE,
- base::Bind(&TestVideoRenderer::SetImagePatternAndMatchedCallback,
- test_video_renderer_, expected_rect, expected_color,
- run_loop_->QuitClosure()));
+ FROM_HERE, base::Bind(&TestVideoRenderer::ExpectAverageColorInRect,
+ test_video_renderer_, expected_rect, expected_color,
+ run_loop_->QuitClosure()));
}
bool AppRemotingLatencyTestFixture::WaitForImagePatternMatched(
diff --git a/remoting/test/app_remoting_latency_test_fixture.h b/remoting/test/app_remoting_latency_test_fixture.h
index 1cebe84..bc2572a 100644
--- a/remoting/test/app_remoting_latency_test_fixture.h
+++ b/remoting/test/app_remoting_latency_test_fixture.h
@@ -30,8 +30,6 @@ struct RemoteApplicationDetails;
class AppRemotingConnectionHelper;
class TestVideoRenderer;
-typedef uint32 RgbaColor;
-
// Creates a connection to a remote host which is available for tests to use.
// Provides convenient methods to create test cases to measure the input and
// rendering latency between client and the remote host.
@@ -46,7 +44,7 @@ class AppRemotingLatencyTestFixture : public testing::Test {
// Set expected image pattern for comparison and a matched reply will be
// called when the pattern is matched.
void SetExpectedImagePattern(const webrtc::DesktopRect& expected_rect,
- const RgbaColor& expected_color);
+ uint32_t expected_avg_color);
// Waits for an image pattern matched reply up to |max_wait_time|. Returns
// true if we received a response within the maximum time limit.
@@ -54,7 +52,7 @@ class AppRemotingLatencyTestFixture : public testing::Test {
bool WaitForImagePatternMatched(const base::TimeDelta& max_wait_time);
// Name of the application being tested.
- // NOTE: must be initialized in the constructor of derived class.
+ // NOTE: Must be initialized in the constructor of derived class.
std::string application_name_;
private:
diff --git a/remoting/test/test_video_renderer.cc b/remoting/test/test_video_renderer.cc
index 75dd2c9..bd806c8 100644
--- a/remoting/test/test_video_renderer.cc
+++ b/remoting/test/test_video_renderer.cc
@@ -4,7 +4,10 @@
#include "remoting/test/test_video_renderer.h"
+#include <cmath>
+
#include "base/bind.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/synchronization/lock.h"
#include "base/thread_task_runner_handle.h"
@@ -15,6 +18,27 @@
#include "remoting/proto/video.pb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
+namespace {
+
+// Used to store an RGB color, and it can be converted from uint32_t.
+struct RGBValue {
+ RGBValue(int r, int g, int b) : red(r), green(g), blue(b) {}
+
+ int red;
+ int green;
+ int blue;
+};
+
+// Convert a uint32_t to an RGBValue.
+RGBValue ConvertUint32ToRGBValue(uint32_t color) {
+ RGBValue rgb_value((color >> 16) & 0xFF, (color >> 8) & 0xFF, color & 0xFF);
+ return rgb_value;
+}
+
+// Used to account for frame resizing and lossy encoding error in percentage.
+const double kMaxColorError = 0.02;
+} // namespace
+
namespace remoting {
namespace test {
@@ -34,17 +58,25 @@ class TestVideoRenderer::Core {
// Initialize a decoder to decode video packets.
void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec);
- // Returns a copy of the current buffer.
- scoped_ptr<webrtc::DesktopFrame> GetBufferForTest() const;
+ // Returns a copy of the current frame.
+ scoped_ptr<webrtc::DesktopFrame> GetCurrentFrameForTest() const;
// Set expected image pattern for comparison and the callback will be called
// when the pattern is matched.
- void SetImagePatternAndMatchedCallback(
+ void ExpectAverageColorInRect(
const webrtc::DesktopRect& expected_rect,
- const RgbaColor& expected_color,
+ uint32_t expected_avg_color,
const base::Closure& image_pattern_matched_callback);
private:
+ // Returns the average color of pixels that fall within |rect| on the current frame.
+ RGBValue CalculateAverageColorValue(const webrtc::DesktopRect& rect) const;
+
+ // Compares |candidate_avg_value| to |expected_avg_color_|.
+ // Returns true if the root mean square of the errors in the R, G and B
+ // components does not exceed a given limit.
+ bool ExpectedAverageColorIsMatched(const RGBValue& candidate_avg_value) const;
+
// Used to ensure Core methods are called on the same thread.
base::ThreadChecker thread_checker_;
@@ -61,18 +93,14 @@ class TestVideoRenderer::Core {
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
// Used to store decoded video frame.
- scoped_ptr<webrtc::DesktopFrame> buffer_;
+ scoped_ptr<webrtc::DesktopFrame> frame_;
- // Protects access to |buffer_|.
+ // Protects access to |frame_|.
mutable base::Lock lock_;
// Used to store the expected image pattern.
webrtc::DesktopRect expected_rect_;
- RgbaColor expected_color_;
-
- // Maintains accumulating image pattern.
- webrtc::DesktopRect accumulating_rect_;
- RgbaColor accumulating_color_;
+ uint32_t expected_avg_color_;
// Used to store the callback when expected pattern is matched.
base::Closure image_pattern_matched_callback_;
@@ -124,10 +152,10 @@ void TestVideoRenderer::Core::SetCodecForDecoding(
}
scoped_ptr<webrtc::DesktopFrame>
- TestVideoRenderer::Core::GetBufferForTest() const {
+TestVideoRenderer::Core::GetCurrentFrameForTest() const {
base::AutoLock auto_lock(lock_);
- DCHECK(buffer_);
- return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*buffer_.get()));
+ DCHECK(frame_);
+ return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*frame_.get()));
}
void TestVideoRenderer::Core::ProcessVideoPacket(
@@ -147,7 +175,7 @@ void TestVideoRenderer::Core::ProcessVideoPacket(
if (!screen_size_.equals(source_size)) {
screen_size_ = source_size;
decoder_->Initialize(screen_size_);
- buffer_.reset(new webrtc::BasicDesktopFrame(screen_size_));
+ frame_.reset(new webrtc::BasicDesktopFrame(screen_size_));
}
}
@@ -166,28 +194,84 @@ void TestVideoRenderer::Core::ProcessVideoPacket(
// previous video frame.
decoder_->RenderFrame(screen_size_,
webrtc::DesktopRect::MakeWH(screen_size_.width(),
- screen_size_.height()), buffer_->data(),
- buffer_->stride(), &updated_region_);
+ screen_size_.height()),
+ frame_->data(), frame_->stride(), &updated_region_);
}
main_task_runner_->PostTask(FROM_HERE, done);
- // TODO(liaoyuke): Update |accumulating_rect_| and |accumulating_color_|, then
- // compare to the expected image pattern to check whether the pattern is
- // matched or not and update |image_pattern_matched| accordingly.
+ // Check to see if an image pattern matched reply is passed in, and whether
+ // the |expected_rect_| falls within the current frame.
+ if (image_pattern_matched_callback_.is_null() ||
+ expected_rect_.right() > frame_->size().width() ||
+ expected_rect_.bottom() > frame_->size().height()) {
+ return;
+ }
+
+ // Compare the expected image pattern with the corresponding rectangle region
+ // on the current frame.
+ RGBValue accumulating_avg_value = CalculateAverageColorValue(expected_rect_);
+ VLOG(2) << accumulating_avg_value.red << " " << accumulating_avg_value.green
+ << " " << accumulating_avg_value.blue;
+
+ if (ExpectedAverageColorIsMatched(accumulating_avg_value)) {
+ main_task_runner_->PostTask(
+ FROM_HERE, base::ResetAndReturn(&image_pattern_matched_callback_));
+ }
}
-void TestVideoRenderer::Core::SetImagePatternAndMatchedCallback(
+void TestVideoRenderer::Core::ExpectAverageColorInRect(
const webrtc::DesktopRect& expected_rect,
- const RgbaColor& expected_color,
+ uint32_t expected_avg_color,
const base::Closure& image_pattern_matched_callback) {
DCHECK(thread_checker_.CalledOnValidThread());
expected_rect_ = expected_rect;
- expected_color_ = expected_color;
+ expected_avg_color_ = expected_avg_color;
image_pattern_matched_callback_ = image_pattern_matched_callback;
}
+RGBValue TestVideoRenderer::Core::CalculateAverageColorValue(
+ const webrtc::DesktopRect& rect) const {
+ int red_sum = 0;
+ int green_sum = 0;
+ int blue_sum = 0;
+
+ // Loop through pixels that fall within |rect| to obtain the
+ // average color value.
+ for (int y = rect.top(); y < rect.bottom(); ++y) {
+ uint8_t* frame_pos =
+ frame_->data() + (y * frame_->stride() +
+ rect.left() * webrtc::DesktopFrame::kBytesPerPixel);
+
+ // Pixels of decoded video frame are presented in ARGB format.
+ for (int x = 0; x < rect.width(); ++x) {
+ red_sum += frame_pos[2];
+ green_sum += frame_pos[1];
+ blue_sum += frame_pos[0];
+ frame_pos += 4;
+ }
+ }
+
+ int area = rect.width() * rect.height();
+ RGBValue rgb_value(red_sum / area, green_sum / area, blue_sum / area);
+ return rgb_value;
+}
+
+bool TestVideoRenderer::Core::ExpectedAverageColorIsMatched(
+ const RGBValue& candidate_avg_value) const {
+ RGBValue expected_avg_value = ConvertUint32ToRGBValue(expected_avg_color_);
+ double error_sum_squares = 0;
+ double red_error = expected_avg_value.red - candidate_avg_value.red;
+ double green_error = expected_avg_value.green - candidate_avg_value.green;
+ double blue_error = expected_avg_value.blue - candidate_avg_value.blue;
+ error_sum_squares = red_error * red_error + green_error * green_error +
+ blue_error * blue_error;
+ error_sum_squares /= (255.0 * 255.0);
+
+ return sqrt(error_sum_squares / 3) < kMaxColorError;
+}
+
TestVideoRenderer::TestVideoRenderer()
: video_decode_thread_(
new base::Thread("TestVideoRendererVideoDecodingThread")),
@@ -267,23 +351,26 @@ void TestVideoRenderer::SetCodecForDecoding(
codec));
}
-scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetBufferForTest() const {
+scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetCurrentFrameForTest()
+ const {
DCHECK(thread_checker_.CalledOnValidThread());
- return core_->GetBufferForTest();
+ return core_->GetCurrentFrameForTest();
}
-void TestVideoRenderer::SetImagePatternAndMatchedCallback(
+void TestVideoRenderer::ExpectAverageColorInRect(
const webrtc::DesktopRect& expected_rect,
- const RgbaColor& expected_color,
+ uint32_t expected_avg_color,
const base::Closure& image_pattern_matched_callback) {
DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(!expected_rect.is_empty()) << "Expected rect cannot be empty";
DVLOG(2) << "TestVideoRenderer::SetImagePatternAndMatchedCallback() Called";
video_decode_task_runner_->PostTask(
- FROM_HERE, base::Bind(&Core::SetImagePatternAndMatchedCallback,
- base::Unretained(core_.get()), expected_rect,
- expected_color, image_pattern_matched_callback));
+ FROM_HERE,
+ base::Bind(&Core::ExpectAverageColorInRect, base::Unretained(core_.get()),
+ expected_rect, expected_avg_color,
+ image_pattern_matched_callback));
}
} // namespace test
diff --git a/remoting/test/test_video_renderer.h b/remoting/test/test_video_renderer.h
index b9a84f7..a6cca37 100644
--- a/remoting/test/test_video_renderer.h
+++ b/remoting/test/test_video_renderer.h
@@ -23,8 +23,6 @@ class DesktopFrame;
class DesktopRect;
}
-typedef uint32 RgbaColor;
-
namespace remoting {
namespace test {
@@ -49,19 +47,19 @@ class TestVideoRenderer : public VideoRenderer, public protocol::VideoStub {
// Initialize a decoder to decode video packets.
void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec);
- // Returns a copy of the current buffer.
- scoped_ptr<webrtc::DesktopFrame> GetBufferForTest() const;
+ // Returns a copy of the current frame.
+ scoped_ptr<webrtc::DesktopFrame> GetCurrentFrameForTest() const;
// Gets a weak pointer for this object.
base::WeakPtr<TestVideoRenderer> GetWeakPtr() {
return weak_factory_.GetWeakPtr();
}
- // Set expected image pattern for comparison and the callback will be called
- // when the pattern is matched.
- void SetImagePatternAndMatchedCallback(
+ // Set expected rect and average color for comparison and the callback will be
+ // called when the pattern is matched.
+ void ExpectAverageColorInRect(
const webrtc::DesktopRect& expected_rect,
- const RgbaColor& expected_color,
+ uint32_t expected_average_color,
const base::Closure& image_pattern_matched_callback);
private:
diff --git a/remoting/test/test_video_renderer_unittest.cc b/remoting/test/test_video_renderer_unittest.cc
index 8c1d92f..d30fd2c 100644
--- a/remoting/test/test_video_renderer_unittest.cc
+++ b/remoting/test/test_video_renderer_unittest.cc
@@ -9,6 +9,7 @@
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
+#include "base/thread_task_runner_handle.h"
#include "base/timer/timer.h"
#include "media/base/video_frame.h"
#include "remoting/codec/video_encoder.h"
@@ -20,12 +21,25 @@
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"
namespace {
-const int kBytesPerPixel = 4;
-const int kDefaultScreenWidth = 1024;
-const int kDefaultScreenHeight = 768;
-const double kDefaultErrorLimit = 0.02;
+
+// Used to verify if image pattern is matched.
+void ProcessPacketDoneHandler(const base::Closure& done_closure,
+ bool* handler_called) {
+ *handler_called = true;
+ done_closure.Run();
}
+const int kDefaultScreenWidthPx = 1024;
+const int kDefaultScreenHeightPx = 768;
+
+// Default max error for encoding and decoding, measured in percent.
+const double kDefaultErrorLimit = 0.02;
+
+// Default expected rect for image pattern, measured in pixels.
+const webrtc::DesktopRect kDefaultExpectedRect =
+ webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200);
+} // namespace
+
namespace remoting {
namespace test {
@@ -36,15 +50,17 @@ class TestVideoRendererTest : public testing::Test {
TestVideoRendererTest();
~TestVideoRendererTest() override;
- // Generate a frame containing a gradient and test decoding of
- // TestVideoRenderer. The original frame is compared to the one obtained from
- // decoding the video packet, and the error at each pixel is the root mean
- // square of the errors in the R, G and B components, each normalized to
- // [0, 1]. This routine checks that the mean error over all pixels do not
- // exceed a given limit.
+ // Handles creating a frame and sending to TestVideoRenderer for processing.
void TestVideoPacketProcessing(int screen_width, int screen_height,
double error_limit);
+ // Handles setting an image pattern and sending a frame to TestVideoRenderer.
+ // |expect_to_match| indicates if the image pattern is expected to match.
+ void TestImagePatternMatch(int screen_width,
+ int screen_height,
+ const webrtc::DesktopRect& expected_rect,
+ bool expect_to_match);
+
// Generate a basic desktop frame containing a gradient.
scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient(
int screen_width, int screen_height) const;
@@ -66,7 +82,21 @@ class TestVideoRendererTest : public testing::Test {
// testing::Test interface.
void SetUp() override;
- // return the mean error of two frames.
+ // Set image pattern, send video packet and returns if the expected pattern is
+ // matched.
+ bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet,
+ const webrtc::DesktopRect& expected_rect,
+ uint32_t expected_average_color);
+
+ // Returns the average color value of pixels that fall within |rect|.
+ // NOTE: Callers should not release the objects.
+ uint32_t CalculateAverageColorValueForFrame(
+ const webrtc::DesktopFrame* frame,
+ const webrtc::DesktopRect& rect) const;
+
+ // Return the mean error of two frames over all pixels, where error at each
+ // pixel is the root mean square of the errors in the R, G and B components,
+ // each normalized to [0, 1].
double CalculateError(const webrtc::DesktopFrame* original_frame,
const webrtc::DesktopFrame* decoded_frame) const;
@@ -99,25 +129,153 @@ void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width,
DCHECK(encoder_);
DCHECK(test_video_renderer_);
+ // Generate a frame containing a gradient.
scoped_ptr<webrtc::DesktopFrame> original_frame =
CreateDesktopFrameWithGradient(screen_width, screen_height);
EXPECT_TRUE(original_frame);
+
scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get());
+
DCHECK(!run_loop_ || !run_loop_->running());
+ DCHECK(!timer_->IsRunning());
run_loop_.reset(new base::RunLoop());
+ // Set an extremely long time: 10 min to prevent bugs from hanging the system.
+ // NOTE: We've seen cases which take up to 1 min to process a packet, so an
+ // extremely long time as 10 min is chosen to avoid being variable/flaky.
+ timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10),
+ run_loop_->QuitClosure());
+
// Wait for the video packet to be processed and rendered to buffer.
test_video_renderer_->ProcessVideoPacket(packet.Pass(),
run_loop_->QuitClosure());
+
run_loop_->Run();
+ EXPECT_TRUE(timer_->IsRunning());
+ timer_->Stop();
+ run_loop_.reset();
scoped_ptr<webrtc::DesktopFrame> buffer_copy =
- test_video_renderer_->GetBufferForTest();
+ test_video_renderer_->GetCurrentFrameForTest();
EXPECT_NE(buffer_copy, nullptr);
+
+ // The original frame is compared to the decoded video frame to check that
+ // the mean error over all pixels does not exceed a given limit.
double error = CalculateError(original_frame.get(), buffer_copy.get());
EXPECT_LT(error, error_limit);
}
+bool TestVideoRendererTest::SendPacketAndWaitForMatch(
+ scoped_ptr<VideoPacket> packet,
+ const webrtc::DesktopRect& expected_rect,
+ uint32_t expected_average_color) {
+ DCHECK(!run_loop_ || !run_loop_->running());
+ DCHECK(!timer_->IsRunning());
+ run_loop_.reset(new base::RunLoop());
+
+ // Set an extremely long time: 10 min to prevent bugs from hanging the system.
+ // NOTE: We've seen cases which take up to 1 min to process a packet, so an
+ // extremely long time as 10 min is chosen to avoid being variable/flaky.
+ timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10),
+ run_loop_->QuitClosure());
+
+ // Set expected image pattern.
+ test_video_renderer_->ExpectAverageColorInRect(
+ expected_rect, expected_average_color, run_loop_->QuitClosure());
+
+ // Used to verify if the expected image pattern will be matched by |packet|.
+ scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get()));
+
+ // Post first test packet: |packet|.
+ test_video_renderer_->ProcessVideoPacket(packet.Pass(),
+ base::Bind(&base::DoNothing));
+
+ // Second packet: |packet_copy| is posted, and |second_packet_done_callback|
+ // will always be posted back to main thread, however, whether it will be
+ // called depends on whether the expected pattern is matched or not.
+ bool second_packet_done_is_called = false;
+ base::Closure second_packet_done_callback =
+ base::Bind(&ProcessPacketDoneHandler, run_loop_->QuitClosure(),
+ &second_packet_done_is_called);
+
+ test_video_renderer_->ProcessVideoPacket(packet_copy.Pass(),
+ second_packet_done_callback);
+
+ run_loop_->Run();
+ EXPECT_TRUE(timer_->IsRunning());
+ timer_->Stop();
+ run_loop_.reset();
+
+ // If the expected image pattern is matched, the QuitClosure of |run_loop_|
+ // will be called before |second_packet_done_callback|, which leaves
+ // |second_packet_done_is_called| as false.
+ bool image_pattern_is_matched = !second_packet_done_is_called;
+
+ return image_pattern_is_matched;
+}
+
+void TestVideoRendererTest::TestImagePatternMatch(
+ int screen_width,
+ int screen_height,
+ const webrtc::DesktopRect& expected_rect,
+ bool expect_to_match) {
+ DCHECK(encoder_);
+ DCHECK(test_video_renderer_);
+
+ scoped_ptr<webrtc::DesktopFrame> frame =
+ CreateDesktopFrameWithGradient(screen_width, screen_height);
+ uint32_t expected_average_color =
+ CalculateAverageColorValueForFrame(frame.get(), expected_rect);
+ scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get());
+
+ if (expect_to_match) {
+ EXPECT_TRUE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect,
+ expected_average_color));
+ } else {
+ // Shift each channel by 128.
+ // e.g. (10, 127, 200) -> (138, 255, 73).
+ // In this way, the error between expected color and true value is always
+ // around 0.5.
+ int red_shift = (((expected_average_color >> 16) & 0xFF) + 128) % 255;
+ int green_shift = (((expected_average_color >> 8) & 0xFF) + 128) % 255;
+ int blue_shift = ((expected_average_color & 0xFF) + 128) % 255;
+
+ int expected_average_color_shift =
+ 0xFF000000 | (red_shift << 16) | (green_shift << 8) | blue_shift;
+
+ EXPECT_FALSE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect,
+ expected_average_color_shift));
+ }
+}
+
+uint32_t TestVideoRendererTest::CalculateAverageColorValueForFrame(
+ const webrtc::DesktopFrame* frame,
+ const webrtc::DesktopRect& rect) const {
+ int red_sum = 0;
+ int green_sum = 0;
+ int blue_sum = 0;
+
+ // Loop through pixels that fall within |rect| to obtain the
+ // average color value.
+ for (int y = rect.top(); y < rect.bottom(); ++y) {
+ uint8_t* frame_pos =
+ frame->data() + (y * frame->stride() +
+ rect.left() * webrtc::DesktopFrame::kBytesPerPixel);
+
+ // Pixels of decoded video frame are presented in ARGB format.
+ for (int x = 0; x < rect.width(); ++x) {
+ red_sum += frame_pos[2];
+ green_sum += frame_pos[1];
+ blue_sum += frame_pos[0];
+ frame_pos += 4;
+ }
+ }
+
+ int area = rect.width() * rect.height();
+ return 0xFF000000 | ((red_sum / area) << 16) | ((green_sum / area) << 8) |
+ (blue_sum / area);
+}
+
double TestVideoRendererTest::CalculateError(
const webrtc::DesktopFrame* original_frame,
const webrtc::DesktopFrame* decoded_frame) const {
@@ -156,7 +314,7 @@ double TestVideoRendererTest::CalculateError(
for (int width = 0; width < screen_width; ++width) {
// Errors are calculated in the R, G, B components.
for (int j = 0; j < 3; ++j) {
- int offset = kBytesPerPixel * width + j;
+ int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j;
double original_value = static_cast<double>(*(original_ptr + offset));
double decoded_value = static_cast<double>(*(decoded_ptr + offset));
double error = original_value - decoded_value;
@@ -201,7 +359,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) {
encoder_ = VideoEncoderVpx::CreateForVP8();
test_video_renderer_->SetCodecForDecoding(
protocol::ChannelConfig::CODEC_VP8);
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight,
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
kDefaultErrorLimit);
}
@@ -210,7 +368,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) {
encoder_ = VideoEncoderVpx::CreateForVP9();
test_video_renderer_->SetCodecForDecoding(
protocol::ChannelConfig::CODEC_VP9);
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight,
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
kDefaultErrorLimit);
}
@@ -220,7 +378,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) {
encoder_.reset(new VideoEncoderVerbatim());
test_video_renderer_->SetCodecForDecoding(
protocol::ChannelConfig::CODEC_VERBATIM);
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight,
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
kDefaultErrorLimit);
}
@@ -238,8 +396,8 @@ TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) {
ScopedVector<VideoPacket> video_packets;
for (int i = 0; i < task_num; ++i) {
scoped_ptr<webrtc::DesktopFrame> original_frame =
- CreateDesktopFrameWithGradient(kDefaultScreenWidth,
- kDefaultScreenHeight);
+ CreateDesktopFrameWithGradient(kDefaultScreenWidthPx,
+ kDefaultScreenHeightPx);
video_packets.push_back(encoder_->Encode(*original_frame.get()));
}
@@ -258,14 +416,94 @@ TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) {
test_video_renderer_->SetCodecForDecoding(
protocol::ChannelConfig::Codec::CODEC_VP8);
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight,
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
kDefaultErrorLimit);
- TestVideoPacketProcessing(2 * kDefaultScreenWidth, 2 * kDefaultScreenHeight,
- kDefaultErrorLimit);
+ TestVideoPacketProcessing(2 * kDefaultScreenWidthPx,
+ 2 * kDefaultScreenHeightPx, kDefaultErrorLimit);
- TestVideoPacketProcessing(kDefaultScreenWidth / 2, kDefaultScreenHeight / 2,
- kDefaultErrorLimit);
+ TestVideoPacketProcessing(kDefaultScreenWidthPx / 2,
+ kDefaultScreenHeightPx / 2, kDefaultErrorLimit);
+}
+
+// Verify setting expected image pattern doesn't break video packet processing.
+TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) {
+ encoder_ = VideoEncoderVpx::CreateForVP8();
+ test_video_renderer_->SetCodecForDecoding(
+ protocol::ChannelConfig::Codec::CODEC_VP8);
+
+ DCHECK(encoder_);
+ DCHECK(test_video_renderer_);
+
+ scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient(
+ kDefaultScreenWidthPx, kDefaultScreenHeightPx);
+
+ // Since we don't care whether expected image pattern is matched or not in
+ // this case, an expected color is chosen arbitrarily.
+ uint32_t black_color = 0xFF000000;
+
+ // Set expected image pattern.
+ test_video_renderer_->ExpectAverageColorInRect(
+ kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing));
+
+ // Post test video packet.
+ scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get());
+ test_video_renderer_->ProcessVideoPacket(packet.Pass(),
+ base::Bind(&base::DoNothing));
+}
+
+// Verify correct image pattern can be matched for VP8.
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) {
+ encoder_ = VideoEncoderVpx::CreateForVP8();
+ test_video_renderer_->SetCodecForDecoding(
+ protocol::ChannelConfig::Codec::CODEC_VP8);
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
+ kDefaultExpectedRect, true);
+}
+
+// Verify expected image pattern can be matched for VP9.
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) {
+ encoder_ = VideoEncoderVpx::CreateForVP9();
+ test_video_renderer_->SetCodecForDecoding(
+ protocol::ChannelConfig::Codec::CODEC_VP9);
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
+ kDefaultExpectedRect, true);
+}
+
+// Verify expected image pattern can be matched for VERBATIM.
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) {
+ encoder_.reset(new VideoEncoderVerbatim());
+ test_video_renderer_->SetCodecForDecoding(
+ protocol::ChannelConfig::Codec::CODEC_VERBATIM);
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
+ kDefaultExpectedRect, true);
+}
+
+// Verify incorrect image pattern shouldn't be matched for VP8.
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) {
+ encoder_ = VideoEncoderVpx::CreateForVP8();
+ test_video_renderer_->SetCodecForDecoding(
+ protocol::ChannelConfig::Codec::CODEC_VP8);
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
+ kDefaultExpectedRect, false);
+}
+
+// Verify incorrect image pattern shouldn't be matched for VP9.
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) {
+  encoder_ = VideoEncoderVpx::CreateForVP9();
+  test_video_renderer_->SetCodecForDecoding(
+      protocol::ChannelConfig::Codec::CODEC_VP9);
+  // Pass width and height (not width twice) so the test frame matches the
+  // dimensions used by the other image-pattern tests.
+  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
+                        kDefaultExpectedRect, false);
+}
+
+// Verify incorrect image pattern shouldn't be matched for VERBATIM.
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) {
+ encoder_.reset(new VideoEncoderVerbatim());
+ test_video_renderer_->SetCodecForDecoding(
+ protocol::ChannelConfig::Codec::CODEC_VERBATIM);
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
+ kDefaultExpectedRect, false);
}
} // namespace test