summaryrefslogtreecommitdiffstats
path: root/remoting
diff options
context:
space:
mode:
authorsergeyu <sergeyu@chromium.org>2015-08-19 19:02:18 -0700
committerCommit bot <commit-bot@chromium.org>2015-08-20 02:02:56 +0000
commit13bca69cfa261ec8eb6bffecf59c486b683eb8db (patch)
tree2a6c5c5217c4e148ff4c2e94c0e48a2cc2997014 /remoting
parent7a402ba51bf75c883cde14c8bd83fa082f685c08 (diff)
downloadchromium_src-13bca69cfa261ec8eb6bffecf59c486b683eb8db.zip
chromium_src-13bca69cfa261ec8eb6bffecf59c486b683eb8db.tar.gz
chromium_src-13bca69cfa261ec8eb6bffecf59c486b683eb8db.tar.bz2
remoting: Simplify FrameConsumer interface and remove FrameProducer interface.
Now FrameConsumer implementation is responsible for scaling. This makes interaction with FrameConsumer much simpler. Also removed FrameProducer interface as it's no longer needed. Currently video scaling is only used in the plugin when Graphics3D is not available. In that case Graphics2D::SetScale() performs better than trying to scale the image in the plugin, especially given that the scaling code is not optimized for PNaCl. This refactoring also allowed to simplify threading logic in the rendering both on Android and in the plugin. BUG=256850, 486917, 509914 Review URL: https://codereview.chromium.org/1288063004 Cr-Commit-Position: refs/heads/master@{#344404}
Diffstat (limited to 'remoting')
-rw-r--r--remoting/client/BUILD.gn1
-rw-r--r--remoting/client/frame_consumer.h27
-rw-r--r--remoting/client/frame_consumer_proxy.cc71
-rw-r--r--remoting/client/frame_consumer_proxy.h52
-rw-r--r--remoting/client/frame_producer.h54
-rw-r--r--remoting/client/jni/chromoting_jni_instance.cc72
-rw-r--r--remoting/client/jni/chromoting_jni_instance.h16
-rw-r--r--remoting/client/jni/chromoting_jni_runtime.cc5
-rw-r--r--remoting/client/jni/chromoting_jni_runtime.h3
-rw-r--r--remoting/client/jni/jni_frame_consumer.cc159
-rw-r--r--remoting/client/jni/jni_frame_consumer.h63
-rw-r--r--remoting/client/plugin/pepper_video_renderer_2d.cc366
-rw-r--r--remoting/client/plugin/pepper_video_renderer_2d.h93
-rw-r--r--remoting/client/software_video_renderer.cc360
-rw-r--r--remoting/client/software_video_renderer.h52
-rw-r--r--remoting/client/software_video_renderer_unittest.cc182
-rw-r--r--remoting/protocol/monitored_video_stub.cc4
-rw-r--r--remoting/protocol/session_config.cc8
-rw-r--r--remoting/protocol/session_config.h1
-rw-r--r--remoting/remoting_srcs.gypi3
-rw-r--r--remoting/remoting_test.gypi1
21 files changed, 560 insertions, 1033 deletions
diff --git a/remoting/client/BUILD.gn b/remoting/client/BUILD.gn
index 9ea04c4..e26fdbd 100644
--- a/remoting/client/BUILD.gn
+++ b/remoting/client/BUILD.gn
@@ -34,6 +34,7 @@ source_set("unit_tests") {
"normalizing_input_filter_cros_unittest.cc",
"normalizing_input_filter_mac_unittest.cc",
"server_log_entry_client_unittest.cc",
+ "software_video_renderer_unittest.cc",
"touch_input_scaler_unittest.cc",
]
diff --git a/remoting/client/frame_consumer.h b/remoting/client/frame_consumer.h
index ab6f16e..f320356 100644
--- a/remoting/client/frame_consumer.h
+++ b/remoting/client/frame_consumer.h
@@ -19,36 +19,17 @@ namespace remoting {
class FrameConsumer {
public:
-
// List of supported pixel formats needed by various platforms.
enum PixelFormat {
FORMAT_BGRA, // Used by the Pepper plugin.
FORMAT_RGBA, // Used for Android's Bitmap class.
};
- // Paints the contents of |buffer| into the area of the view identified
- // by |clip_area|. |view_size| specifies the full-frame dimensions to which
- // the |buffer|/|clip_area| portion was scaled. Implementations may be
- // optimized to only paint pixels within the intersection of |region| and
- // |clip_area|. If |shape| is non-NULL then it specifies the complete shape
- // of the frame, otherwise the frame is un-shaped.
- //
- // N.B. |clip_area|, |region| and |shape| should be provided in output view
- // coordinates.
- virtual void ApplyBuffer(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- const webrtc::DesktopRegion* shape) = 0;
-
- // Accepts a buffer that couldn't be used for drawing for any reason (shutdown
- // is in progress, the view area has changed, etc.). The accepted buffer can
- // be freed or reused for another drawing operation.
- virtual void ReturnBuffer(webrtc::DesktopFrame* buffer) = 0;
+ virtual scoped_ptr<webrtc::DesktopFrame> AllocateFrame(
+ const webrtc::DesktopSize& size) = 0;
- // Set the dimension of the entire host screen.
- virtual void SetSourceSize(const webrtc::DesktopSize& source_size,
- const webrtc::DesktopVector& dpi) = 0;
+ virtual void DrawFrame(scoped_ptr<webrtc::DesktopFrame> frame,
+ const base::Closure& done) = 0;
// Returns the preferred pixel encoding for the platform.
virtual PixelFormat GetPixelFormat() = 0;
diff --git a/remoting/client/frame_consumer_proxy.cc b/remoting/client/frame_consumer_proxy.cc
deleted file mode 100644
index 21d03bf..0000000
--- a/remoting/client/frame_consumer_proxy.cc
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "remoting/client/frame_consumer_proxy.h"
-
-#include "base/bind.h"
-#include "base/location.h"
-#include "base/single_thread_task_runner.h"
-#include "base/thread_task_runner_handle.h"
-#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
-#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
-#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"
-
-namespace remoting {
-
-FrameConsumerProxy::FrameConsumerProxy(
- const base::WeakPtr<FrameConsumer>& frame_consumer)
- : frame_consumer_(frame_consumer),
- task_runner_(base::ThreadTaskRunnerHandle::Get()) {
- pixel_format_ = frame_consumer_->GetPixelFormat();
-}
-
-static void DoApplyBuffer(base::WeakPtr<FrameConsumer> frame_consumer,
- const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- scoped_ptr<webrtc::DesktopRegion> shape) {
- if (!frame_consumer)
- return;
-
- frame_consumer->ApplyBuffer(view_size, clip_area, buffer, region,
- shape.get());
-}
-
-void FrameConsumerProxy::ApplyBuffer(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- const webrtc::DesktopRegion* shape) {
- scoped_ptr<webrtc::DesktopRegion> shape_ptr;
- if (shape)
- shape_ptr = make_scoped_ptr(new webrtc::DesktopRegion(*shape));
- task_runner_->PostTask(
- FROM_HERE,
- base::Bind(DoApplyBuffer, frame_consumer_, view_size, clip_area, buffer,
- region, base::Passed(&shape_ptr)));
-}
-
-void FrameConsumerProxy::ReturnBuffer(webrtc::DesktopFrame* buffer) {
- task_runner_->PostTask(FROM_HERE, base::Bind(&FrameConsumer::ReturnBuffer,
- frame_consumer_, buffer));
-}
-
-void FrameConsumerProxy::SetSourceSize(
- const webrtc::DesktopSize& source_size,
- const webrtc::DesktopVector& source_dpi) {
- task_runner_->PostTask(
- FROM_HERE, base::Bind(&FrameConsumer::SetSourceSize, frame_consumer_,
- source_size, source_dpi));
-}
-
-FrameConsumer::PixelFormat FrameConsumerProxy::GetPixelFormat() {
- return pixel_format_;
-}
-
-FrameConsumerProxy::~FrameConsumerProxy() {
-}
-
-} // namespace remoting
diff --git a/remoting/client/frame_consumer_proxy.h b/remoting/client/frame_consumer_proxy.h
deleted file mode 100644
index 35bc76a..0000000
--- a/remoting/client/frame_consumer_proxy.h
+++ /dev/null
@@ -1,52 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// FrameConsumerProxy is used to allow a FrameConsumer on the UI thread to be
-// invoked by a Decoder on the decoder thread. The Detach() method is used by
-// the proxy's owner before tearing down the FrameConsumer, to prevent any
-// further invokations reaching it.
-
-#ifndef REMOTING_CLIENT_FRAME_CONSUMER_PROXY_H_
-#define REMOTING_CLIENT_FRAME_CONSUMER_PROXY_H_
-
-#include "base/memory/weak_ptr.h"
-#include "remoting/client/frame_consumer.h"
-
-namespace base {
-class SingleThreadTaskRunner;
-} // namespace base
-
-namespace remoting {
-
-class FrameConsumerProxy : public FrameConsumer {
- public:
- // Constructs a FrameConsumer proxy which can be passed to another thread,
- // and will direct calls to |frame_consumer| on the thread from which the
- // proxy was constructed.
- FrameConsumerProxy(const base::WeakPtr<FrameConsumer>& frame_consumer);
- ~FrameConsumerProxy() override;
-
- // FrameConsumer implementation.
- void ApplyBuffer(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- const webrtc::DesktopRegion* shape) override;
- void ReturnBuffer(webrtc::DesktopFrame* buffer) override;
- void SetSourceSize(const webrtc::DesktopSize& source_size,
- const webrtc::DesktopVector& dpi) override;
- PixelFormat GetPixelFormat() override;
-
- private:
- base::WeakPtr<FrameConsumer> frame_consumer_;
-
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
- PixelFormat pixel_format_;
-
- DISALLOW_COPY_AND_ASSIGN(FrameConsumerProxy);
-};
-
-} // namespace remoting
-
-#endif // REMOTING_CLIENT_FRAME_CONSUMER_PROXY_H_
diff --git a/remoting/client/frame_producer.h b/remoting/client/frame_producer.h
deleted file mode 100644
index 2776eb7..0000000
--- a/remoting/client/frame_producer.h
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef REMOTING_CLIENT_FRAME_PRODUCER_H_
-#define REMOTING_CLIENT_FRAME_PRODUCER_H_
-
-#include "base/callback_forward.h"
-
-namespace webrtc {
-class DesktopFrame;
-class DesktopRect;
-class DesktopRegion;
-class DesktopSize;
-} // namespace webrtc
-
-namespace remoting {
-
-class FrameProducer {
- public:
- FrameProducer() {}
-
- // Adds an image buffer to the pool of pending buffers for subsequent drawing.
- // Once drawing is completed the buffer will be returned to the consumer via
- // the FrameConsumer::ApplyBuffer() call. Alternatively an empty buffer could
- // be returned via the FrameConsumer::ReturnBuffer() call.
- //
- // The passed buffer must be large enough to hold the whole clipping area.
- virtual void DrawBuffer(webrtc::DesktopFrame* buffer) = 0;
-
- // Requests repainting of the specified |region| of the frame as soon as
- // possible. |region| is specified in output coordinates relative to
- // the beginning of the frame.
- virtual void InvalidateRegion(const webrtc::DesktopRegion& region) = 0;
-
- // Requests returing of all pending buffers to the consumer via
- // FrameConsumer::ReturnBuffer() calls.
- virtual void RequestReturnBuffers(const base::Closure& done) = 0;
-
- // Notifies the producer of changes to the output view size or clipping area.
- // Implementations must cope with empty |view_size| or |clip_area|.
- virtual void SetOutputSizeAndClip(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area) = 0;
-
- protected:
- virtual ~FrameProducer() {}
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FrameProducer);
-};
-
-} // namespace remoting
-
-#endif // REMOTING_CLIENT_FRAME_PRODUCER_H_
diff --git a/remoting/client/jni/chromoting_jni_instance.cc b/remoting/client/jni/chromoting_jni_instance.cc
index dc5c4d7..9c0fae7 100644
--- a/remoting/client/jni/chromoting_jni_instance.cc
+++ b/remoting/client/jni/chromoting_jni_instance.cc
@@ -15,6 +15,7 @@
#include "remoting/client/client_status_logger.h"
#include "remoting/client/jni/android_keymap.h"
#include "remoting/client/jni/chromoting_jni_runtime.h"
+#include "remoting/client/jni/jni_frame_consumer.h"
#include "remoting/client/software_video_renderer.h"
#include "remoting/client/token_fetcher_proxy.h"
#include "remoting/protocol/chromium_port_allocator.h"
@@ -85,10 +86,9 @@ ChromotingJniInstance::ChromotingJniInstance(ChromotingJniRuntime* jni_runtime,
token_fetcher.Pass(), auth_methods));
// Post a task to start connection
- jni_runtime_->display_task_runner()->PostTask(
+ jni_runtime_->network_task_runner()->PostTask(
FROM_HERE,
- base::Bind(&ChromotingJniInstance::ConnectToHostOnDisplayThread,
- this));
+ base::Bind(&ChromotingJniInstance::ConnectToHostOnNetworkThread, this));
}
ChromotingJniInstance::~ChromotingJniInstance() {
@@ -105,23 +105,25 @@ ChromotingJniInstance::~ChromotingJniInstance() {
}
void ChromotingJniInstance::Disconnect() {
- if (!jni_runtime_->display_task_runner()->BelongsToCurrentThread()) {
- jni_runtime_->display_task_runner()->PostTask(
+ if (!jni_runtime_->network_task_runner()->BelongsToCurrentThread()) {
+ jni_runtime_->network_task_runner()->PostTask(
FROM_HERE,
base::Bind(&ChromotingJniInstance::Disconnect, this));
return;
}
- // This must be destroyed on the display thread before the producer is gone.
- view_.reset();
+ host_id_.clear();
- // The weak pointers must be invalidated on the same thread they were used.
- view_weak_factory_->InvalidateWeakPtrs();
+ stats_logging_enabled_ = false;
- jni_runtime_->network_task_runner()->PostTask(
- FROM_HERE,
- base::Bind(&ChromotingJniInstance::DisconnectFromHostOnNetworkThread,
- this));
+ // |client_| must be torn down before |signaling_|.
+ client_.reset();
+ client_status_logger_.reset();
+ video_renderer_.reset();
+ view_.reset();
+ authenticator_.reset();
+ signaling_.reset();
+ client_context_.reset();
}
void ChromotingJniInstance::FetchThirdPartyToken(
@@ -381,36 +383,17 @@ void ChromotingJniInstance::SetCursorShape(
jni_runtime_->UpdateCursorShape(shape);
}
-void ChromotingJniInstance::ConnectToHostOnDisplayThread() {
- DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread());
-
- view_.reset(new JniFrameConsumer(jni_runtime_, this));
- view_weak_factory_.reset(new base::WeakPtrFactory<JniFrameConsumer>(
- view_.get()));
- scoped_ptr<FrameConsumerProxy> frame_consumer =
- make_scoped_ptr(new FrameConsumerProxy(view_weak_factory_->GetWeakPtr()));
-
- jni_runtime_->network_task_runner()->PostTask(
- FROM_HERE,
- base::Bind(&ChromotingJniInstance::ConnectToHostOnNetworkThread, this,
- base::Passed(&frame_consumer)));
-}
-
-void ChromotingJniInstance::ConnectToHostOnNetworkThread(
- scoped_ptr<FrameConsumerProxy> frame_consumer) {
+void ChromotingJniInstance::ConnectToHostOnNetworkThread() {
DCHECK(jni_runtime_->network_task_runner()->BelongsToCurrentThread());
- DCHECK(frame_consumer);
jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
client_context_.reset(new ClientContext(jni_runtime_->network_task_runner()));
client_context_->Start();
- SoftwareVideoRenderer* renderer = new SoftwareVideoRenderer(
- client_context_->main_task_runner(),
- client_context_->decode_task_runner(), frame_consumer.Pass());
- view_->set_frame_producer(renderer);
- video_renderer_.reset(renderer);
+ view_.reset(new JniFrameConsumer(jni_runtime_));
+ video_renderer_.reset(new SoftwareVideoRenderer(
+ client_context_->decode_task_runner(), view_.get()));
client_.reset(new ChromotingClient(
client_context_.get(), this, video_renderer_.get(), nullptr));
@@ -441,22 +424,6 @@ void ChromotingJniInstance::ConnectToHostOnNetworkThread(
transport_factory.Pass(), host_jid_, capabilities_);
}
-void ChromotingJniInstance::DisconnectFromHostOnNetworkThread() {
- DCHECK(jni_runtime_->network_task_runner()->BelongsToCurrentThread());
-
- host_id_.clear();
-
- stats_logging_enabled_ = false;
-
- // |client_| must be torn down before |signaling_|.
- client_.reset();
- client_status_logger_.reset();
- video_renderer_.reset();
- authenticator_.reset();
- signaling_.reset();
- client_context_.reset();
-}
-
void ChromotingJniInstance::FetchSecret(
bool pairable,
const protocol::SecretFetchedCallback& callback) {
@@ -494,7 +461,6 @@ void ChromotingJniInstance::SendKeyEventInternal(int usb_key_code,
return;
}
-
protocol::KeyEvent event;
event.set_usb_keycode(usb_key_code);
event.set_pressed(key_down);
diff --git a/remoting/client/jni/chromoting_jni_instance.h b/remoting/client/jni/chromoting_jni_instance.h
index 2ce50e0..5511fef 100644
--- a/remoting/client/jni/chromoting_jni_instance.h
+++ b/remoting/client/jni/chromoting_jni_instance.h
@@ -14,8 +14,6 @@
#include "remoting/client/chromoting_client.h"
#include "remoting/client/client_context.h"
#include "remoting/client/client_user_interface.h"
-#include "remoting/client/frame_consumer_proxy.h"
-#include "remoting/client/jni/jni_frame_consumer.h"
#include "remoting/protocol/clipboard_stub.h"
#include "remoting/protocol/cursor_shape_stub.h"
#include "remoting/signaling/xmpp_signal_strategy.h"
@@ -27,9 +25,11 @@ class ClipboardEvent;
class CursorShapeInfo;
} // namespace protocol
+class ChromotingJniRuntime;
class ClientStatusLogger;
-class VideoRenderer;
+class JniFrameConsumer;
class TokenFetcherProxy;
+class VideoRenderer;
// ClientUserInterface that indirectly makes and receives JNI calls.
class ChromotingJniInstance
@@ -118,10 +118,7 @@ class ChromotingJniInstance
// This object is ref-counted, so it cleans itself up.
~ChromotingJniInstance() override;
- void ConnectToHostOnDisplayThread();
- void ConnectToHostOnNetworkThread(
- scoped_ptr<FrameConsumerProxy> frame_consumer);
- void DisconnectFromHostOnNetworkThread();
+ void ConnectToHostOnNetworkThread();
// Notifies the user interface that the user needs to enter a PIN. The
// current authentication attempt is put on hold until |callback| is invoked.
@@ -150,12 +147,9 @@ class ChromotingJniInstance
std::string host_id_;
std::string host_jid_;
- // This group of variables is to be used on the display thread.
- scoped_ptr<JniFrameConsumer> view_;
- scoped_ptr<base::WeakPtrFactory<JniFrameConsumer>> view_weak_factory_;
-
// This group of variables is to be used on the network thread.
scoped_ptr<ClientContext> client_context_;
+ scoped_ptr<JniFrameConsumer> view_;
scoped_ptr<VideoRenderer> video_renderer_;
scoped_ptr<protocol::Authenticator> authenticator_;
scoped_ptr<ChromotingClient> client_;
diff --git a/remoting/client/jni/chromoting_jni_runtime.cc b/remoting/client/jni/chromoting_jni_runtime.cc
index cb7dbb0..18884a0 100644
--- a/remoting/client/jni/chromoting_jni_runtime.cc
+++ b/remoting/client/jni/chromoting_jni_runtime.cc
@@ -18,7 +18,6 @@
#include "jni/JniInterface_jni.h"
#include "media/base/yuv_convert.h"
#include "remoting/base/url_request_context_getter.h"
-#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
using base::android::ConvertJavaStringToUTF8;
using base::android::ConvertUTF8ToJavaString;
@@ -324,9 +323,9 @@ void ChromotingJniRuntime::HandleExtensionMessage(const std::string& type,
}
base::android::ScopedJavaLocalRef<jobject> ChromotingJniRuntime::NewBitmap(
- webrtc::DesktopSize size) {
+ int width, int height) {
JNIEnv* env = base::android::AttachCurrentThread();
- return Java_JniInterface_newBitmap(env, size.width(), size.height());
+ return Java_JniInterface_newBitmap(env, width, height);
}
void ChromotingJniRuntime::UpdateFrameBitmap(jobject bitmap) {
diff --git a/remoting/client/jni/chromoting_jni_runtime.h b/remoting/client/jni/chromoting_jni_runtime.h
index 0d0ea5e..1ca1124 100644
--- a/remoting/client/jni/chromoting_jni_runtime.h
+++ b/remoting/client/jni/chromoting_jni_runtime.h
@@ -97,8 +97,7 @@ class ChromotingJniRuntime {
const std::string& message);
// Creates a new Bitmap object to store a video frame.
- base::android::ScopedJavaLocalRef<jobject> NewBitmap(
- webrtc::DesktopSize size);
+ base::android::ScopedJavaLocalRef<jobject> NewBitmap(int width, int height);
// Updates video frame bitmap. |bitmap| must be an instance of
// android.graphics.Bitmap. Call on the display thread.
diff --git a/remoting/client/jni/jni_frame_consumer.cc b/remoting/client/jni/jni_frame_consumer.cc
index d921bfb..8c84dba 100644
--- a/remoting/client/jni/jni_frame_consumer.cc
+++ b/remoting/client/jni/jni_frame_consumer.cc
@@ -5,11 +5,9 @@
#include "remoting/client/jni/jni_frame_consumer.h"
#include "base/android/jni_android.h"
+#include "base/android/scoped_java_ref.h"
#include "base/logging.h"
-#include "base/stl_util.h"
-#include "base/synchronization/waitable_event.h"
#include "remoting/base/util.h"
-#include "remoting/client/frame_producer.h"
#include "remoting/client/jni/chromoting_jni_instance.h"
#include "remoting/client/jni/chromoting_jni_runtime.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
@@ -18,47 +16,51 @@
namespace remoting {
-JniFrameConsumer::JniFrameConsumer(
- ChromotingJniRuntime* jni_runtime,
- scoped_refptr<ChromotingJniInstance> jni_instance)
- : jni_runtime_(jni_runtime),
- jni_instance_(jni_instance),
- frame_producer_(nullptr) {
-}
+class JniFrameConsumer::Renderer {
+ public:
+ Renderer(ChromotingJniRuntime* jni_runtime) : jni_runtime_(jni_runtime) {}
+ ~Renderer() {
+ DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread());
+ }
-JniFrameConsumer::~JniFrameConsumer() {
- // The producer should now return any pending buffers. At this point, however,
- // ReturnBuffer() tasks scheduled by the producer will not be delivered,
- // so we free all the buffers once the producer's queue is empty.
- base::WaitableEvent done_event(true, false);
- frame_producer_->RequestReturnBuffers(
- base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done_event)));
- done_event.Wait();
-
- STLDeleteElements(&buffers_);
-}
+ void RenderFrame(scoped_ptr<webrtc::DesktopFrame> frame);
-void JniFrameConsumer::set_frame_producer(FrameProducer* producer) {
- frame_producer_ = producer;
-}
+ private:
+ // Used to obtain task runner references and make calls to Java methods.
+ ChromotingJniRuntime* jni_runtime_;
+
+ // This global reference is required, instead of a local reference, so it
+ // remains valid for the lifetime of |bitmap_| - gfx::JavaBitmap does not
+ // create its own global reference internally. And this global ref must be
+ // destroyed (released) after |bitmap_| is destroyed.
+ base::android::ScopedJavaGlobalRef<jobject> bitmap_global_ref_;
-void JniFrameConsumer::ApplyBuffer(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- const webrtc::DesktopRegion* shape) {
+ // Reference to the frame bitmap that is passed to Java when the frame is
+ // allocated. This provides easy access to the underlying pixels.
+ scoped_ptr<gfx::JavaBitmap> bitmap_;
+};
+
+// Function called on the display thread to render the frame.
+void JniFrameConsumer::Renderer::RenderFrame(
+ scoped_ptr<webrtc::DesktopFrame> frame) {
DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread());
- DCHECK(!shape);
-
- if (bitmap_->size().width() != buffer->size().width() ||
- bitmap_->size().height() != buffer->size().height()) {
- // Drop the frame, since the data belongs to the previous generation,
- // before SetSourceSize() called SetOutputSizeAndClip().
- FreeBuffer(buffer);
- return;
+
+ if (!bitmap_ || bitmap_->size().width() != frame->size().width() ||
+ bitmap_->size().height() != frame->size().height()) {
+ // Allocate a new Bitmap, store references here, and pass it to Java.
+ JNIEnv* env = base::android::AttachCurrentThread();
+
+ // |bitmap_| must be deleted before |bitmap_global_ref_| is released.
+ bitmap_.reset();
+ bitmap_global_ref_.Reset(
+ env,
+ jni_runtime_->NewBitmap(frame->size().width(), frame->size().height())
+ .obj());
+ bitmap_.reset(new gfx::JavaBitmap(bitmap_global_ref_.obj()));
+ jni_runtime_->UpdateFrameBitmap(bitmap_global_ref_.obj());
}
- // Copy pixels from |buffer| into the Java Bitmap.
+ // Copy pixels from |frame| into the Java Bitmap.
// TODO(lambroslambrou): Optimize away this copy by having the VideoDecoder
// decode directly into the Bitmap's pixel memory. This currently doesn't
// work very well because the VideoDecoder writes the decoded data in BGRA,
@@ -66,70 +68,53 @@ void JniFrameConsumer::ApplyBuffer(const webrtc::DesktopSize& view_size,
// If a repaint is triggered from a Java event handler, the unswapped pixels
// can sometimes appear on the display.
uint8* dest_buffer = static_cast<uint8*>(bitmap_->pixels());
- webrtc::DesktopRect buffer_rect = webrtc::DesktopRect::MakeSize(view_size);
-
- for (webrtc::DesktopRegion::Iterator i(region); !i.IsAtEnd(); i.Advance()) {
- const webrtc::DesktopRect& rect(i.rect());
- CopyRGB32Rect(buffer->data(), buffer->stride(), buffer_rect, dest_buffer,
- bitmap_->stride(), buffer_rect, rect);
+ webrtc::DesktopRect buffer_rect =
+ webrtc::DesktopRect::MakeSize(frame->size());
+ for (webrtc::DesktopRegion::Iterator i(frame->updated_region()); !i.IsAtEnd();
+ i.Advance()) {
+ CopyRGB32Rect(frame->data(), frame->stride(), buffer_rect, dest_buffer,
+ bitmap_->stride(), buffer_rect, i.rect());
}
- // TODO(lambroslambrou): Optimize this by only repainting the changed pixels.
- base::TimeTicks start_time = base::TimeTicks::Now();
jni_runtime_->RedrawCanvas();
- jni_instance_->RecordPaintTime(
- (base::TimeTicks::Now() - start_time).InMilliseconds());
-
- // Supply |frame_producer_| with a buffer to render the next frame into.
- frame_producer_->DrawBuffer(buffer);
-}
-
-void JniFrameConsumer::ReturnBuffer(webrtc::DesktopFrame* buffer) {
- DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread());
- FreeBuffer(buffer);
}
-void JniFrameConsumer::SetSourceSize(const webrtc::DesktopSize& source_size,
- const webrtc::DesktopVector& dpi) {
- DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread());
-
- // We currently render the desktop 1:1 and perform pan/zoom scaling
- // and cropping on the managed canvas.
- clip_area_ = webrtc::DesktopRect::MakeSize(source_size);
- frame_producer_->SetOutputSizeAndClip(source_size, clip_area_);
+JniFrameConsumer::JniFrameConsumer(ChromotingJniRuntime* jni_runtime)
+ : jni_runtime_(jni_runtime),
+ renderer_(new Renderer(jni_runtime)),
+ weak_factory_(this) {}
- // Allocate buffer and start drawing frames onto it.
- AllocateBuffer(source_size);
+JniFrameConsumer::~JniFrameConsumer() {
+ jni_runtime_->display_task_runner()->DeleteSoon(FROM_HERE,
+ renderer_.release());
}
-FrameConsumer::PixelFormat JniFrameConsumer::GetPixelFormat() {
- return FORMAT_RGBA;
+scoped_ptr<webrtc::DesktopFrame> JniFrameConsumer::AllocateFrame(
+ const webrtc::DesktopSize& size) {
+ return make_scoped_ptr(new webrtc::BasicDesktopFrame(size));
}
-void JniFrameConsumer::AllocateBuffer(const webrtc::DesktopSize& source_size) {
- DCHECK(jni_runtime_->display_task_runner()->BelongsToCurrentThread());
-
- webrtc::DesktopSize size(source_size.width(), source_size.height());
+void JniFrameConsumer::DrawFrame(scoped_ptr<webrtc::DesktopFrame> frame,
+ const base::Closure& done) {
+ DCHECK(jni_runtime_->network_task_runner()->BelongsToCurrentThread());
- // Allocate a new Bitmap, store references here, and pass it to Java.
- JNIEnv* env = base::android::AttachCurrentThread();
+ jni_runtime_->display_task_runner()->PostTaskAndReply(
+ FROM_HERE,
+ base::Bind(&Renderer::RenderFrame, base::Unretained(renderer_.get()),
+ base::Passed(&frame)),
+ base::Bind(&JniFrameConsumer::OnFrameRendered, weak_factory_.GetWeakPtr(),
+ done));
+}
- // |bitmap_| must be deleted before |bitmap_global_ref_| is released.
- bitmap_.reset();
- bitmap_global_ref_.Reset(env, jni_runtime_->NewBitmap(size).obj());
- bitmap_.reset(new gfx::JavaBitmap(bitmap_global_ref_.obj()));
- jni_runtime_->UpdateFrameBitmap(bitmap_global_ref_.obj());
+void JniFrameConsumer::OnFrameRendered(const base::Closure& done) {
+ DCHECK(jni_runtime_->network_task_runner()->BelongsToCurrentThread());
- webrtc::DesktopFrame* buffer = new webrtc::BasicDesktopFrame(size);
- buffers_.push_back(buffer);
- frame_producer_->DrawBuffer(buffer);
+ if (!done.is_null())
+ done.Run();
}
-void JniFrameConsumer::FreeBuffer(webrtc::DesktopFrame* buffer) {
- DCHECK(std::find(buffers_.begin(), buffers_.end(), buffer) != buffers_.end());
-
- buffers_.remove(buffer);
- delete buffer;
+FrameConsumer::PixelFormat JniFrameConsumer::GetPixelFormat() {
+ return FORMAT_RGBA;
}
} // namespace remoting
diff --git a/remoting/client/jni/jni_frame_consumer.h b/remoting/client/jni/jni_frame_consumer.h
index a9a0361..5a9f7e6 100644
--- a/remoting/client/jni/jni_frame_consumer.h
+++ b/remoting/client/jni/jni_frame_consumer.h
@@ -7,80 +7,43 @@
#include <list>
-#include "base/android/scoped_java_ref.h"
#include "base/compiler_specific.h"
-#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
#include "remoting/client/frame_consumer.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
-namespace gfx {
-class JavaBitmap;
-} // namespace gfx
-
-namespace webrtc {
-class DesktopFrame;
-} // namespace webrtc
-
namespace remoting {
-class ChromotingJniInstance;
+
class ChromotingJniRuntime;
-class FrameProducer;
// FrameConsumer implementation that draws onto a JNI direct byte buffer.
class JniFrameConsumer : public FrameConsumer {
public:
- // The instance does not take ownership of |jni_runtime|.
- explicit JniFrameConsumer(ChromotingJniRuntime* jni_runtime,
- scoped_refptr<ChromotingJniInstance> jni_instance);
+ // Does not take ownership of |jni_runtime|.
+ explicit JniFrameConsumer(ChromotingJniRuntime* jni_runtime);
~JniFrameConsumer() override;
- // This must be called once before the producer's source size is set.
- void set_frame_producer(FrameProducer* producer);
-
// FrameConsumer implementation.
- void ApplyBuffer(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- const webrtc::DesktopRegion* shape) override;
- void ReturnBuffer(webrtc::DesktopFrame* buffer) override;
- void SetSourceSize(const webrtc::DesktopSize& source_size,
- const webrtc::DesktopVector& dpi) override;
+ scoped_ptr<webrtc::DesktopFrame> AllocateFrame(
+ const webrtc::DesktopSize& size) override;
+ void DrawFrame(scoped_ptr<webrtc::DesktopFrame> frame,
+ const base::Closure& done) override;
PixelFormat GetPixelFormat() override;
private:
- // Allocates a new buffer of |source_size|, informs Java about it, and tells
- // the producer to draw onto it.
- void AllocateBuffer(const webrtc::DesktopSize& source_size);
+ class Renderer;
- // Frees a frame buffer previously allocated by AllocateBuffer.
- void FreeBuffer(webrtc::DesktopFrame* buffer);
-
- // Variables are to be used from the display thread.
+ void OnFrameRendered(const base::Closure& done);
// Used to obtain task runner references and make calls to Java methods.
ChromotingJniRuntime* jni_runtime_;
- // Used to record statistics.
- scoped_refptr<ChromotingJniInstance> jni_instance_;
-
- FrameProducer* frame_producer_;
- webrtc::DesktopRect clip_area_;
-
- // List of allocated image buffers.
- std::list<webrtc::DesktopFrame*> buffers_;
-
- // This global reference is required, instead of a local reference, so it
- // remains valid for the lifetime of |bitmap_| - gfx::JavaBitmap does not
- // create its own global reference internally. And this global ref must be
- // destroyed (released) after |bitmap_| is destroyed.
- base::android::ScopedJavaGlobalRef<jobject> bitmap_global_ref_;
+ // Renderer object used to render the frames on the display thread.
+ scoped_ptr<Renderer> renderer_;
- // Reference to the frame bitmap that is passed to Java when the frame is
- // allocated. This provides easy access to the underlying pixels.
- scoped_ptr<gfx::JavaBitmap> bitmap_;
+ base::WeakPtrFactory<JniFrameConsumer> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(JniFrameConsumer);
};
diff --git a/remoting/client/plugin/pepper_video_renderer_2d.cc b/remoting/client/plugin/pepper_video_renderer_2d.cc
index feca26b..5d07277 100644
--- a/remoting/client/plugin/pepper_video_renderer_2d.cc
+++ b/remoting/client/plugin/pepper_video_renderer_2d.cc
@@ -4,12 +4,10 @@
#include "remoting/client/plugin/pepper_video_renderer_2d.h"
-#include <functional>
-
#include "base/bind.h"
+#include "base/callback_helpers.h"
#include "base/strings/string_util.h"
-#include "base/synchronization/waitable_event.h"
-#include "base/time/time.h"
+#include "base/task_runner_util.h"
#include "ppapi/cpp/completion_callback.h"
#include "ppapi/cpp/image_data.h"
#include "ppapi/cpp/instance.h"
@@ -19,19 +17,26 @@
#include "remoting/base/util.h"
#include "remoting/client/chromoting_stats.h"
#include "remoting/client/client_context.h"
-#include "remoting/client/frame_consumer_proxy.h"
-#include "remoting/client/frame_producer.h"
#include "remoting/client/software_video_renderer.h"
#include "remoting/proto/video.pb.h"
+#include "third_party/libyuv/include/libyuv/scale_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
+namespace remoting {
+
namespace {
// DesktopFrame that wraps a supplied pp::ImageData
class PepperDesktopFrame : public webrtc::DesktopFrame {
public:
// Wraps the supplied ImageData.
- explicit PepperDesktopFrame(const pp::ImageData& buffer);
+ explicit PepperDesktopFrame(const pp::ImageData& buffer)
+ : DesktopFrame(
+ webrtc::DesktopSize(buffer.size().width(), buffer.size().height()),
+ buffer.stride(),
+ reinterpret_cast<uint8_t*>(buffer.data()),
+ nullptr),
+ buffer_(buffer) {}
// Access to underlying pepper representation.
const pp::ImageData& buffer() const {
@@ -42,57 +47,18 @@ class PepperDesktopFrame : public webrtc::DesktopFrame {
pp::ImageData buffer_;
};
-PepperDesktopFrame::PepperDesktopFrame(const pp::ImageData& buffer)
- : DesktopFrame(webrtc::DesktopSize(buffer.size().width(),
- buffer.size().height()),
- buffer.stride(),
- reinterpret_cast<uint8_t*>(buffer.data()),
- nullptr),
- buffer_(buffer) {}
-
-} // namespace
-
-namespace remoting {
-
-namespace {
-
-// The maximum number of image buffers to be allocated at any point of time.
-const size_t kMaxPendingBuffersCount = 2;
-
} // namespace
PepperVideoRenderer2D::PepperVideoRenderer2D()
- : instance_(nullptr),
- event_handler_(nullptr),
- merge_buffer_(nullptr),
- dips_to_device_scale_(1.0f),
- dips_to_view_scale_(1.0f),
- flush_pending_(false),
- frame_received_(false),
- debug_dirty_region_(false),
- callback_factory_(this),
- weak_factory_(this) {
-}
+ : callback_factory_(this),
+ weak_factory_(this) {}
-PepperVideoRenderer2D::~PepperVideoRenderer2D() {
- // The producer should now return any pending buffers. At this point, however,
- // ReturnBuffer() tasks scheduled by the producer will not be delivered,
- // so we free all the buffers once the producer's queue is empty.
- base::WaitableEvent done_event(true, false);
- software_video_renderer_->RequestReturnBuffers(
- base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done_event)));
- done_event.Wait();
-
- merge_buffer_ = nullptr;
- while (!buffers_.empty()) {
- FreeBuffer(buffers_.front());
- }
-}
+PepperVideoRenderer2D::~PepperVideoRenderer2D() {}
bool PepperVideoRenderer2D::Initialize(pp::Instance* instance,
const ClientContext& context,
EventHandler* event_handler) {
- DCHECK(CalledOnValidThread());
+ DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!instance_);
DCHECK(!event_handler_);
DCHECK(instance);
@@ -100,83 +66,29 @@ bool PepperVideoRenderer2D::Initialize(pp::Instance* instance,
instance_ = instance;
event_handler_ = event_handler;
- scoped_ptr<FrameConsumerProxy> frame_consumer_proxy =
- make_scoped_ptr(new FrameConsumerProxy(weak_factory_.GetWeakPtr()));
- software_video_renderer_.reset(new SoftwareVideoRenderer(
- context.main_task_runner(), context.decode_task_runner(),
- frame_consumer_proxy.Pass()));
+ software_video_renderer_.reset(
+ new SoftwareVideoRenderer(context.decode_task_runner(), this));
return true;
}
void PepperVideoRenderer2D::OnViewChanged(const pp::View& view) {
- DCHECK(CalledOnValidThread());
-
- bool view_changed = false;
+ DCHECK(thread_checker_.CalledOnValidThread());
pp::Rect pp_size = view.GetRect();
- webrtc::DesktopSize new_dips_size(pp_size.width(), pp_size.height());
- float new_dips_to_device_scale = view.GetDeviceScale();
-
- if (!dips_size_.equals(new_dips_size) ||
- dips_to_device_scale_ != new_dips_to_device_scale) {
- view_changed = true;
- dips_to_device_scale_ = new_dips_to_device_scale;
- dips_size_ = new_dips_size;
-
- // If |dips_to_device_scale_| is > 1.0 then the device is high-DPI, and
- // there are actually |view_device_scale_| physical pixels for every one
- // Density Independent Pixel (DIP). If we specify a scale of 1.0 to
- // Graphics2D then we can render at DIP resolution and let PPAPI up-scale
- // for high-DPI devices.
- dips_to_view_scale_ = 1.0f;
- view_size_ = dips_size_;
-
- // If the view's DIP dimensions don't match the source then let the frame
- // producer do the scaling, and render at device resolution.
- if (!dips_size_.equals(source_size_)) {
- dips_to_view_scale_ = dips_to_device_scale_;
- view_size_.set(ceilf(dips_size_.width() * dips_to_view_scale_),
- ceilf(dips_size_.height() * dips_to_view_scale_));
- }
-
- // Create a 2D rendering context at the chosen frame dimensions.
- pp::Size pp_size = pp::Size(view_size_.width(), view_size_.height());
- graphics2d_ = pp::Graphics2D(instance_, pp_size, false);
+ view_size_ = webrtc::DesktopSize(pp_size.width(), pp_size.height());
- // Specify the scale from our coordinates to DIPs.
- graphics2d_.SetScale(1.0f / dips_to_view_scale_);
+ // Update scale if graphics2d has been initialized.
+ if (!graphics2d_.is_null() && source_size_.width() > 0) {
+ graphics2d_.SetScale(static_cast<float>(view_size_.width()) /
+ source_size_.width());
+ // Bind graphics2d_ again after changing the scale to work around
+ // crbug.com/521745 .
+ instance_->BindGraphics(graphics2d_);
bool result = instance_->BindGraphics(graphics2d_);
-
- // There is no good way to handle this error currently.
DCHECK(result) << "Couldn't bind the device context.";
}
-
- // Ignore clip rectangle provided by the browser because it may not be
- // correct. See crbug.com/360240 . In case when the plugin is not visible
- // (e.g. another tab is selected) |clip_area_| is set to empty rectangle,
- // otherwise it's set to a rectangle that covers the whole plugin.
- //
- // TODO(sergeyu): Use view.GetClipRect() here after bug 360240 is fixed.
- webrtc::DesktopRect new_clip =
- view.IsVisible() ? webrtc::DesktopRect::MakeWH(
- ceilf(pp_size.width() * dips_to_view_scale_),
- ceilf(pp_size.height() * dips_to_view_scale_))
- : webrtc::DesktopRect();
- if (!clip_area_.equals(new_clip)) {
- view_changed = true;
-
- // YUV to RGB conversion may require even X and Y coordinates for
- // the top left corner of the clipping area.
- clip_area_ = AlignRect(new_clip);
- clip_area_.IntersectWith(webrtc::DesktopRect::MakeSize(view_size_));
- }
-
- if (view_changed) {
- software_video_renderer_->SetOutputSizeAndClip(view_size_, clip_area_);
- AllocateBuffers();
- }
}
void PepperVideoRenderer2D::EnableDebugDirtyRegion(bool enable) {
@@ -185,194 +97,132 @@ void PepperVideoRenderer2D::EnableDebugDirtyRegion(bool enable) {
void PepperVideoRenderer2D::OnSessionConfig(
const protocol::SessionConfig& config) {
- DCHECK(CalledOnValidThread());
+ DCHECK(thread_checker_.CalledOnValidThread());
software_video_renderer_->OnSessionConfig(config);
- AllocateBuffers();
}
ChromotingStats* PepperVideoRenderer2D::GetStats() {
- DCHECK(CalledOnValidThread());
+ DCHECK(thread_checker_.CalledOnValidThread());
return software_video_renderer_->GetStats();
}
protocol::VideoStub* PepperVideoRenderer2D::GetVideoStub() {
- DCHECK(CalledOnValidThread());
+ DCHECK(thread_checker_.CalledOnValidThread());
return software_video_renderer_->GetVideoStub();
}
-void PepperVideoRenderer2D::ApplyBuffer(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- const webrtc::DesktopRegion* shape) {
- DCHECK(CalledOnValidThread());
+scoped_ptr<webrtc::DesktopFrame> PepperVideoRenderer2D::AllocateFrame(
+ const webrtc::DesktopSize& size) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ pp::ImageData buffer_data(instance_, PP_IMAGEDATAFORMAT_BGRA_PREMUL,
+ pp::Size(size.width(), size.height()), false);
+ return make_scoped_ptr(new PepperDesktopFrame(buffer_data));
+}
+
+void PepperVideoRenderer2D::DrawFrame(scoped_ptr<webrtc::DesktopFrame> frame,
+ const base::Closure& done) {
+ DCHECK(thread_checker_.CalledOnValidThread());
if (!frame_received_) {
event_handler_->OnVideoFirstFrameReceived();
frame_received_ = true;
}
- // We cannot use the data in the buffer if its dimensions don't match the
- // current view size.
- if (!view_size_.equals(view_size)) {
- FreeBuffer(buffer);
- AllocateBuffers();
- } else {
- FlushBuffer(clip_area, buffer, region);
- if (shape) {
- if (!source_shape_ || !source_shape_->Equals(*shape)) {
- source_shape_ = make_scoped_ptr(new webrtc::DesktopRegion(*shape));
- event_handler_->OnVideoShape(source_shape_.get());
- }
- } else if (source_shape_) {
- source_shape_ = nullptr;
- event_handler_->OnVideoShape(nullptr);
- }
- }
-}
-
-void PepperVideoRenderer2D::ReturnBuffer(webrtc::DesktopFrame* buffer) {
- DCHECK(CalledOnValidThread());
-
- // Reuse the buffer if it is large enough, otherwise drop it on the floor
- // and allocate a new one.
- if (buffer->size().width() >= clip_area_.width() &&
- buffer->size().height() >= clip_area_.height()) {
- software_video_renderer_->DrawBuffer(buffer);
- } else {
- FreeBuffer(buffer);
- AllocateBuffers();
- }
-}
-void PepperVideoRenderer2D::SetSourceSize(
- const webrtc::DesktopSize& source_size,
- const webrtc::DesktopVector& source_dpi) {
- DCHECK(CalledOnValidThread());
-
- if (source_size_.equals(source_size) && source_dpi_.equals(source_dpi))
- return;
+ bool size_changed = !source_size_.equals(frame->size());
+ if (size_changed) {
+ source_size_ = frame->size();
- source_size_ = source_size;
- source_dpi_ = source_dpi;
+ // Create a 2D rendering context with the new dimensions.
+ graphics2d_ = pp::Graphics2D(
+ instance_, pp::Size(source_size_.width(), source_size_.height()), true);
+ graphics2d_.SetScale(static_cast<float>(view_size_.width()) /
+ source_size_.width());
+ bool result = instance_->BindGraphics(graphics2d_);
+ DCHECK(result) << "Couldn't bind the device context.";
+ }
- // Notify JavaScript of the change in source size.
- event_handler_->OnVideoSize(source_size, source_dpi);
-}
-FrameConsumer::PixelFormat PepperVideoRenderer2D::GetPixelFormat() {
- return FORMAT_BGRA;
-}
+ if (size_changed || !source_dpi_.equals(frame->dpi())) {
+ source_dpi_ = frame->dpi();
-void PepperVideoRenderer2D::AllocateBuffers() {
- if (clip_area_.width() == 0 || clip_area_.height() == 0)
- return;
+ // Notify JavaScript of the change in source size.
+ event_handler_->OnVideoSize(source_size_, source_dpi_);
+ }
- while (buffers_.size() < kMaxPendingBuffersCount) {
- // Create an image buffer of the required size, but don't zero it.
- pp::ImageData buffer_data(instance_, PP_IMAGEDATAFORMAT_BGRA_PREMUL,
- pp::Size(clip_area_.width(), clip_area_.height()),
- false);
- if (buffer_data.is_null()) {
- LOG(WARNING) << "Not enough memory for frame buffers.";
- break;
+ const webrtc::DesktopRegion* shape = frame->shape();
+ if (shape) {
+ if (!source_shape_ || !source_shape_->Equals(*shape)) {
+ source_shape_ = make_scoped_ptr(new webrtc::DesktopRegion(*shape));
+ event_handler_->OnVideoShape(source_shape_.get());
}
-
- webrtc::DesktopFrame* buffer = new PepperDesktopFrame(buffer_data);
- buffers_.push_back(buffer);
- software_video_renderer_->DrawBuffer(buffer);
+ } else if (source_shape_) {
+ source_shape_ = nullptr;
+ event_handler_->OnVideoShape(nullptr);
}
-}
-void PepperVideoRenderer2D::FreeBuffer(webrtc::DesktopFrame* buffer) {
- DCHECK(std::find(buffers_.begin(), buffers_.end(), buffer) != buffers_.end());
-
- buffers_.remove(buffer);
- delete buffer;
-}
+  // If debug dirty region reporting is enabled then emit the region.
+ if (debug_dirty_region_)
+ event_handler_->OnVideoFrameDirtyRegion(frame->updated_region());
+
+ const pp::ImageData& image_data =
+ static_cast<PepperDesktopFrame*>(frame.get())->buffer();
+ for (webrtc::DesktopRegion::Iterator i(frame->updated_region()); !i.IsAtEnd();
+ i.Advance()) {
+ graphics2d_.PaintImageData(image_data, pp::Point(0, 0),
+ pp::Rect(i.rect().left(), i.rect().top(),
+ i.rect().width(), i.rect().height()));
+ }
-void PepperVideoRenderer2D::FlushBuffer(const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region) {
- // Defer drawing if the flush is already in progress.
- if (flush_pending_) {
- // |merge_buffer_| is guaranteed to be free here because we allocate only
- // two buffers simultaneously. If more buffers are allowed this code should
- // apply all pending changes to the screen.
- DCHECK(merge_buffer_ == nullptr);
-
- merge_clip_area_ = clip_area;
- merge_buffer_ = buffer;
- merge_region_ = region;
- return;
+ if (!done.is_null()) {
+ pending_frames_done_callbacks_.push_back(
+ new base::ScopedClosureRunner(done));
}
- // Notify Pepper API about the updated areas and flush pixels to the screen.
- base::Time start_time = base::Time::Now();
+ need_flush_ = true;
- for (webrtc::DesktopRegion::Iterator i(region); !i.IsAtEnd(); i.Advance()) {
- webrtc::DesktopRect rect = i.rect();
+ Flush();
+}
- // Re-clip |region| with the current clipping area |clip_area_| because
- // the latter could change from the time the buffer was drawn.
- rect.IntersectWith(clip_area_);
- if (rect.is_empty())
- continue;
+FrameConsumer::PixelFormat PepperVideoRenderer2D::GetPixelFormat() {
+ return FORMAT_BGRA;
+}
- // Specify the rectangle coordinates relative to the clipping area.
- rect.Translate(-clip_area.left(), -clip_area.top());
+void PepperVideoRenderer2D::Flush() {
+ DCHECK(thread_checker_.CalledOnValidThread());
- // Pepper Graphics 2D has a strange and badly documented API that the
- // point here is the offset from the source rect. Why?
- graphics2d_.PaintImageData(
- static_cast<PepperDesktopFrame*>(buffer)->buffer(),
- pp::Point(clip_area.left(), clip_area.top()),
- pp::Rect(rect.left(), rect.top(), rect.width(), rect.height()));
- }
+ if (flush_pending_ || !need_flush_)
+ return;
- // Notify the producer that some parts of the region weren't painted because
- // the clipping area has changed already.
- if (!clip_area.equals(clip_area_)) {
- webrtc::DesktopRegion not_painted = region;
- not_painted.Subtract(clip_area_);
- if (!not_painted.is_empty()) {
- software_video_renderer_->InvalidateRegion(not_painted);
- }
- }
+ need_flush_ = false;
+
+ // Move callbacks from |pending_frames_done_callbacks_| to
+ // |flushing_frames_done_callbacks_| so the callbacks are called when flush is
+ // finished.
+ DCHECK(flushing_frames_done_callbacks_.empty());
+ flushing_frames_done_callbacks_ = pending_frames_done_callbacks_.Pass();
// Flush the updated areas to the screen.
- pp::CompletionCallback callback = callback_factory_.NewCallback(
- &PepperVideoRenderer2D::OnFlushDone, start_time, buffer);
- int error = graphics2d_.Flush(callback);
+ int error = graphics2d_.Flush(
+ callback_factory_.NewCallback(&PepperVideoRenderer2D::OnFlushDone));
CHECK(error == PP_OK_COMPLETIONPENDING);
flush_pending_ = true;
-
- // If Debug dirty region is enabled then emit it.
- if (debug_dirty_region_) {
- event_handler_->OnVideoFrameDirtyRegion(region);
- }
}
-void PepperVideoRenderer2D::OnFlushDone(int result,
- const base::Time& paint_start,
- webrtc::DesktopFrame* buffer) {
- DCHECK(CalledOnValidThread());
- DCHECK(flush_pending_);
-
- software_video_renderer_->GetStats()->RecordPaintTime(
- (base::Time::Now() - paint_start).InMilliseconds());
+void PepperVideoRenderer2D::OnFlushDone(int result) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(flush_pending_);
flush_pending_ = false;
- ReturnBuffer(buffer);
- // If there is a buffer queued for rendering then render it now.
- if (merge_buffer_) {
- buffer = merge_buffer_;
- merge_buffer_ = nullptr;
- FlushBuffer(merge_clip_area_, buffer, merge_region_);
- }
+ // Call all callbacks for the frames we've just flushed.
+ flushing_frames_done_callbacks_.clear();
+
+ // Flush again if necessary.
+ Flush();
}
} // namespace remoting
diff --git a/remoting/client/plugin/pepper_video_renderer_2d.h b/remoting/client/plugin/pepper_video_renderer_2d.h
index 21ac4ec..6f8d35d 100644
--- a/remoting/client/plugin/pepper_video_renderer_2d.h
+++ b/remoting/client/plugin/pepper_video_renderer_2d.h
@@ -9,9 +9,11 @@
#include "base/compiler_specific.h"
#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_vector.h"
#include "base/memory/weak_ptr.h"
-#include "base/threading/non_thread_safe.h"
+#include "base/threading/thread_checker.h"
#include "ppapi/cpp/graphics_2d.h"
+#include "ppapi/cpp/image_data.h"
#include "ppapi/cpp/point.h"
#include "ppapi/cpp/view.h"
#include "ppapi/utility/completion_callback_factory.h"
@@ -21,11 +23,12 @@
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"
namespace base {
-class Time;
+class ScopedClosureRunner;
} // namespace base
namespace webrtc {
class DesktopFrame;
+class SharedDesktopFrame;
} // namespace webrtc
namespace remoting {
@@ -35,16 +38,15 @@ class SoftwareVideoRenderer;
// Video renderer that wraps SoftwareVideoRenderer and displays it using Pepper
// 2D graphics API.
class PepperVideoRenderer2D : public PepperVideoRenderer,
- public FrameConsumer,
- public base::NonThreadSafe {
+ public FrameConsumer {
public:
PepperVideoRenderer2D();
~PepperVideoRenderer2D() override;
// PepperVideoRenderer interface.
bool Initialize(pp::Instance* instance,
- const ClientContext& context,
- EventHandler* event_handler) override;
+ const ClientContext& context,
+ EventHandler* event_handler) override;
void OnViewChanged(const pp::View& view) override;
void EnableDebugDirtyRegion(bool enable) override;
@@ -55,69 +57,27 @@ class PepperVideoRenderer2D : public PepperVideoRenderer,
private:
// FrameConsumer implementation.
- void ApplyBuffer(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region,
- const webrtc::DesktopRegion* shape) override;
- void ReturnBuffer(webrtc::DesktopFrame* buffer) override;
- void SetSourceSize(const webrtc::DesktopSize& source_size,
- const webrtc::DesktopVector& dpi) override;
+ scoped_ptr<webrtc::DesktopFrame> AllocateFrame(
+ const webrtc::DesktopSize& size) override;
+ void DrawFrame(scoped_ptr<webrtc::DesktopFrame> frame,
+ const base::Closure& done) override;
PixelFormat GetPixelFormat() override;
- // Helper to allocate buffers for the decoder.
- void AllocateBuffers();
- // Frees a frame buffer previously allocated by AllocateBuffer.
- void FreeBuffer(webrtc::DesktopFrame* buffer);
-
- // Renders the parts of |buffer| identified by |region| to the view. If the
- // clip area of the view has changed since the buffer was generated then
- // FrameProducer is supplied the missed parts of |region|. The FrameProducer
- // will be supplied a new buffer when FlushBuffer() completes.
- void FlushBuffer(const webrtc::DesktopRect& clip_area,
- webrtc::DesktopFrame* buffer,
- const webrtc::DesktopRegion& region);
-
- // Handles completion of FlushBuffer(), triggering a new buffer to be
- // returned to FrameProducer for rendering.
- void OnFlushDone(int result,
- const base::Time& paint_start,
- webrtc::DesktopFrame* buffer);
+ void Flush();
+ void OnFlushDone(int result);
// Parameters passed to Initialize().
- pp::Instance* instance_;
- EventHandler* event_handler_;
+ pp::Instance* instance_ = nullptr;
+ EventHandler* event_handler_ = nullptr;
pp::Graphics2D graphics2d_;
scoped_ptr<SoftwareVideoRenderer> software_video_renderer_;
- // List of allocated image buffers.
- std::list<webrtc::DesktopFrame*> buffers_;
-
- // Queued buffer to paint, with clip area and dirty region in device pixels.
- webrtc::DesktopFrame* merge_buffer_;
- webrtc::DesktopRect merge_clip_area_;
- webrtc::DesktopRegion merge_region_;
-
- // View size in Density Independent Pixels (DIPs).
- webrtc::DesktopSize dips_size_;
-
- // Scale factor from DIPs to device pixels.
- float dips_to_device_scale_;
-
- // View size in output pixels. This is the size at which FrameProducer must
- // render frames. It usually matches the DIPs size of the view, but may match
- // the size in device pixels when scaling is in effect, to reduce artefacts.
+ // View size in output pixels.
webrtc::DesktopSize view_size_;
- // Scale factor from output pixels to device pixels.
- float dips_to_view_scale_;
-
- // Visible area of the view, in output pixels.
- webrtc::DesktopRect clip_area_;
-
// Size of the most recent source frame in pixels.
webrtc::DesktopSize source_size_;
@@ -127,14 +87,25 @@ class PepperVideoRenderer2D : public PepperVideoRenderer,
// Shape of the most recent source frame.
scoped_ptr<webrtc::DesktopRegion> source_shape_;
+ // Done callbacks for the frames that have been painted but not flushed.
+ ScopedVector<base::ScopedClosureRunner> pending_frames_done_callbacks_;
+
+ // Done callbacks for the frames that are currently being flushed.
+ ScopedVector<base::ScopedClosureRunner> flushing_frames_done_callbacks_;
+
+  // True if there are paint operations that need to be flushed.
+ bool need_flush_ = false;
+
// True if there is already a Flush() pending on the Graphics2D context.
- bool flush_pending_;
+ bool flush_pending_ = false;
- // True after the first call to ApplyBuffer().
- bool frame_received_;
+ // True after the first call to DrawFrame().
+ bool frame_received_ = false;
// True if dirty regions are to be sent to |event_handler_| for debugging.
- bool debug_dirty_region_;
+ bool debug_dirty_region_ = false;
+
+ base::ThreadChecker thread_checker_;
pp::CompletionCallbackFactory<PepperVideoRenderer2D> callback_factory_;
base::WeakPtrFactory<PepperVideoRenderer2D> weak_factory_;
diff --git a/remoting/client/software_video_renderer.cc b/remoting/client/software_video_renderer.cc
index b063b2d..89a86a9 100644
--- a/remoting/client/software_video_renderer.cc
+++ b/remoting/client/software_video_renderer.cc
@@ -4,29 +4,30 @@
#include "remoting/client/software_video_renderer.h"
-#include <list>
-
#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
+#include "base/task_runner_util.h"
#include "remoting/base/util.h"
#include "remoting/client/frame_consumer.h"
#include "remoting/codec/video_decoder.h"
#include "remoting/codec/video_decoder_verbatim.h"
#include "remoting/codec/video_decoder_vpx.h"
+#include "remoting/proto/video.pb.h"
#include "remoting/protocol/session_config.h"
#include "third_party/libyuv/include/libyuv/convert_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
-using base::Passed;
using remoting::protocol::ChannelConfig;
using remoting::protocol::SessionConfig;
namespace remoting {
+namespace {
+
// This class wraps a VideoDecoder and byte-swaps the pixels for compatibility
// with the android.graphics.Bitmap class.
// TODO(lambroslambrou): Refactor so that the VideoDecoder produces data
@@ -34,8 +35,7 @@ namespace remoting {
class RgbToBgrVideoDecoderFilter : public VideoDecoder {
public:
RgbToBgrVideoDecoderFilter(scoped_ptr<VideoDecoder> parent)
- : parent_(parent.Pass()) {
- }
+ : parent_(parent.Pass()) {}
bool DecodePacket(const VideoPacket& packet) override {
return parent_->DecodePacket(packet);
@@ -58,7 +58,7 @@ class RgbToBgrVideoDecoderFilter : public VideoDecoder {
i.Advance()) {
webrtc::DesktopRect rect = i.rect();
uint8* pixels = image_buffer + (rect.top() * image_stride) +
- (rect.left() * kBytesPerPixel);
+ (rect.left() * kBytesPerPixel);
libyuv::ABGRToARGB(pixels, image_stride, pixels, image_stride,
rect.width(), rect.height());
}
@@ -72,71 +72,41 @@ class RgbToBgrVideoDecoderFilter : public VideoDecoder {
scoped_ptr<VideoDecoder> parent_;
};
-class SoftwareVideoRenderer::Core {
- public:
- Core(scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
- scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
- scoped_ptr<FrameConsumerProxy> consumer);
- ~Core();
-
- void OnSessionConfig(const protocol::SessionConfig& config);
- void DrawBuffer(webrtc::DesktopFrame* buffer);
- void InvalidateRegion(const webrtc::DesktopRegion& region);
- void RequestReturnBuffers(const base::Closure& done);
- void SetOutputSizeAndClip(
- const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area);
-
- // Decodes the contents of |packet|. DecodePacket may keep a reference to
- // |packet| so the |packet| must remain alive and valid until |done| is
- // executed.
- void DecodePacket(scoped_ptr<VideoPacket> packet, const base::Closure& done);
-
- private:
- // Paints the invalidated region to the next available buffer and returns it
- // to the consumer.
- void SchedulePaint();
- void DoPaint();
-
- scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
- scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner_;
- scoped_ptr<FrameConsumerProxy> consumer_;
- scoped_ptr<VideoDecoder> decoder_;
+scoped_ptr<webrtc::DesktopFrame> DoDecodeFrame(
+ VideoDecoder* decoder,
+ scoped_ptr<VideoPacket> packet,
+ scoped_ptr<webrtc::DesktopFrame> frame) {
+ if (!decoder->DecodePacket(*packet))
+ return nullptr;
- // Remote screen size in pixels.
- webrtc::DesktopSize source_size_;
+ decoder->RenderFrame(
+ frame->size(), webrtc::DesktopRect::MakeSize(frame->size()),
+ frame->data(), frame->stride(), frame->mutable_updated_region());
- // Vertical and horizontal DPI of the remote screen.
- webrtc::DesktopVector source_dpi_;
+ const webrtc::DesktopRegion* shape = decoder->GetImageShape();
+ if (shape)
+ frame->set_shape(new webrtc::DesktopRegion(*shape));
- // The current dimensions of the frame consumer view.
- webrtc::DesktopSize view_size_;
- webrtc::DesktopRect clip_area_;
-
- // The drawing buffers supplied by the frame consumer.
- std::list<webrtc::DesktopFrame*> buffers_;
-
- // Flag used to coalesce runs of SchedulePaint()s into a single DoPaint().
- bool paint_scheduled_;
+ return frame.Pass();
+}
- base::WeakPtrFactory<Core> weak_factory_;
-};
+} // namespace
-SoftwareVideoRenderer::Core::Core(
- scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
+SoftwareVideoRenderer::SoftwareVideoRenderer(
scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
- scoped_ptr<FrameConsumerProxy> consumer)
- : main_task_runner_(main_task_runner),
- decode_task_runner_(decode_task_runner),
- consumer_(consumer.Pass()),
- paint_scheduled_(false),
+ FrameConsumer* consumer)
+ : decode_task_runner_(decode_task_runner),
+ consumer_(consumer),
weak_factory_(this) {}
-SoftwareVideoRenderer::Core::~Core() {
+SoftwareVideoRenderer::~SoftwareVideoRenderer() {
+ if (decoder_)
+ decode_task_runner_->DeleteSoon(FROM_HERE, decoder_.release());
}
-void SoftwareVideoRenderer::Core::OnSessionConfig(const SessionConfig& config) {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
+void SoftwareVideoRenderer::OnSessionConfig(
+ const protocol::SessionConfig& config) {
+ DCHECK(thread_checker_.CalledOnValidThread());
// Initialize decoder based on the selected codec.
ChannelConfig::Codec codec = config.video_config().codec;
@@ -157,248 +127,92 @@ void SoftwareVideoRenderer::Core::OnSessionConfig(const SessionConfig& config) {
}
}
-void SoftwareVideoRenderer::Core::DecodePacket(scoped_ptr<VideoPacket> packet,
- const base::Closure& done) {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
-
- bool notify_size_or_dpi_change = false;
-
- // If the packet includes screen size or DPI information, store them.
- if (packet->format().has_screen_width() &&
- packet->format().has_screen_height()) {
- webrtc::DesktopSize source_size(packet->format().screen_width(),
- packet->format().screen_height());
- if (!source_size_.equals(source_size)) {
- source_size_ = source_size;
- notify_size_or_dpi_change = true;
- }
- }
- if (packet->format().has_x_dpi() && packet->format().has_y_dpi()) {
- webrtc::DesktopVector source_dpi(packet->format().x_dpi(),
- packet->format().y_dpi());
- if (!source_dpi.equals(source_dpi_)) {
- source_dpi_ = source_dpi;
- notify_size_or_dpi_change = true;
- }
- }
-
- // If we've never seen a screen size, ignore the packet.
- if (source_size_.is_empty()) {
- main_task_runner_->PostTask(FROM_HERE, base::Bind(done));
- return;
- }
-
- if (notify_size_or_dpi_change)
- consumer_->SetSourceSize(source_size_, source_dpi_);
-
- if (decoder_->DecodePacket(*packet.get())) {
- SchedulePaint();
- } else {
- LOG(ERROR) << "DecodePacket() failed.";
- }
-
- main_task_runner_->PostTask(FROM_HERE, base::Bind(done));
-}
-
-void SoftwareVideoRenderer::Core::SchedulePaint() {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
- if (paint_scheduled_)
- return;
- paint_scheduled_ = true;
- decode_task_runner_->PostTask(
- FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::DoPaint,
- weak_factory_.GetWeakPtr()));
-}
-
-void SoftwareVideoRenderer::Core::DoPaint() {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
- DCHECK(paint_scheduled_);
- paint_scheduled_ = false;
-
- // If the view size is empty or we have no output buffers ready, return.
- if (buffers_.empty() || view_size_.is_empty())
- return;
-
- // If no Decoder is initialized, or the host dimensions are empty, return.
- if (!decoder_.get() || source_size_.is_empty())
- return;
-
- // Draw the invalidated region to the buffer.
- webrtc::DesktopFrame* buffer = buffers_.front();
- webrtc::DesktopRegion output_region;
- decoder_->RenderFrame(view_size_, clip_area_,
- buffer->data(), buffer->stride(), &output_region);
-
- // Notify the consumer that painting is done.
- if (!output_region.is_empty()) {
- buffers_.pop_front();
- consumer_->ApplyBuffer(view_size_, clip_area_, buffer, output_region,
- decoder_->GetImageShape());
- }
-}
-
-void SoftwareVideoRenderer::Core::RequestReturnBuffers(
- const base::Closure& done) {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
-
- while (!buffers_.empty()) {
- consumer_->ReturnBuffer(buffers_.front());
- buffers_.pop_front();
- }
-
- if (!done.is_null())
- done.Run();
-}
-
-void SoftwareVideoRenderer::Core::DrawBuffer(webrtc::DesktopFrame* buffer) {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
- DCHECK(clip_area_.width() <= buffer->size().width() &&
- clip_area_.height() <= buffer->size().height());
-
- buffers_.push_back(buffer);
- SchedulePaint();
-}
-
-void SoftwareVideoRenderer::Core::InvalidateRegion(
- const webrtc::DesktopRegion& region) {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
-
- if (decoder_.get()) {
- decoder_->Invalidate(view_size_, region);
- SchedulePaint();
- }
-}
-
-void SoftwareVideoRenderer::Core::SetOutputSizeAndClip(
- const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area) {
- DCHECK(decode_task_runner_->BelongsToCurrentThread());
-
- // The whole frame needs to be repainted if the scaling factor has changed.
- if (!view_size_.equals(view_size) && decoder_.get()) {
- webrtc::DesktopRegion region;
- region.AddRect(webrtc::DesktopRect::MakeSize(view_size));
- decoder_->Invalidate(view_size, region);
- }
-
- if (!view_size_.equals(view_size) ||
- !clip_area_.equals(clip_area)) {
- view_size_ = view_size;
- clip_area_ = clip_area;
-
- // Return buffers that are smaller than needed to the consumer for
- // reuse/reallocation.
- std::list<webrtc::DesktopFrame*>::iterator i = buffers_.begin();
- while (i != buffers_.end()) {
- if ((*i)->size().width() < clip_area_.width() ||
- (*i)->size().height() < clip_area_.height()) {
- consumer_->ReturnBuffer(*i);
- i = buffers_.erase(i);
- } else {
- ++i;
- }
- }
-
- SchedulePaint();
- }
-}
-
-SoftwareVideoRenderer::SoftwareVideoRenderer(
- scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
- scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
- scoped_ptr<FrameConsumerProxy> consumer)
- : decode_task_runner_(decode_task_runner),
- core_(new Core(main_task_runner, decode_task_runner, consumer.Pass())),
- weak_factory_(this) {
- DCHECK(CalledOnValidThread());
-}
-
-SoftwareVideoRenderer::~SoftwareVideoRenderer() {
- DCHECK(CalledOnValidThread());
- bool result = decode_task_runner_->DeleteSoon(FROM_HERE, core_.release());
- DCHECK(result);
-}
-
-void SoftwareVideoRenderer::OnSessionConfig(
- const protocol::SessionConfig& config) {
- DCHECK(CalledOnValidThread());
- decode_task_runner_->PostTask(
- FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::OnSessionConfig,
- base::Unretained(core_.get()), config));
-}
-
ChromotingStats* SoftwareVideoRenderer::GetStats() {
- DCHECK(CalledOnValidThread());
+ DCHECK(thread_checker_.CalledOnValidThread());
return &stats_;
}
protocol::VideoStub* SoftwareVideoRenderer::GetVideoStub() {
+ DCHECK(thread_checker_.CalledOnValidThread());
return this;
}
void SoftwareVideoRenderer::ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
const base::Closure& done) {
- DCHECK(CalledOnValidThread());
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ base::ScopedClosureRunner done_runner(done);
stats_.RecordVideoPacketStats(*packet);
// If the video packet is empty then drop it. Empty packets are used to
// maintain activity on the network.
if (!packet->has_data() || packet->data().size() == 0) {
- done.Run();
return;
}
- // Measure the latency between the last packet being received and presented.
- base::Time decode_start = base::Time::Now();
+ if (packet->format().has_screen_width() &&
+ packet->format().has_screen_height()) {
+ source_size_.set(packet->format().screen_width(),
+ packet->format().screen_height());
+ }
- base::Closure decode_done = base::Bind(&SoftwareVideoRenderer::OnPacketDone,
- weak_factory_.GetWeakPtr(),
- decode_start, done);
+ if (packet->format().has_x_dpi() && packet->format().has_y_dpi()) {
+ webrtc::DesktopVector source_dpi(packet->format().x_dpi(),
+ packet->format().y_dpi());
+ if (!source_dpi.equals(source_dpi_)) {
+ source_dpi_ = source_dpi;
+ }
+ }
- decode_task_runner_->PostTask(FROM_HERE, base::Bind(
- &SoftwareVideoRenderer::Core::DecodePacket,
- base::Unretained(core_.get()), base::Passed(&packet), decode_done));
-}
+ if (source_size_.is_empty()) {
+ LOG(ERROR) << "Received VideoPacket with unknown size.";
+ return;
+ }
-void SoftwareVideoRenderer::DrawBuffer(webrtc::DesktopFrame* buffer) {
- decode_task_runner_->PostTask(
- FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::DrawBuffer,
- base::Unretained(core_.get()), buffer));
-}
+ scoped_ptr<webrtc::DesktopFrame> frame =
+ consumer_->AllocateFrame(source_size_);
+ frame->set_dpi(source_dpi_);
-void SoftwareVideoRenderer::InvalidateRegion(
- const webrtc::DesktopRegion& region) {
- decode_task_runner_->PostTask(
- FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::InvalidateRegion,
- base::Unretained(core_.get()), region));
+ base::PostTaskAndReplyWithResult(
+ decode_task_runner_.get(), FROM_HERE,
+ base::Bind(&DoDecodeFrame, decoder_.get(), base::Passed(&packet),
+ base::Passed(&frame)),
+ base::Bind(&SoftwareVideoRenderer::RenderFrame,
+ weak_factory_.GetWeakPtr(), base::TimeTicks::Now(),
+ done_runner.Release()));
}
-void SoftwareVideoRenderer::RequestReturnBuffers(const base::Closure& done) {
- decode_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&SoftwareVideoRenderer::Core::RequestReturnBuffers,
- base::Unretained(core_.get()), done));
-}
+void SoftwareVideoRenderer::RenderFrame(
+ base::TimeTicks decode_start_time,
+ const base::Closure& done,
+ scoped_ptr<webrtc::DesktopFrame> frame) {
+ DCHECK(thread_checker_.CalledOnValidThread());
-void SoftwareVideoRenderer::SetOutputSizeAndClip(
- const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area) {
- decode_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&SoftwareVideoRenderer::Core::SetOutputSizeAndClip,
- base::Unretained(core_.get()), view_size, clip_area));
+ stats_.RecordDecodeTime(
+ (base::TimeTicks::Now() - decode_start_time).InMilliseconds());
+
+ if (!frame) {
+ if (!done.is_null())
+ done.Run();
+ return;
+ }
+
+ consumer_->DrawFrame(
+ frame.Pass(),
+ base::Bind(&SoftwareVideoRenderer::OnFrameRendered,
+ weak_factory_.GetWeakPtr(), base::TimeTicks::Now(), done));
}
-void SoftwareVideoRenderer::OnPacketDone(base::Time decode_start,
- const base::Closure& done) {
- DCHECK(CalledOnValidThread());
+void SoftwareVideoRenderer::OnFrameRendered(base::TimeTicks paint_start_time,
+ const base::Closure& done) {
+ DCHECK(thread_checker_.CalledOnValidThread());
- // Record the latency between the packet being received and presented.
- base::TimeDelta decode_time = base::Time::Now() - decode_start;
- stats_.RecordDecodeTime(decode_time.InMilliseconds());
+ stats_.RecordPaintTime(
+ (base::TimeTicks::Now() - paint_start_time).InMilliseconds());
- done.Run();
+ if (!done.is_null())
+ done.Run();
}
} // namespace remoting
diff --git a/remoting/client/software_video_renderer.h b/remoting/client/software_video_renderer.h
index 4fbf7fd..595d22a 100644
--- a/remoting/client/software_video_renderer.h
+++ b/remoting/client/software_video_renderer.h
@@ -7,9 +7,9 @@
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/threading/thread_checker.h"
#include "remoting/client/chromoting_stats.h"
-#include "remoting/client/frame_consumer_proxy.h"
-#include "remoting/client/frame_producer.h"
#include "remoting/client/video_renderer.h"
#include "remoting/protocol/video_stub.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
@@ -18,28 +18,28 @@ namespace base {
class SingleThreadTaskRunner;
} // namespace base
+namespace webrtc {
+class DesktopFrame;
+} // namespace webrtc
+
namespace remoting {
class ChromotingStats;
+class FrameConsumer;
+class VideoDecoder;
// Implementation of VideoRenderer interface that decodes frame on CPU (on a
// decode thread) and then passes decoded frames to a FrameConsumer.
-// FrameProducer methods can be called on any thread. All other methods must be
-// called on the main thread. Owned must ensure that this class outlives
-// FrameConsumer (which calls FrameProducer interface).
class SoftwareVideoRenderer : public VideoRenderer,
- public protocol::VideoStub,
- public FrameProducer,
- public base::NonThreadSafe {
+ public protocol::VideoStub {
public:
// Creates an update decoder on |main_task_runner_| and |decode_task_runner_|,
// outputting to |consumer|. The |main_task_runner_| is responsible for
// receiving and queueing packets. The |decode_task_runner_| is responsible
// for decoding the video packets.
SoftwareVideoRenderer(
- scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
- scoped_ptr<FrameConsumerProxy> consumer);
+ FrameConsumer* consumer);
~SoftwareVideoRenderer() override;
// VideoRenderer interface.
@@ -51,31 +51,25 @@ class SoftwareVideoRenderer : public VideoRenderer,
void ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
const base::Closure& done) override;
- // FrameProducer implementation. These methods may be called before we are
- // Initialize()d, or we know the source screen size. These methods may be
- // called on any thread.
- //
- // TODO(sergeyu): On Android a separate display thread is used for drawing.
- // FrameConsumer calls FrameProducer on that thread. Can we avoid having a
- // separate display thread? E.g. can we do everything on the decode thread?
- void DrawBuffer(webrtc::DesktopFrame* buffer) override;
- void InvalidateRegion(const webrtc::DesktopRegion& region) override;
- void RequestReturnBuffers(const base::Closure& done) override;
- void SetOutputSizeAndClip(const webrtc::DesktopSize& view_size,
- const webrtc::DesktopRect& clip_area) override;
-
private:
- class Core;
-
- // Callback method when a VideoPacket is processed. |decode_start| contains
- // the timestamp when the packet will start to be processed.
- void OnPacketDone(base::Time decode_start, const base::Closure& done);
+ void RenderFrame(base::TimeTicks decode_start_time,
+ const base::Closure& done,
+ scoped_ptr<webrtc::DesktopFrame> frame);
+ void OnFrameRendered(base::TimeTicks paint_start_time,
+ const base::Closure& done);
scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner_;
- scoped_ptr<Core> core_;
+ FrameConsumer* consumer_;
+
+ scoped_ptr<VideoDecoder> decoder_;
+
+ webrtc::DesktopSize source_size_;
+ webrtc::DesktopVector source_dpi_;
ChromotingStats stats_;
+ base::ThreadChecker thread_checker_;
+
base::WeakPtrFactory<SoftwareVideoRenderer> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(SoftwareVideoRenderer);
diff --git a/remoting/client/software_video_renderer_unittest.cc b/remoting/client/software_video_renderer_unittest.cc
new file mode 100644
index 0000000..e10a034
--- /dev/null
+++ b/remoting/client/software_video_renderer_unittest.cc
@@ -0,0 +1,182 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "remoting/client/software_video_renderer.h"
+
+#include <vector>
+
+#include "base/bind.h"
+#include "base/memory/scoped_vector.h"
+#include "base/message_loop/message_loop.h"
+#include "base/run_loop.h"
+#include "base/threading/thread.h"
+#include "remoting/client/frame_consumer.h"
+#include "remoting/codec/video_encoder_verbatim.h"
+#include "remoting/proto/video.pb.h"
+#include "remoting/protocol/session_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
+
+using webrtc::DesktopFrame;
+
+namespace remoting {
+
+namespace {
+
+const int kFrameWidth = 200;
+const int kFrameHeight = 200;
+
+class TestFrameConsumer : public FrameConsumer {
+ public:
+ TestFrameConsumer() {}
+ ~TestFrameConsumer() override {}
+
+ scoped_ptr<DesktopFrame> WaitForNextFrame(
+ base::Closure* out_done_callback) {
+ EXPECT_TRUE(thread_checker_.CalledOnValidThread());
+ frame_run_loop_.reset(new base::RunLoop());
+ frame_run_loop_->Run();
+ frame_run_loop_.reset();
+ *out_done_callback = last_frame_done_callback_;
+ last_frame_done_callback_.Reset();
+ return last_frame_.Pass();
+ }
+
+ // FrameConsumer interface.
+ scoped_ptr<DesktopFrame> AllocateFrame(
+ const webrtc::DesktopSize& size) override {
+ EXPECT_TRUE(thread_checker_.CalledOnValidThread());
+ return make_scoped_ptr(new webrtc::BasicDesktopFrame(size));
+ }
+
+ void DrawFrame(scoped_ptr<DesktopFrame> frame,
+ const base::Closure& done) override {
+ EXPECT_TRUE(thread_checker_.CalledOnValidThread());
+ last_frame_ = frame.Pass();
+ last_frame_done_callback_ = done;
+ frame_run_loop_->Quit();
+ }
+
+ PixelFormat GetPixelFormat() override {
+ EXPECT_TRUE(thread_checker_.CalledOnValidThread());
+ return FORMAT_BGRA;
+ }
+
+ private:
+ base::ThreadChecker thread_checker_;
+
+ scoped_ptr<base::RunLoop> frame_run_loop_;
+
+ scoped_ptr<DesktopFrame> last_frame_;
+ base::Closure last_frame_done_callback_;
+};
+
+scoped_ptr<DesktopFrame> CreateTestFrame(int index) {
+ scoped_ptr<DesktopFrame> frame(new webrtc::BasicDesktopFrame(
+ webrtc::DesktopSize(kFrameWidth, kFrameHeight)));
+
+ for (int y = 0; y < kFrameHeight; y++) {
+ for (int x = 0; x < kFrameWidth; x++) {
+ uint8_t* out = frame->data() + x * DesktopFrame::kBytesPerPixel +
+ y * frame->stride();
+ out[0] = index + x + y * kFrameWidth;
+ out[1] = index + x + y * kFrameWidth + 1;
+ out[2] = index + x + y * kFrameWidth + 2;
+ out[3] = 0;
+ }
+ }
+
+ if (index == 0) {
+ frame->mutable_updated_region()->SetRect(
+ webrtc::DesktopRect::MakeWH(kFrameWidth, kFrameHeight));
+ } else {
+ frame->mutable_updated_region()->SetRect(
+ webrtc::DesktopRect::MakeWH(index, index));
+ }
+
+ return frame.Pass();
+}
+
+// Returns true when frames a and b are equivalent.
+bool CompareFrames(const DesktopFrame& a, const DesktopFrame& b) {
+ if (!a.size().equals(b.size()) ||
+ !a.updated_region().Equals(b.updated_region())) {
+ return false;
+ }
+
+ for (webrtc::DesktopRegion::Iterator i(a.updated_region()); !i.IsAtEnd();
+ i.Advance()) {
+ for (int row = i.rect().top(); row < i.rect().bottom(); ++row) {
+ if (memcmp(a.data() + a.stride() * row +
+ i.rect().left() * DesktopFrame::kBytesPerPixel,
+ b.data() + b.stride() * row +
+ i.rect().left() * DesktopFrame::kBytesPerPixel,
+ i.rect().width() * DesktopFrame::kBytesPerPixel) != 0) {
+ return false;
+ }
+ }
+ }
+
+ return true;
+}
+
+// Helper to set value at |out| to 1.
+void SetTrue(int* out) {
+ *out = 1;
+}
+
+} // namespace
+
+class SoftwareVideoRendererTest : public ::testing::Test {
+ public:
+ SoftwareVideoRendererTest() : decode_thread_("TestDecodeThread") {
+ decode_thread_.Start();
+ renderer_.reset(new SoftwareVideoRenderer(decode_thread_.task_runner(),
+ &frame_consumer_));
+ renderer_->OnSessionConfig(
+ *protocol::SessionConfig::ForTestWithVerbatimVideo());
+ }
+
+ protected:
+ base::MessageLoop message_loop_;
+ base::Thread decode_thread_;
+
+ TestFrameConsumer frame_consumer_;
+ scoped_ptr<SoftwareVideoRenderer> renderer_;
+
+ VideoEncoderVerbatim encoder_;
+};
+
+TEST_F(SoftwareVideoRendererTest, DecodeFrame) {
+ const int kFrameCount = 5;
+
+ ScopedVector<DesktopFrame> test_frames;
+
+ // std::vector<bool> does not provide access to a pointer to an individual
+ // element, so int is used instead.
+ std::vector<int> callback_called(kFrameCount);
+
+ for (int frame_index = 0; frame_index < kFrameCount; frame_index++) {
+ test_frames.push_back(CreateTestFrame(frame_index));
+ callback_called[frame_index] = 0;
+
+ renderer_->ProcessVideoPacket(
+ encoder_.Encode(*test_frames[frame_index]),
+ base::Bind(&SetTrue, &(callback_called[frame_index])));
+ }
+
+ for (int frame_index = 0; frame_index < kFrameCount; frame_index++) {
+ base::Closure done_callback;
+ scoped_ptr<DesktopFrame> decoded_frame =
+ frame_consumer_.WaitForNextFrame(&done_callback);
+
+ EXPECT_FALSE(callback_called[frame_index]);
+ done_callback.Run();
+ EXPECT_TRUE(callback_called[frame_index]);
+
+ EXPECT_TRUE(CompareFrames(*test_frames[frame_index], *decoded_frame));
+ }
+}
+
+} // namespace remoting
diff --git a/remoting/protocol/monitored_video_stub.cc b/remoting/protocol/monitored_video_stub.cc
index dcc4e1f..245c2ae 100644
--- a/remoting/protocol/monitored_video_stub.cc
+++ b/remoting/protocol/monitored_video_stub.cc
@@ -6,9 +6,7 @@
#include "base/bind.h"
#include "base/logging.h"
-#include "remoting/codec/video_decoder.h"
-#include "remoting/codec/video_decoder_verbatim.h"
-#include "remoting/codec/video_decoder_vpx.h"
+#include "remoting/proto/video.pb.h"
namespace remoting {
namespace protocol {
diff --git a/remoting/protocol/session_config.cc b/remoting/protocol/session_config.cc
index c2b54e4..9034cea 100644
--- a/remoting/protocol/session_config.cc
+++ b/remoting/protocol/session_config.cc
@@ -151,6 +151,14 @@ scoped_ptr<SessionConfig> SessionConfig::ForTest() {
return result.Pass();
}
+scoped_ptr<SessionConfig> SessionConfig::ForTestWithVerbatimVideo() {
+ scoped_ptr<SessionConfig> result = ForTest();
+ result->video_config_ = ChannelConfig(ChannelConfig::TRANSPORT_QUIC_STREAM,
+ kDefaultStreamVersion,
+ ChannelConfig::CODEC_VERBATIM);
+ return result.Pass();
+}
+
SessionConfig::SessionConfig() {}
CandidateSessionConfig::CandidateSessionConfig() {}
diff --git a/remoting/protocol/session_config.h b/remoting/protocol/session_config.h
index 6885de8..cc444a0 100644
--- a/remoting/protocol/session_config.h
+++ b/remoting/protocol/session_config.h
@@ -81,6 +81,7 @@ class SessionConfig {
// Returns a suitable session configuration for use in tests.
static scoped_ptr<SessionConfig> ForTest();
+ static scoped_ptr<SessionConfig> ForTestWithVerbatimVideo();
bool standard_ice() const { return standard_ice_; }
diff --git a/remoting/remoting_srcs.gypi b/remoting/remoting_srcs.gypi
index effc34a..219e624 100644
--- a/remoting/remoting_srcs.gypi
+++ b/remoting/remoting_srcs.gypi
@@ -247,9 +247,6 @@
'client/empty_cursor_filter.cc',
'client/empty_cursor_filter.h',
'client/frame_consumer.h',
- 'client/frame_consumer_proxy.cc',
- 'client/frame_consumer_proxy.h',
- 'client/frame_producer.h',
'client/key_event_mapper.cc',
'client/key_event_mapper.h',
'client/normalizing_input_filter_cros.cc',
diff --git a/remoting/remoting_test.gypi b/remoting/remoting_test.gypi
index e093aa1..2f842f8 100644
--- a/remoting/remoting_test.gypi
+++ b/remoting/remoting_test.gypi
@@ -228,6 +228,7 @@
'client/normalizing_input_filter_cros_unittest.cc',
'client/normalizing_input_filter_mac_unittest.cc',
'client/server_log_entry_client_unittest.cc',
+ 'client/software_video_renderer_unittest.cc',
'client/touch_input_scaler_unittest.cc',
'codec/audio_encoder_opus_unittest.cc',
'codec/codec_test.cc',