summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorsergeyu@chromium.org <sergeyu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2014-02-06 10:38:47 +0000
committersergeyu@chromium.org <sergeyu@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2014-02-06 10:38:47 +0000
commitfd146835762ea78ac9a9b87f7e489fc64357d2ad (patch)
treeabf845563cdd80f1e643a77407da264a4621167e
parent4950ea06017b4c727edc367f30c1bc9c2c68fee6 (diff)
downloadchromium_src-fd146835762ea78ac9a9b87f7e489fc64357d2ad.zip
chromium_src-fd146835762ea78ac9a9b87f7e489fc64357d2ad.tar.gz
chromium_src-fd146835762ea78ac9a9b87f7e489fc64357d2ad.tar.bz2
Chromoting client: video renderer based on MediaSource API.
The new renderer is not enabled yet by default, but can be enabled using remoting.settings.USE_MEDIA_SOURCE_RENDERING flag in the webapp. BUG=321825 Review URL: https://codereview.chromium.org/134163005 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@249338 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--remoting/client/plugin/DEPS3
-rw-r--r--remoting/client/plugin/chromoting_instance.cc135
-rw-r--r--remoting/client/plugin/chromoting_instance.h26
-rw-r--r--remoting/client/plugin/media_source_video_renderer.cc189
-rw-r--r--remoting/client/plugin/media_source_video_renderer.h70
-rw-r--r--remoting/client/plugin/pepper_view.h6
-rw-r--r--remoting/remoting_client.gypi3
-rw-r--r--remoting/remoting_webapp_files.gypi3
-rw-r--r--remoting/webapp/client_plugin.js104
-rw-r--r--remoting/webapp/client_session.js41
-rw-r--r--remoting/webapp/js_proto/dom_proto.js66
-rw-r--r--remoting/webapp/main.css32
-rw-r--r--remoting/webapp/main.html6
-rw-r--r--remoting/webapp/media_source_renderer.js86
-rw-r--r--remoting/webapp/plugin_settings.js3
-rw-r--r--third_party/libwebm/libwebm.gyp8
16 files changed, 694 insertions, 87 deletions
diff --git a/remoting/client/plugin/DEPS b/remoting/client/plugin/DEPS
new file mode 100644
index 0000000..dd27068
--- /dev/null
+++ b/remoting/client/plugin/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+third_party/libwebm",
+]
diff --git a/remoting/client/plugin/chromoting_instance.cc b/remoting/client/plugin/chromoting_instance.cc
index b94c207..ef5f428 100644
--- a/remoting/client/plugin/chromoting_instance.cc
+++ b/remoting/client/plugin/chromoting_instance.cc
@@ -34,6 +34,7 @@
#include "remoting/client/client_config.h"
#include "remoting/client/frame_consumer_proxy.h"
#include "remoting/client/plugin/delegating_signal_strategy.h"
+#include "remoting/client/plugin/media_source_video_renderer.h"
#include "remoting/client/plugin/pepper_audio_player.h"
#include "remoting/client/plugin/pepper_input_handler.h"
#include "remoting/client/plugin/pepper_port_allocator.h"
@@ -171,7 +172,7 @@ logging::LogMessageHandlerFunction g_logging_old_handler = NULL;
const char ChromotingInstance::kApiFeatures[] =
"highQualityScaling injectKeyEvent sendClipboardItem remapKey trapKey "
"notifyClientResolution pauseVideo pauseAudio asyncPin thirdPartyAuth "
- "pinlessAuth extensionMessage allowMouseLock";
+ "pinlessAuth extensionMessage allowMouseLock mediaSourceRendering";
const char ChromotingInstance::kRequestedCapabilities[] = "";
const char ChromotingInstance::kSupportedCapabilities[] = "desktopShape";
@@ -205,6 +206,7 @@ ChromotingInstance::ChromotingInstance(PP_Instance pp_instance)
normalizing_input_filter_(CreateNormalizingInputFilter(&key_mapper_)),
input_handler_(this, normalizing_input_filter_.get()),
use_async_pin_dialog_(false),
+ use_media_source_rendering_(false),
weak_factory_(this) {
RequestInputEvents(PP_INPUTEVENT_CLASS_MOUSE | PP_INPUTEVENT_CLASS_WHEEL);
RequestFilteringInputEvents(PP_INPUTEVENT_CLASS_KEYBOARD);
@@ -227,7 +229,7 @@ ChromotingInstance::ChromotingInstance(PP_Instance pp_instance)
data->SetString("requestedCapabilities", kRequestedCapabilities);
data->SetString("supportedCapabilities", kSupportedCapabilities);
- PostChromotingMessage("hello", data.Pass());
+ PostLegacyJsonMessage("hello", data.Pass());
}
ChromotingInstance::~ChromotingInstance() {
@@ -340,6 +342,8 @@ void ChromotingInstance::HandleMessage(const pp::Var& message) {
HandleExtensionMessage(*data);
} else if (method == "allowMouseLock") {
HandleAllowMouseLockMessage();
+ } else if (method == "enableMediaSourceRendering") {
+ HandleEnableMediaSourceRendering();
}
}
@@ -353,10 +357,11 @@ void ChromotingInstance::DidChangeView(const pp::View& view) {
DCHECK(plugin_task_runner_->BelongsToCurrentThread());
plugin_view_ = view;
- if (view_) {
+ mouse_input_filter_.set_input_size(
+ webrtc::DesktopSize(view.GetRect().width(), view.GetRect().height()));
+
+ if (view_)
view_->SetView(view);
- mouse_input_filter_.set_input_size(view_->get_view_size_dips());
- }
}
bool ChromotingInstance::HandleInputEvent(const pp::InputEvent& event) {
@@ -379,7 +384,7 @@ void ChromotingInstance::SetDesktopSize(const webrtc::DesktopSize& size,
data->SetInteger("x_dpi", dpi.x());
if (dpi.y())
data->SetInteger("y_dpi", dpi.y());
- PostChromotingMessage("onDesktopSize", data.Pass());
+ PostLegacyJsonMessage("onDesktopSize", data.Pass());
}
void ChromotingInstance::SetDesktopShape(const webrtc::DesktopRegion& shape) {
@@ -401,7 +406,7 @@ void ChromotingInstance::SetDesktopShape(const webrtc::DesktopRegion& shape) {
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->Set("rects", rects_value.release());
- PostChromotingMessage("onDesktopShape", data.Pass());
+ PostLegacyJsonMessage("onDesktopShape", data.Pass());
}
void ChromotingInstance::OnConnectionState(
@@ -410,7 +415,7 @@ void ChromotingInstance::OnConnectionState(
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetString("state", ConnectionStateToString(state));
data->SetString("error", ConnectionErrorToString(error));
- PostChromotingMessage("onConnectionStatus", data.Pass());
+ PostLegacyJsonMessage("onConnectionStatus", data.Pass());
}
void ChromotingInstance::FetchThirdPartyToken(
@@ -427,13 +432,13 @@ void ChromotingInstance::FetchThirdPartyToken(
data->SetString("tokenUrl", token_url.spec());
data->SetString("hostPublicKey", host_public_key);
data->SetString("scope", scope);
- PostChromotingMessage("fetchThirdPartyToken", data.Pass());
+ PostLegacyJsonMessage("fetchThirdPartyToken", data.Pass());
}
void ChromotingInstance::OnConnectionReady(bool ready) {
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetBoolean("ready", ready);
- PostChromotingMessage("onConnectionReady", data.Pass());
+ PostLegacyJsonMessage("onConnectionReady", data.Pass());
}
void ChromotingInstance::OnRouteChanged(const std::string& channel_name,
@@ -442,13 +447,13 @@ void ChromotingInstance::OnRouteChanged(const std::string& channel_name,
std::string message = "Channel " + channel_name + " using " +
protocol::TransportRoute::GetTypeString(route.type) + " connection.";
data->SetString("message", message);
- PostChromotingMessage("logDebugMessage", data.Pass());
+ PostLegacyJsonMessage("logDebugMessage", data.Pass());
}
void ChromotingInstance::SetCapabilities(const std::string& capabilities) {
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetString("capabilities", capabilities);
- PostChromotingMessage("setCapabilities", data.Pass());
+ PostLegacyJsonMessage("setCapabilities", data.Pass());
}
void ChromotingInstance::SetPairingResponse(
@@ -456,7 +461,7 @@ void ChromotingInstance::SetPairingResponse(
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetString("clientId", pairing_response.client_id());
data->SetString("sharedSecret", pairing_response.shared_secret());
- PostChromotingMessage("pairingResponse", data.Pass());
+ PostLegacyJsonMessage("pairingResponse", data.Pass());
}
void ChromotingInstance::DeliverHostMessage(
@@ -464,7 +469,7 @@ void ChromotingInstance::DeliverHostMessage(
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetString("type", message.type());
data->SetString("data", message.data());
- PostChromotingMessage("extensionMessage", data.Pass());
+ PostLegacyJsonMessage("extensionMessage", data.Pass());
}
void ChromotingInstance::FetchSecretFromDialog(
@@ -477,7 +482,7 @@ void ChromotingInstance::FetchSecretFromDialog(
secret_fetched_callback_ = secret_fetched_callback;
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetBoolean("pairingSupported", pairing_supported);
- PostChromotingMessage("fetchPin", data.Pass());
+ PostLegacyJsonMessage("fetchPin", data.Pass());
}
void ChromotingInstance::FetchSecretFromString(
@@ -510,7 +515,7 @@ void ChromotingInstance::InjectClipboardEvent(
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetString("mimeType", event.mime_type());
data->SetString("item", event.data());
- PostChromotingMessage("injectClipboardItem", data.Pass());
+ PostLegacyJsonMessage("injectClipboardItem", data.Pass());
}
void ChromotingInstance::SetCursorShape(
@@ -580,7 +585,7 @@ void ChromotingInstance::SetCursorShape(
void ChromotingInstance::OnFirstFrameReceived() {
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
- PostChromotingMessage("onFirstFrameReceived", data.Pass());
+ PostLegacyJsonMessage("onFirstFrameReceived", data.Pass());
}
void ChromotingInstance::HandleConnect(const base::DictionaryValue& data) {
@@ -629,23 +634,28 @@ void ChromotingInstance::ConnectWithConfig(const ClientConfig& config,
jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
-
- view_.reset(new PepperView(this, &context_));
- view_weak_factory_.reset(
- new base::WeakPtrFactory<FrameConsumer>(view_.get()));
-
- // SoftwareVideoRenderer runs on a separate thread so for now we wrap
- // PepperView with a ref-counted proxy object.
- scoped_refptr<FrameConsumerProxy> consumer_proxy =
- new FrameConsumerProxy(plugin_task_runner_,
- view_weak_factory_->GetWeakPtr());
-
- SoftwareVideoRenderer* decoder =
- new SoftwareVideoRenderer(context_.main_task_runner(),
- context_.decode_task_runner(),
- consumer_proxy);
- view_->Initialize(decoder);
- video_renderer_.reset(decoder);
+ if (use_media_source_rendering_) {
+ video_renderer_.reset(new MediaSourceVideoRenderer(this));
+ } else {
+ view_.reset(new PepperView(this, &context_));
+ view_weak_factory_.reset(
+ new base::WeakPtrFactory<FrameConsumer>(view_.get()));
+
+ // SoftwareVideoRenderer runs on a separate thread so for now we wrap
+ // PepperView with a ref-counted proxy object.
+ scoped_refptr<FrameConsumerProxy> consumer_proxy =
+ new FrameConsumerProxy(plugin_task_runner_,
+ view_weak_factory_->GetWeakPtr());
+
+ SoftwareVideoRenderer* renderer =
+ new SoftwareVideoRenderer(context_.main_task_runner(),
+ context_.decode_task_runner(),
+ consumer_proxy);
+ view_->Initialize(renderer);
+ if (!plugin_view_.is_null())
+ view_->SetView(plugin_view_);
+ video_renderer_.reset(renderer);
+ }
host_connection_.reset(new protocol::ConnectionToHost(true));
scoped_ptr<AudioPlayer> audio_player(new PepperAudioPlayer(this));
@@ -653,12 +663,12 @@ void ChromotingInstance::ConnectWithConfig(const ClientConfig& config,
this, video_renderer_.get(),
audio_player.Pass()));
- if (!plugin_view_.is_null())
- view_->SetView(plugin_view_);
-
// Connect the input pipeline to the protocol stub & initialize components.
mouse_input_filter_.set_input_stub(host_connection_->input_stub());
- mouse_input_filter_.set_input_size(view_->get_view_size_dips());
+ if (!plugin_view_.is_null()) {
+ mouse_input_filter_.set_input_size(webrtc::DesktopSize(
+ plugin_view_.GetRect().width(), plugin_view_.GetRect().height()));
+ }
VLOG(0) << "Connecting to " << config.host_jid
<< ". Local jid: " << local_jid << ".";
@@ -909,13 +919,25 @@ void ChromotingInstance::HandleAllowMouseLockMessage() {
input_handler_.AllowMouseLock();
}
+void ChromotingInstance::HandleEnableMediaSourceRendering() {
+ use_media_source_rendering_ = true;
+}
+
ChromotingStats* ChromotingInstance::GetStats() {
if (!video_renderer_.get())
return NULL;
return video_renderer_->GetStats();
}
-void ChromotingInstance::PostChromotingMessage(
+void ChromotingInstance::PostChromotingMessage(const std::string& method,
+ const pp::VarDictionary& data) {
+ pp::VarDictionary message;
+ message.Set(pp::Var("method"), pp::Var(method));
+ message.Set(pp::Var("data"), data);
+ PostMessage(message);
+}
+
+void ChromotingInstance::PostLegacyJsonMessage(
const std::string& method,
scoped_ptr<base::DictionaryValue> data) {
scoped_ptr<base::DictionaryValue> message(new base::DictionaryValue());
@@ -931,13 +953,13 @@ void ChromotingInstance::SendTrappedKey(uint32 usb_keycode, bool pressed) {
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetInteger("usbKeycode", usb_keycode);
data->SetBoolean("pressed", pressed);
- PostChromotingMessage("trappedKeyEvent", data.Pass());
+ PostLegacyJsonMessage("trappedKeyEvent", data.Pass());
}
void ChromotingInstance::SendOutgoingIq(const std::string& iq) {
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetString("iq", iq);
- PostChromotingMessage("sendOutgoingIq", data.Pass());
+ PostLegacyJsonMessage("sendOutgoingIq", data.Pass());
}
void ChromotingInstance::SendPerfStats() {
@@ -959,7 +981,7 @@ void ChromotingInstance::SendPerfStats() {
data->SetDouble("decodeLatency", stats->video_decode_ms()->Average());
data->SetDouble("renderLatency", stats->video_paint_ms()->Average());
data->SetDouble("roundtripLatency", stats->round_trip_ms()->Average());
- PostChromotingMessage("onPerfStats", data.Pass());
+ PostLegacyJsonMessage("onPerfStats", data.Pass());
}
// static
@@ -1060,7 +1082,7 @@ void ChromotingInstance::ProcessLogToUI(const std::string& message) {
g_logging_to_plugin = true;
scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
data->SetString("message", message);
- PostChromotingMessage("logDebugMessage", data.Pass());
+ PostLegacyJsonMessage("logDebugMessage", data.Pass());
g_logging_to_plugin = false;
}
@@ -1085,4 +1107,31 @@ bool ChromotingInstance::IsConnected() {
(host_connection_->state() == protocol::ConnectionToHost::CONNECTED);
}
+void ChromotingInstance::OnMediaSourceSize(const webrtc::DesktopSize& size,
+ const webrtc::DesktopVector& dpi) {
+ SetDesktopSize(size, dpi);
+}
+
+void ChromotingInstance::OnMediaSourceShape(
+ const webrtc::DesktopRegion& shape) {
+ SetDesktopShape(shape);
+}
+
+void ChromotingInstance::OnMediaSourceReset(const std::string& format) {
+ scoped_ptr<base::DictionaryValue> data(new base::DictionaryValue());
+ data->SetString("format", format);
+ PostLegacyJsonMessage("mediaSourceReset", data.Pass());
+}
+
+void ChromotingInstance::OnMediaSourceData(uint8_t* buffer,
+ size_t buffer_size) {
+ pp::VarArrayBuffer array_buffer(buffer_size);
+ void* data_ptr = array_buffer.Map();
+ memcpy(data_ptr, buffer, buffer_size);
+ array_buffer.Unmap();
+ pp::VarDictionary data_dictionary;
+ data_dictionary.Set(pp::Var("buffer"), array_buffer);
+ PostChromotingMessage("mediaSourceData", data_dictionary);
+}
+
} // namespace remoting
diff --git a/remoting/client/plugin/chromoting_instance.h b/remoting/client/plugin/chromoting_instance.h
index 3b0ad76..b5e480d 100644
--- a/remoting/client/plugin/chromoting_instance.h
+++ b/remoting/client/plugin/chromoting_instance.h
@@ -21,6 +21,7 @@
#include "remoting/client/client_context.h"
#include "remoting/client/client_user_interface.h"
#include "remoting/client/key_event_mapper.h"
+#include "remoting/client/plugin/media_source_video_renderer.h"
#include "remoting/client/plugin/normalizing_input_filter.h"
#include "remoting/client/plugin/pepper_input_handler.h"
#include "remoting/client/plugin/pepper_plugin_thread_delegate.h"
@@ -68,6 +69,7 @@ struct ClientConfig;
class ChromotingInstance :
public ClientUserInterface,
+ public MediaSourceVideoRenderer::Delegate,
public protocol::ClipboardStub,
public protocol::CursorShapeStub,
public pp::Instance {
@@ -204,6 +206,7 @@ class ChromotingInstance :
void HandleRequestPairing(const base::DictionaryValue& data);
void HandleExtensionMessage(const base::DictionaryValue& data);
void HandleAllowMouseLockMessage();
+ void HandleEnableMediaSourceRendering();
// Helper method called from Connect() to connect with parsed config.
void ConnectWithConfig(const ClientConfig& config,
@@ -211,7 +214,16 @@ class ChromotingInstance :
// Helper method to post messages to the webapp.
void PostChromotingMessage(const std::string& method,
- scoped_ptr<base::DictionaryValue> data);
+ const pp::VarDictionary& data);
+
+ // Same as above, but serializes messages to JSON before sending them. This
+ // method is used for backward compatibility with older versions of the webapp
+ // that expect to receive most messages formatted using JSON.
+ //
+ // TODO(sergeyu): When all current versions of the webapp support raw messages
+ // remove this method and use PostChromotingMessage() instead.
+ void PostLegacyJsonMessage(const std::string& method,
+ scoped_ptr<base::DictionaryValue> data);
// Posts trapped keys to the web-app to handle.
void SendTrappedKey(uint32 usb_keycode, bool pressed);
@@ -235,6 +247,13 @@ class ChromotingInstance :
bool pairing_supported,
const protocol::SecretFetchedCallback& secret_fetched_callback);
+ // MediaSourceVideoRenderer::Delegate implementation.
+ virtual void OnMediaSourceSize(const webrtc::DesktopSize& size,
+ const webrtc::DesktopVector& dpi) OVERRIDE;
+ virtual void OnMediaSourceShape(const webrtc::DesktopRegion& shape) OVERRIDE;
+ virtual void OnMediaSourceReset(const std::string& format) OVERRIDE;
+ virtual void OnMediaSourceData(uint8_t* buffer, size_t buffer_size) OVERRIDE;
+
bool initialized_;
PepperPluginThreadDelegate plugin_thread_delegate_;
@@ -264,6 +283,11 @@ class ChromotingInstance :
bool use_async_pin_dialog_;
protocol::SecretFetchedCallback secret_fetched_callback_;
+ // Set to true if the webapp has requested to use MediaSource API for
+ // rendering. In that case all the encoded video will be passed to the
+ // webapp for decoding.
+ bool use_media_source_rendering_;
+
base::WeakPtr<PepperTokenFetcher> pepper_token_fetcher_;
// Weak reference to this instance, used for global logging and task posting.
diff --git a/remoting/client/plugin/media_source_video_renderer.cc b/remoting/client/plugin/media_source_video_renderer.cc
new file mode 100644
index 0000000..f14a61e
--- /dev/null
+++ b/remoting/client/plugin/media_source_video_renderer.cc
@@ -0,0 +1,189 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "remoting/client/plugin/media_source_video_renderer.h"
+
+#include <string.h>
+
+#include "base/callback_helpers.h"
+#include "base/logging.h"
+#include "remoting/proto/video.pb.h"
+#include "remoting/protocol/session_config.h"
+#include "third_party/libwebm/source/mkvmuxer.hpp"
+
+namespace remoting {
+
+static int kFrameIntervalNs = 1000000;
+
+class MediaSourceVideoRenderer::VideoWriter : public mkvmuxer::IMkvWriter {
+ public:
+ typedef std::vector<uint8_t> DataBuffer;
+
+ VideoWriter(const webrtc::DesktopSize& frame_size);
+ virtual ~VideoWriter();
+
+ const webrtc::DesktopSize& size() { return frame_size_; }
+ int64_t last_frame_timestamp() { return timecode_ - kFrameIntervalNs; }
+
+ // IMkvWriter interface.
+ virtual mkvmuxer::int32 Write(const void* buf, mkvmuxer::uint32 len) OVERRIDE;
+ virtual mkvmuxer::int64 Position() const OVERRIDE;
+ virtual mkvmuxer::int32 Position(mkvmuxer::int64 position) OVERRIDE;
+ virtual bool Seekable() const OVERRIDE;
+ virtual void ElementStartNotify(mkvmuxer::uint64 element_id,
+ mkvmuxer::int64 position) OVERRIDE;
+
+ scoped_ptr<DataBuffer> OnVideoFrame(const std::string& video_data);
+
+ private:
+ webrtc::DesktopSize frame_size_;
+ scoped_ptr<DataBuffer> output_data_;
+ int64_t position_;
+ scoped_ptr<mkvmuxer::Segment> segment_;
+ int64_t timecode_;
+};
+
+MediaSourceVideoRenderer::VideoWriter::VideoWriter(
+ const webrtc::DesktopSize& frame_size)
+ : frame_size_(frame_size),
+ position_(0),
+ timecode_(0) {
+ segment_.reset(new mkvmuxer::Segment());
+ segment_->Init(this);
+ segment_->set_mode(mkvmuxer::Segment::kLive);
+ segment_->AddVideoTrack(frame_size_.width(), frame_size_.height(), 1);
+ mkvmuxer::SegmentInfo* const info = segment_->GetSegmentInfo();
+ info->set_writing_app("ChromotingViewer");
+ info->set_muxing_app("ChromotingViewer");
+}
+
+MediaSourceVideoRenderer::VideoWriter::~VideoWriter() {}
+
+mkvmuxer::int32 MediaSourceVideoRenderer::VideoWriter::Write(
+ const void* buf,
+ mkvmuxer::uint32 len) {
+ output_data_->insert(output_data_->end(),
+ reinterpret_cast<const char*>(buf),
+ reinterpret_cast<const char*>(buf) + len);
+ position_ += len;
+ return 0;
+}
+
+mkvmuxer::int64 MediaSourceVideoRenderer::VideoWriter::Position() const {
+ return position_;
+}
+
+mkvmuxer::int32 MediaSourceVideoRenderer::VideoWriter::Position(
+ mkvmuxer::int64 position) {
+ return -1;
+}
+
+bool MediaSourceVideoRenderer::VideoWriter::Seekable() const {
+ return false;
+}
+
+void MediaSourceVideoRenderer::VideoWriter::ElementStartNotify(
+ mkvmuxer::uint64 element_id,
+ mkvmuxer::int64 position) {
+}
+
+scoped_ptr<MediaSourceVideoRenderer::VideoWriter::DataBuffer>
+MediaSourceVideoRenderer::VideoWriter::OnVideoFrame(
+ const std::string& video_data) {
+ DCHECK(!output_data_);
+
+ output_data_.reset(new DataBuffer());
+ bool first_frame = (timecode_ == 0);
+ segment_->AddFrame(reinterpret_cast<const uint8_t*>(video_data.data()),
+ video_data.size(), 1, timecode_, first_frame);
+ timecode_ += kFrameIntervalNs;
+ return output_data_.Pass();
+}
+
+MediaSourceVideoRenderer::MediaSourceVideoRenderer(Delegate* delegate)
+ : delegate_(delegate),
+ latest_sequence_number_(0) {
+}
+
+MediaSourceVideoRenderer::~MediaSourceVideoRenderer() {}
+
+void MediaSourceVideoRenderer::Initialize(
+ const protocol::SessionConfig& config) {
+ DCHECK_EQ(config.video_config().codec, protocol::ChannelConfig::CODEC_VP8);
+}
+
+ChromotingStats* MediaSourceVideoRenderer::GetStats() {
+ return &stats_;
+}
+
+void MediaSourceVideoRenderer::ProcessVideoPacket(
+ scoped_ptr<VideoPacket> packet,
+ const base::Closure& done) {
+ base::ScopedClosureRunner done_runner(done);
+
+ // Don't need to do anything if the packet is empty. Host sends empty video
+ // packets when the screen is not changing.
+ if (!packet->data().size())
+ return;
+
+ // Update statistics.
+ stats_.video_frame_rate()->Record(1);
+ stats_.video_bandwidth()->Record(packet->data().size());
+ if (packet->has_capture_time_ms())
+ stats_.video_capture_ms()->Record(packet->capture_time_ms());
+ if (packet->has_encode_time_ms())
+ stats_.video_encode_ms()->Record(packet->encode_time_ms());
+ if (packet->has_client_sequence_number() &&
+ packet->client_sequence_number() > latest_sequence_number_) {
+ latest_sequence_number_ = packet->client_sequence_number();
+ base::TimeDelta round_trip_latency =
+ base::Time::Now() -
+ base::Time::FromInternalValue(packet->client_sequence_number());
+ stats_.round_trip_ms()->Record(round_trip_latency.InMilliseconds());
+ }
+
+ bool media_source_needs_reset = false;
+
+ webrtc::DesktopSize frame_size(packet->format().screen_width(),
+ packet->format().screen_height());
+ if (!writer_ ||
+ (!writer_->size().equals(frame_size) && !frame_size.is_empty())) {
+ media_source_needs_reset = true;
+ writer_.reset(new VideoWriter(frame_size));
+ delegate_->OnMediaSourceReset("video/webm; codecs=\"vp8\"");
+ }
+
+ webrtc::DesktopVector frame_dpi(packet->format().x_dpi(),
+ packet->format().y_dpi());
+ if (media_source_needs_reset || !frame_dpi_.equals(frame_dpi)) {
+ frame_dpi_ = frame_dpi;
+ delegate_->OnMediaSourceSize(frame_size, frame_dpi);
+ }
+
+ // Update the desktop shape region.
+ webrtc::DesktopRegion desktop_shape;
+ if (packet->has_use_desktop_shape()) {
+ for (int i = 0; i < packet->desktop_shape_rects_size(); ++i) {
+ Rect remoting_rect = packet->desktop_shape_rects(i);
+ desktop_shape.AddRect(webrtc::DesktopRect::MakeXYWH(
+ remoting_rect.x(), remoting_rect.y(),
+ remoting_rect.width(), remoting_rect.height()));
+ }
+ } else {
+ // Fallback for the case when the host didn't include the desktop shape.
+ desktop_shape =
+ webrtc::DesktopRegion(webrtc::DesktopRect::MakeSize(frame_size));
+ }
+
+ if (!desktop_shape_.Equals(desktop_shape)) {
+ desktop_shape_.Swap(&desktop_shape);
+ delegate_->OnMediaSourceShape(desktop_shape_);
+ }
+
+ scoped_ptr<VideoWriter::DataBuffer> buffer =
+ writer_->OnVideoFrame(packet->data());
+ delegate_->OnMediaSourceData(&(*(buffer->begin())), buffer->size());
+}
+
+} // namespace remoting
diff --git a/remoting/client/plugin/media_source_video_renderer.h b/remoting/client/plugin/media_source_video_renderer.h
new file mode 100644
index 0000000..2df7758
--- /dev/null
+++ b/remoting/client/plugin/media_source_video_renderer.h
@@ -0,0 +1,70 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef REMOTING_CLIENT_PLUGIN_MEDIA_SOURCE_VIDEO_RENDERER_H_
+#define REMOTING_CLIENT_PLUGIN_MEDIA_SOURCE_VIDEO_RENDERER_H_
+
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/callback.h"
+#include "base/memory/scoped_ptr.h"
+#include "remoting/client/chromoting_stats.h"
+#include "remoting/client/video_renderer.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"
+
+namespace remoting {
+
+// VideoRenderer implementation that packs data into a WebM stream that can be
+// passed to <video> tag using MediaSource API.
+class MediaSourceVideoRenderer : public VideoRenderer {
+ public:
+ class Delegate {
+ public:
+ Delegate() {}
+ virtual ~Delegate() {}
+
+ // Called when stream size changes.
+ virtual void OnMediaSourceSize(const webrtc::DesktopSize& size,
+ const webrtc::DesktopVector& dpi) = 0;
+
+ // Called when desktop shape changes.
+ virtual void OnMediaSourceShape(const webrtc::DesktopRegion& shape) = 0;
+
+ // Called when the MediaSource needs to be reset (e.g. because screen size
+ // has changed).
+ virtual void OnMediaSourceReset(const std::string& format) = 0;
+
+ // Called when new data becomes available.
+ virtual void OnMediaSourceData(uint8_t* buffer, size_t buffer_size) = 0;
+ };
+
+ MediaSourceVideoRenderer(Delegate* delegate);
+ virtual ~MediaSourceVideoRenderer();
+
+ // VideoRenderer interface.
+ virtual void Initialize(const protocol::SessionConfig& config) OVERRIDE;
+ virtual ChromotingStats* GetStats() OVERRIDE;
+ virtual void ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
+ const base::Closure& done) OVERRIDE;
+
+ private:
+ // Helper class used to generate WebM stream.
+ class VideoWriter;
+
+ Delegate* delegate_;
+ scoped_ptr<VideoWriter> writer_;
+ webrtc::DesktopVector frame_dpi_;
+ webrtc::DesktopRegion desktop_shape_;
+
+ ChromotingStats stats_;
+ int64 latest_sequence_number_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaSourceVideoRenderer);
+};
+
+} // namespace remoting
+
+#endif // REMOTING_CLIENT_PLUGIN_MEDIA_SOURCE_VIDEO_RENDERER_H_
diff --git a/remoting/client/plugin/pepper_view.h b/remoting/client/plugin/pepper_view.h
index 4f35277..098ab68 100644
--- a/remoting/client/plugin/pepper_view.h
+++ b/remoting/client/plugin/pepper_view.h
@@ -64,12 +64,6 @@ class PepperView : public FrameConsumer {
return source_size_;
}
- // Return the dimensions of the view in Density Independent Pixels (DIPs).
- // Note that there may be multiple device pixels per DIP.
- const webrtc::DesktopSize& get_view_size_dips() const {
- return dips_size_;
- }
-
private:
// Allocates a new frame buffer to supply to the FrameProducer to render into.
// Returns NULL if the maximum number of buffers has already been allocated.
diff --git a/remoting/remoting_client.gypi b/remoting/remoting_client.gypi
index 36f2c4d..3b3cd7b 100644
--- a/remoting/remoting_client.gypi
+++ b/remoting/remoting_client.gypi
@@ -25,6 +25,8 @@
'client/plugin/chromoting_instance.h',
'client/plugin/delegating_signal_strategy.cc',
'client/plugin/delegating_signal_strategy.h',
+ 'client/plugin/media_source_video_renderer.cc',
+ 'client/plugin/media_source_video_renderer.h',
'client/plugin/normalizing_input_filter.cc',
'client/plugin/normalizing_input_filter.h',
'client/plugin/normalizing_input_filter_cros.cc',
@@ -68,6 +70,7 @@
'remoting_protocol',
'../third_party/libyuv/libyuv.gyp:libyuv',
'../third_party/webrtc/modules/modules.gyp:desktop_capture',
+ '../third_party/libwebm/libwebm.gyp:libwebm',
],
'sources': [
'client/audio_decode_scheduler.cc',
diff --git a/remoting/remoting_webapp_files.gypi b/remoting/remoting_webapp_files.gypi
index 707f102..f903ead 100644
--- a/remoting/remoting_webapp_files.gypi
+++ b/remoting/remoting_webapp_files.gypi
@@ -33,7 +33,7 @@
'webapp/oauth2.js',
'webapp/oauth2_api.js',
],
- # Client JavaScript files..
+ # Client JavaScript files.
'remoting_webapp_js_client_files': [
'webapp/client_plugin.js',
# TODO(garykac) For client_screen:
@@ -42,6 +42,7 @@
'webapp/client_screen.js',
'webapp/client_session.js',
'webapp/clipboard.js',
+ 'webapp/media_source_renderer.js',
'webapp/session_connector.js',
],
# Remoting core JavaScript files.
diff --git a/remoting/webapp/client_plugin.js b/remoting/webapp/client_plugin.js
index 7ef05c7..60cb6cb 100644
--- a/remoting/webapp/client_plugin.js
+++ b/remoting/webapp/client_plugin.js
@@ -43,6 +43,7 @@ remoting.ClientPlugin = function(plugin, onExtensionMessage) {
this.onConnectionStatusUpdateHandler = function(state, error) {};
/** @param {boolean} ready Connection ready state. */
this.onConnectionReadyHandler = function(ready) {};
+
/**
* @param {string} tokenUrl Token-request URL, received from the host.
* @param {string} hostPublicKey Public key for the host.
@@ -55,6 +56,9 @@ remoting.ClientPlugin = function(plugin, onExtensionMessage) {
this.onSetCapabilitiesHandler = function (capabilities) {};
this.fetchPinHandler = function (supportsPairing) {};
+ /** @type {remoting.MediaSourceRenderer} */
+ this.mediaSourceRenderer_ = null;
+
/** @type {number} */
this.pluginApiVersion_ = -1;
/** @type {Array.<string>} */
@@ -97,7 +101,8 @@ remoting.ClientPlugin.Feature = {
THIRD_PARTY_AUTH: 'thirdPartyAuth',
TRAP_KEY: 'trapKey',
PINLESS_AUTH: 'pinlessAuth',
- EXTENSION_MESSAGE: 'extensionMessage'
+ EXTENSION_MESSAGE: 'extensionMessage',
+ MEDIA_SOURCE_RENDERING: 'mediaSourceRendering'
};
/**
@@ -121,14 +126,18 @@ remoting.ClientPlugin.prototype.API_VERSION_ = 6;
remoting.ClientPlugin.prototype.API_MIN_VERSION_ = 5;
/**
- * @param {string} messageStr Message from the plugin.
+ * @param {string|{method:string, data:Object.<string,*>}}
+ * rawMessage Message from the plugin.
+ * @private
*/
-remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
- var message = /** @type {{method:string, data:Object.<string,string>}} */
- jsonParseSafe(messageStr);
+remoting.ClientPlugin.prototype.handleMessage_ = function(rawMessage) {
+ var message =
+ /** @type {{method:string, data:Object.<string,*>}} */
+ ((typeof(rawMessage) == 'string') ? jsonParseSafe(rawMessage)
+ : rawMessage);
if (!message || !('method' in message) || !('data' in message)) {
- console.error('Received invalid message from the plugin: ' + messageStr);
+ console.error('Received invalid message from the plugin:', rawMessage);
return;
}
@@ -149,18 +158,18 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
this.plugin.height = 0;
if (typeof message.data['apiVersion'] != 'number' ||
typeof message.data['apiMinVersion'] != 'number') {
- console.error('Received invalid hello message: ' + messageStr);
+ console.error('Received invalid hello message:', rawMessage);
return;
}
this.pluginApiVersion_ = /** @type {number} */ message.data['apiVersion'];
if (this.pluginApiVersion_ >= 7) {
if (typeof message.data['apiFeatures'] != 'string') {
- console.error('Received invalid hello message: ' + messageStr);
+ console.error('Received invalid hello message:', rawMessage);
return;
}
this.pluginApiFeatures_ =
- /** @type {Array.<string>} */ tokenize(message.data['apiFeatures']);
+ tokenize((/** @type {string} */ message.data['apiFeatures']));
// Negotiate capabilities.
@@ -168,20 +177,22 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
var requestedCapabilities = [];
if ('requestedCapabilities' in message.data) {
if (typeof message.data['requestedCapabilities'] != 'string') {
- console.error('Received invalid hello message: ' + messageStr);
+ console.error('Received invalid hello message:', rawMessage);
return;
}
- requestedCapabilities = tokenize(message.data['requestedCapabilities']);
+ requestedCapabilities = tokenize(
+ (/** @type {string} */ message.data['requestedCapabilities']));
}
/** @type {!Array.<string>} */
var supportedCapabilities = [];
if ('supportedCapabilities' in message.data) {
if (typeof message.data['supportedCapabilities'] != 'string') {
- console.error('Received invalid hello message: ' + messageStr);
+ console.error('Received invalid hello message:', rawMessage);
return;
}
- supportedCapabilities = tokenize(message.data['supportedCapabilities']);
+ supportedCapabilities = tokenize(
+ (/** @type {string} */ message.data['supportedCapabilities']));
}
// At the moment the webapp does not recognize any of
@@ -213,22 +224,22 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
}
} else if (message.method == 'sendOutgoingIq') {
if (typeof message.data['iq'] != 'string') {
- console.error('Received invalid sendOutgoingIq message: ' + messageStr);
+ console.error('Received invalid sendOutgoingIq message:', rawMessage);
return;
}
- this.onOutgoingIqHandler(message.data['iq']);
+ this.onOutgoingIqHandler((/** @type {string} */ message.data['iq']));
} else if (message.method == 'logDebugMessage') {
if (typeof message.data['message'] != 'string') {
- console.error('Received invalid logDebugMessage message: ' + messageStr);
+ console.error('Received invalid logDebugMessage message:', rawMessage);
return;
}
- this.onDebugMessageHandler(message.data['message']);
+ this.onDebugMessageHandler((/** @type {string} */ message.data['message']));
} else if (message.method == 'onConnectionStatus') {
if (typeof message.data['state'] != 'string' ||
!remoting.ClientSession.State.hasOwnProperty(message.data['state']) ||
typeof message.data['error'] != 'string') {
- console.error('Received invalid onConnectionState message: ' +
- messageStr);
+ console.error('Received invalid onConnectionState message:',
+ rawMessage);
return;
}
@@ -247,7 +258,7 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
} else if (message.method == 'onDesktopSize') {
if (typeof message.data['width'] != 'number' ||
typeof message.data['height'] != 'number') {
- console.error('Received invalid onDesktopSize message: ' + messageStr);
+ console.error('Received invalid onDesktopSize message:', rawMessage);
return;
}
this.desktopWidth = /** @type {number} */ message.data['width'];
@@ -265,7 +276,7 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
typeof message.data['decodeLatency'] != 'number' ||
typeof message.data['renderLatency'] != 'number' ||
typeof message.data['roundtripLatency'] != 'number') {
- console.error('Received incorrect onPerfStats message: ' + messageStr);
+ console.error('Received incorrect onPerfStats message:', rawMessage);
return;
}
this.perfStats_ =
@@ -277,8 +288,9 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
return;
}
if (remoting.clipboard) {
- remoting.clipboard.fromHost(message.data['mimeType'],
- message.data['item']);
+ remoting.clipboard.fromHost(
+ (/** @type {string} */ message.data['mimeType']),
+ (/** @type {string} */ message.data['item']));
}
} else if (message.method == 'onFirstFrameReceived') {
if (remoting.clientSession) {
@@ -313,7 +325,8 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
}
/** @type {!Array.<string>} */
- var capabilities = tokenize(message.data['capabilities']);
+ var capabilities =
+ tokenize((/** @type {string} */ message.data['capabilities']));
this.onSetCapabilitiesHandler(capabilities);
} else if (message.method == 'fetchThirdPartyToken') {
if (typeof message.data['tokenUrl'] != 'string' ||
@@ -352,6 +365,28 @@ remoting.ClientPlugin.prototype.handleMessage_ = function(messageStr) {
message.data['type'] + ': ' + message.data['data']);
}
}
+ } else if (message.method == 'mediaSourceReset') {
+ if (typeof(message.data['format']) != 'string') {
+ console.error('Invalid mediaSourceReset message:', message.data);
+ return;
+ }
+ if (!this.mediaSourceRenderer_) {
+ console.error('Unexpected mediaSourceReset.');
+ return;
+ }
+ this.mediaSourceRenderer_.reset(
+ (/** @type {string} */ message.data['format']));
+ } else if (message.method == 'mediaSourceData') {
+ if (!(message.data['buffer'] instanceof ArrayBuffer)) {
+ console.error('Invalid mediaSourceData message:', message.data);
+ return;
+ }
+ if (!this.mediaSourceRenderer_) {
+ console.error('Unexpected mediaSourceData.');
+ return;
+ }
+ this.mediaSourceRenderer_.onIncomingData(
+ (/** @type {ArrayBuffer} */ message.data['buffer']));
}
};
@@ -565,8 +600,9 @@ remoting.ClientPlugin.prototype.notifyClientResolution =
*/
remoting.ClientPlugin.prototype.pauseVideo =
function(pause) {
- if (!this.hasFeature(remoting.ClientPlugin.Feature.PAUSE_VIDEO))
+ if (!this.hasFeature(remoting.ClientPlugin.Feature.PAUSE_VIDEO)) {
return;
+ }
this.plugin.postMessage(JSON.stringify(
{ method: 'pauseVideo', data: { pause: pause }}));
};
@@ -578,8 +614,9 @@ remoting.ClientPlugin.prototype.pauseVideo =
*/
remoting.ClientPlugin.prototype.pauseAudio =
function(pause) {
- if (!this.hasFeature(remoting.ClientPlugin.Feature.PAUSE_AUDIO))
+ if (!this.hasFeature(remoting.ClientPlugin.Feature.PAUSE_AUDIO)) {
return;
+ }
this.plugin.postMessage(JSON.stringify(
{ method: 'pauseAudio', data: { pause: pause }}));
};
@@ -658,6 +695,21 @@ remoting.ClientPlugin.prototype.sendClientMessage =
};
/**
+ * Request MediaSource-based rendering.
+ *
+ * @param {remoting.MediaSourceRenderer} mediaSourceRenderer
+ */
+remoting.ClientPlugin.prototype.enableMediaSourceRendering =
+ function(mediaSourceRenderer) {
+ if (!this.hasFeature(remoting.ClientPlugin.Feature.MEDIA_SOURCE_RENDERING)) {
+ return;
+ }
+ this.mediaSourceRenderer_ = mediaSourceRenderer;
+ this.plugin.postMessage(JSON.stringify(
+ { method: 'enableMediaSourceRendering', data: {} }));
+};
+
+/**
* If we haven't yet received a "hello" message from the plugin, change its
* size so that the user can confirm it if click-to-play is enabled, or can
* see the "this plugin is disabled" message if it is actually disabled.
diff --git a/remoting/webapp/client_session.js b/remoting/webapp/client_session.js
index 021446a..3a78b4e 100644
--- a/remoting/webapp/client_session.js
+++ b/remoting/webapp/client_session.js
@@ -124,6 +124,9 @@ remoting.ClientSession = function(accessCode, fetchPin, fetchThirdPartyToken,
document.getElementById('send-keys-menu')
);
+ /** @type {HTMLMediaElement} @private */
+ this.video_ = null;
+
/** @type {HTMLElement} @private */
this.resizeToClientButton_ =
document.getElementById('screen-resize-to-client');
@@ -448,6 +451,26 @@ remoting.ClientSession.prototype.onPluginInitialized_ = function(initialized) {
this.applyRemapKeys_(true);
}
+ // Enable MediaSource-based rendering if available.
+ if (remoting.settings.USE_MEDIA_SOURCE_RENDERING &&
+ this.plugin_.hasFeature(
+ remoting.ClientPlugin.Feature.MEDIA_SOURCE_RENDERING)) {
+ this.video_ = /** @type {HTMLMediaElement} */(
+ document.getElementById('mediasource-video-output'));
+ // Make sure that the <video> element is hidden until we get the first
+ // frame.
+ this.video_.style.width = '0px';
+ this.video_.style.height = '0px';
+
+ var renderer = new remoting.MediaSourceRenderer(this.video_);
+ this.plugin_.enableMediaSourceRendering(renderer);
+ /** @type {HTMLElement} */(document.getElementById('video-container'))
+ .classList.add('mediasource-rendering');
+ } else {
+ /** @type {HTMLElement} */(document.getElementById('video-container'))
+ .classList.remove('mediasource-rendering');
+ }
+
/** @param {string} msg The IQ stanza to send. */
this.plugin_.onOutgoingIqHandler = this.sendIq_.bind(this);
/** @param {string} msg The message to log. */
@@ -493,6 +516,11 @@ remoting.ClientSession.prototype.removePlugin = function() {
// In case the user had selected full-screen mode, cancel it now.
document.webkitCancelFullScreen();
+
+ // Remove mediasource-rendering class from video-container - this will also
+ // hide the <video> element.
+ /** @type {HTMLElement} */(document.getElementById('video-container'))
+ .classList.remove('mediasource-rendering');
};
/**
@@ -1095,13 +1123,18 @@ remoting.ClientSession.prototype.updateDimensions = function() {
}
}
- var pluginWidth = desktopWidth * scale;
- var pluginHeight = desktopHeight * scale;
+ var pluginWidth = Math.round(desktopWidth * scale);
+ var pluginHeight = Math.round(desktopHeight * scale);
+
+ if (this.video_) {
+ this.video_.style.width = pluginWidth + 'px';
+ this.video_.style.height = pluginHeight + 'px';
+ }
// Resize the plugin if necessary.
// TODO(wez): Handle high-DPI to high-DPI properly (crbug.com/135089).
- this.plugin_.element().width = pluginWidth;
- this.plugin_.element().height = pluginHeight;
+ this.plugin_.element().style.width = pluginWidth + 'px';
+ this.plugin_.element().style.height = pluginHeight + 'px';
// Position the container.
// Note that clientWidth/Height take into account scrollbars.
diff --git a/remoting/webapp/js_proto/dom_proto.js b/remoting/webapp/js_proto/dom_proto.js
index 0ab4e4a..64f330f 100644
--- a/remoting/webapp/js_proto/dom_proto.js
+++ b/remoting/webapp/js_proto/dom_proto.js
@@ -85,12 +85,16 @@ Node.prototype.value;
Node.prototype.style;
-/** @constructor
- @param {function(Array.<MutationRecord>):void} callback */
+/**
+ * @constructor
+ * @param {function(Array.<MutationRecord>):void} callback
+ */
var MutationObserver = function(callback) {};
-/** @param {Element} element
- @param {Object} options */
+/**
+ * @param {Element} element
+ * @param {Object} options
+ */
MutationObserver.prototype.observe = function(element, options) {};
@@ -109,3 +113,57 @@ MutationRecord.prototype.type;
/** @type {{getRandomValues: function((Uint16Array|Uint8Array)):void}} */
Window.prototype.crypto;
+
+
+/**
+ * @constructor
+ * @implements {EventTarget} */
+var EventTargetStub = function() {};
+
+/**
+ * @param {string} type
+ * @param {(EventListener|function(Event): (boolean|undefined|null))} listener
+ * @param {boolean=} opt_useCapture
+ */
+EventTargetStub.prototype.addEventListener =
+ function(type, listener, opt_useCapture) {}
+
+/**
+ * @param {string} type
+ * @param {(EventListener|function(Event): (boolean|undefined|null))} listener
+ * @param {boolean=} opt_useCapture
+ */
+EventTargetStub.prototype.removeEventListener =
+ function(type, listener, opt_useCapture) {}
+
+/**
+ * @param {Event} event
+ */
+EventTargetStub.prototype.dispatchEvent =
+ function(event) {}
+
+/**
+ * @constructor
+ * @extends {EventTargetStub}
+ */
+var SourceBuffer = function() {}
+
+/** @type {boolean} */
+SourceBuffer.prototype.updating;
+
+/**
+ * @param {ArrayBuffer} buffer
+ */
+SourceBuffer.prototype.appendBuffer = function(buffer) {}
+
+/**
+ * @constructor
+ * @extends {EventTargetStub}
+ */
+var MediaSource = function() {}
+
+/**
+ * @param {string} format
+ * @return {SourceBuffer}
+ */
+MediaSource.prototype.addSourceBuffer = function(format) {}
diff --git a/remoting/webapp/main.css b/remoting/webapp/main.css
index b5b75b6..7673498 100644
--- a/remoting/webapp/main.css
+++ b/remoting/webapp/main.css
@@ -739,3 +739,35 @@ html.apps-v2.scrollable {
left: 0;
width: 100%;
}
+
+/* video-container needs relative position so that mediasource-video-output can
+ * be positioned relative to the parent with position:absolute. */
+#video-container {
+ position: relative;
+}
+
+/* mediasource-video-output is hidden by default. */
+#mediasource-video-output {
+ display: none;
+}
+
+/* Use absolute positioning for mediasource-video-output so that it's rendered
+ * at the same position as the plugin. */
+#video-container.mediasource-rendering #mediasource-video-output {
+ display: block;
+ position: absolute;
+ left: 0;
+ top: 0;
+ box-shadow: 0 0 8px 0 black;
+}
+
+/*
+ * With MediaSource-based rendering the plugin is transparent and is placed on
+ * top of the <video> element so that it can still receive mouse events.
+ *
+ * TODO(sergeyu): This is temporary solution. Ideally mouse and keyboard events
+ * should be captured on JS level and passed to the plugin.
+ */
+#video-container.mediasource-rendering #client-plugin-container {
+ opacity: 0;
+}
diff --git a/remoting/webapp/main.html b/remoting/webapp/main.html
index f99c7a8..9106c2d 100644
--- a/remoting/webapp/main.html
+++ b/remoting/webapp/main.html
@@ -39,6 +39,7 @@ found in the LICENSE file.
<script src="identity.js"></script>
<script src="l10n.js"></script>
<script src="log_to_server.js"></script>
+ <script src="media_source_renderer.js"></script>
<script src="menu_button.js"></script>
<script src="oauth2.js"></script>
<script src="oauth2_api.js"></script>
@@ -792,7 +793,10 @@ found in the LICENSE file.
</div> <!-- session-toolbar -->
<div class="vertically-centered">
<div class="horizontally-centered">
- <div id="client-plugin-container"></div>
+ <div id="video-container">
+ <video id="mediasource-video-output"></video>
+ <div id="client-plugin-container"></div>
+ </div>
</div>
</div>
</div> <!-- session-mode -->
diff --git a/remoting/webapp/media_source_renderer.js b/remoting/webapp/media_source_renderer.js
new file mode 100644
index 0000000..a79173b
--- /dev/null
+++ b/remoting/webapp/media_source_renderer.js
@@ -0,0 +1,86 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+'use strict';
+
+/** @suppress {duplicate} */
+var remoting = remoting || {};
+
+/**
+ * @param {HTMLMediaElement} videoTag <video> tag to render to.
+ * @constructor
+ */
+remoting.MediaSourceRenderer = function(videoTag) {
+ /** @type {HTMLMediaElement} */
+ this.video_ = videoTag;
+
+ /** @type {MediaSource} */
+ this.mediaSource_ = null;
+
+ /** @type {SourceBuffer} */
+ this.sourceBuffer_ = null;
+
+ /** @type {!Array.<ArrayBuffer>} Queue of pending buffers that haven't been
+ * processed. */
+ this.buffers_ = [];
+}
+
+/**
+ * @param {string} format Format of the stream.
+ */
+remoting.MediaSourceRenderer.prototype.reset = function(format) {
+ // Create a new MediaSource instance.
+ this.sourceBuffer_ = null;
+ this.mediaSource_ = new MediaSource();
+ this.mediaSource_.addEventListener('sourceopen',
+ this.onSourceOpen_.bind(this, format));
+ this.mediaSource_.addEventListener('sourceclose', function(e) {
+ console.error("MediaSource closed unexpectedly.");
+ });
+ this.mediaSource_.addEventListener('sourceended', function(e) {
+ console.error("MediaSource ended unexpectedly.");
+ });
+
+ // Start playback from new MediaSource.
+ this.video_.src =
+ /** @type {string} */(
+ window.URL.createObjectURL(/** @type {!Blob} */(this.mediaSource_)));
+ this.video_.play();
+}
+
+/**
+ * @param {string} format
+ * @private
+ */
+remoting.MediaSourceRenderer.prototype.onSourceOpen_ = function(format) {
+ this.sourceBuffer_ =
+ this.mediaSource_.addSourceBuffer(format);
+
+ this.sourceBuffer_.addEventListener(
+ 'updateend', this.processPendingData_.bind(this));
+ this.processPendingData_();
+}
+
+/**
+ * @private
+ */
+remoting.MediaSourceRenderer.prototype.processPendingData_ = function() {
+ if (this.sourceBuffer_) {
+ while (this.buffers_.length > 0 && !this.sourceBuffer_.updating) {
+ // TODO(sergeyu): Figure out the way to determine when a frame is rendered
+ // and use it to report performance statistics.
+ this.sourceBuffer_.appendBuffer(
+ /** @type {ArrayBuffer} */(this.buffers_.shift()));
+ }
+ }
+}
+
+/**
+ * @param {ArrayBuffer} data
+ */
+remoting.MediaSourceRenderer.prototype.onIncomingData = function(data) {
+ this.buffers_.push(data);
+ this.processPendingData_();
+}
+
diff --git a/remoting/webapp/plugin_settings.js b/remoting/webapp/plugin_settings.js
index 81e95bd..8d0a166 100644
--- a/remoting/webapp/plugin_settings.js
+++ b/remoting/webapp/plugin_settings.js
@@ -51,3 +51,6 @@ remoting.Settings.prototype.XMPP_SERVER_USE_TLS =
/** @type {string} The third party auth redirect URI. */
remoting.Settings.prototype.THIRD_PARTY_AUTH_REDIRECT_URI =
'THIRD_PARTY_AUTH_REDIRECT_URL';
+
+// Whether to use MediaSource API for video rendering.
+remoting.Settings.prototype.USE_MEDIA_SOURCE_RENDERING = false;
diff --git a/third_party/libwebm/libwebm.gyp b/third_party/libwebm/libwebm.gyp
index e277407..2374d67 100644
--- a/third_party/libwebm/libwebm.gyp
+++ b/third_party/libwebm/libwebm.gyp
@@ -12,7 +12,13 @@
'type': 'static_library',
'sources': [
'<@(libwebm_sources)'
- ]
+ ],
+ 'defines!': [
+ # This macro is declared in common.gypi which causes warning when
+ # compiling mkvmuxerutil.cpp which also defines it.
+ '_CRT_RAND_S',
+ ],
+ 'msvs_disabled_warnings': [ 4267 ],
}, # target libwebm
]
}