summaryrefslogtreecommitdiffstats
path: root/remoting
diff options
context:
space:
mode:
authorsergeyu <sergeyu@chromium.org>2015-09-29 23:40:27 -0700
committerCommit bot <commit-bot@chromium.org>2015-09-30 06:41:15 +0000
commit752c6e600df859a34cedae1b9aa2ce26c198a77b (patch)
tree37c5cade6d92766f6c7e977cc4d50d835eebedbd /remoting
parent995781750a4654d81f7d4ff0e59194f00f951c95 (diff)
downloadchromium_src-752c6e600df859a34cedae1b9aa2ce26c198a77b.zip
chromium_src-752c6e600df859a34cedae1b9aa2ce26c198a77b.tar.gz
chromium_src-752c6e600df859a34cedae1b9aa2ce26c198a77b.tar.bz2
Add UMA histograms for more detailed latency tracking on the CRD host.
Previously only two delays in CRD hosts were tracked: capture time and encode time. This CL adds 4 other values that are now measured on the host, sent to the client and logged to UMA: - capture_pending_time: time between input event being received and when the next frame starts capturing. - capture_overhead_time: extra latency for the capturer caused by IPC and threading. - encode_pending_time: delay between capturer and encoder. - send_pending_time: time encoded packets wait in the send queue. VideoFramePump is responsible for measuring all latency values sent to the client except for capture_time_ms which is still measured by the capturer. Review URL: https://codereview.chromium.org/1365663003 Cr-Commit-Position: refs/heads/master@{#351504}
Diffstat (limited to 'remoting')
-rw-r--r--remoting/codec/video_encoder_helper.cc3
-rw-r--r--remoting/codec/video_encoder_helper_unittest.cc1
-rw-r--r--remoting/codec/video_encoder_verbatim.cc7
-rw-r--r--remoting/codec/video_encoder_vpx.cc8
-rw-r--r--remoting/host/client_session.cc7
-rw-r--r--remoting/host/client_session.h4
-rw-r--r--remoting/host/video_frame_pump.cc145
-rw-r--r--remoting/host/video_frame_pump.h63
-rw-r--r--remoting/proto/video.proto13
-rw-r--r--remoting/protocol/connection_to_client.cc8
-rw-r--r--remoting/protocol/connection_to_client.h8
-rw-r--r--remoting/protocol/host_event_dispatcher.cc10
-rw-r--r--remoting/protocol/host_event_dispatcher.h8
-rw-r--r--remoting/protocol/performance_tracker.cc112
-rw-r--r--remoting/protocol/performance_tracker.h4
-rw-r--r--remoting/protocol/protocol_mock_objects.h4
16 files changed, 282 insertions, 123 deletions
diff --git a/remoting/codec/video_encoder_helper.cc b/remoting/codec/video_encoder_helper.cc
index 5310d01..288ec72a 100644
--- a/remoting/codec/video_encoder_helper.cc
+++ b/remoting/codec/video_encoder_helper.cc
@@ -57,8 +57,7 @@ VideoEncoderHelper::CreateVideoPacketWithUpdatedRegion(
}
}
- // Store the capture time and frame DPI.
- packet->set_capture_time_ms(frame.capture_time_ms());
+ // Store frame DPI.
if (!frame.dpi().is_zero()) {
packet->mutable_format()->set_x_dpi(frame.dpi().x());
packet->mutable_format()->set_y_dpi(frame.dpi().y());
diff --git a/remoting/codec/video_encoder_helper_unittest.cc b/remoting/codec/video_encoder_helper_unittest.cc
index e464802..569ecac 100644
--- a/remoting/codec/video_encoder_helper_unittest.cc
+++ b/remoting/codec/video_encoder_helper_unittest.cc
@@ -36,7 +36,6 @@ TEST(VideoEncoderHelperTest, PropagatesCommonFields) {
EXPECT_TRUE(packet->format().has_x_dpi());
EXPECT_TRUE(packet->format().has_y_dpi());
- EXPECT_TRUE(packet->has_capture_time_ms());
EXPECT_EQ(1, packet->dirty_rects().size());
ASSERT_TRUE(packet->has_use_desktop_shape());
diff --git a/remoting/codec/video_encoder_verbatim.cc b/remoting/codec/video_encoder_verbatim.cc
index 2948e3e..8d1befb 100644
--- a/remoting/codec/video_encoder_verbatim.cc
+++ b/remoting/codec/video_encoder_verbatim.cc
@@ -6,7 +6,6 @@
#include "base/logging.h"
#include "base/stl_util.h"
-#include "base/time/time.h"
#include "remoting/base/util.h"
#include "remoting/proto/video.pb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
@@ -32,8 +31,6 @@ scoped_ptr<VideoPacket> VideoEncoderVerbatim::Encode(
if (frame.updated_region().is_empty())
return nullptr;
- base::Time encode_start_time = base::Time::Now();
-
// Create a VideoPacket with common fields (e.g. DPI, rects, shape) set.
scoped_ptr<VideoPacket> packet(helper_.CreateVideoPacket(frame));
packet->mutable_format()->set_encoding(VideoPacketFormat::ENCODING_VERBATIM);
@@ -64,10 +61,6 @@ scoped_ptr<VideoPacket> VideoEncoderVerbatim::Encode(
}
}
- // Note the time taken to encode the pixel data.
- packet->set_encode_time_ms(
- (base::Time::Now() - encode_start_time).InMillisecondsRoundedUp());
-
return packet.Pass();
}
diff --git a/remoting/codec/video_encoder_vpx.cc b/remoting/codec/video_encoder_vpx.cc
index 1fa19a4..0dcd456 100644
--- a/remoting/codec/video_encoder_vpx.cc
+++ b/remoting/codec/video_encoder_vpx.cc
@@ -270,8 +270,6 @@ scoped_ptr<VideoPacket> VideoEncoderVpx::Encode(
if (frame.updated_region().is_empty() && !encode_unchanged_frame_)
return nullptr;
- base::TimeTicks encode_start_time = base::TimeTicks::Now();
-
// Create or reconfigure the codec to match the size of |frame|.
if (!codec_ ||
(image_ &&
@@ -296,7 +294,7 @@ scoped_ptr<VideoPacket> VideoEncoderVpx::Encode(
}
// Do the actual encoding.
- int timestamp = (encode_start_time - timestamp_base_).InMilliseconds();
+ int timestamp = (base::TimeTicks::Now() - timestamp_base_).InMilliseconds();
vpx_codec_err_t ret = vpx_codec_encode(
codec_.get(), image_.get(), timestamp, 1, 0, VPX_DL_REALTIME);
DCHECK_EQ(ret, VPX_CODEC_OK)
@@ -341,10 +339,6 @@ scoped_ptr<VideoPacket> VideoEncoderVpx::Encode(
}
}
- // Note the time taken to encode the pixel data.
- packet->set_encode_time_ms(
- (base::TimeTicks::Now() - encode_start_time).InMillisecondsRoundedUp());
-
return packet.Pass();
}
diff --git a/remoting/host/client_session.cc b/remoting/host/client_session.cc
index 18b3c66..fb404e8 100644
--- a/remoting/host/client_session.cc
+++ b/remoting/host/client_session.cc
@@ -404,13 +404,14 @@ void ClientSession::OnConnectionClosed(
event_handler_->OnSessionClosed(this);
}
-void ClientSession::OnEventTimestamp(protocol::ConnectionToClient* connection,
- int64 timestamp) {
+void ClientSession::OnInputEventReceived(
+ protocol::ConnectionToClient* connection,
+ int64_t event_timestamp) {
DCHECK(CalledOnValidThread());
DCHECK_EQ(connection_.get(), connection);
if (video_frame_pump_.get())
- video_frame_pump_->SetLatestEventTimestamp(timestamp);
+ video_frame_pump_->OnInputEventReceived(event_timestamp);
}
void ClientSession::OnRouteChange(
diff --git a/remoting/host/client_session.h b/remoting/host/client_session.h
index 7d34439..d41e83e 100644
--- a/remoting/host/client_session.h
+++ b/remoting/host/client_session.h
@@ -122,8 +122,8 @@ class ClientSession
protocol::ConnectionToClient* connection) override;
void OnConnectionClosed(protocol::ConnectionToClient* connection,
protocol::ErrorCode error) override;
- void OnEventTimestamp(protocol::ConnectionToClient* connection,
- int64 timestamp) override;
+ void OnInputEventReceived(protocol::ConnectionToClient* connection,
+ int64_t timestamp) override;
void OnRouteChange(protocol::ConnectionToClient* connection,
const std::string& channel_name,
const protocol::TransportRoute& route) override;
diff --git a/remoting/host/video_frame_pump.cc b/remoting/host/video_frame_pump.cc
index 37d9da7..58b72db 100644
--- a/remoting/host/video_frame_pump.cc
+++ b/remoting/host/video_frame_pump.cc
@@ -20,28 +20,6 @@
namespace remoting {
-namespace {
-
-scoped_ptr<VideoPacket> EncodeFrame(VideoEncoder* encoder,
- scoped_ptr<webrtc::DesktopFrame> frame) {
- scoped_ptr<VideoPacket> packet;
-
- // If |frame| is non-NULL then let the encoder process it.
- if (frame) {
- packet = encoder->Encode(*frame);
- }
-
- // If |frame| is NULL, or the encoder returned nothing, return an empty
- // packet.
- if (!packet) {
- packet.reset(new VideoPacket());
- }
-
- return packet.Pass();
-}
-
-} // namespace
-
// Interval between empty keep-alive frames. These frames are sent only when the
// stream is paused or inactive for some other reason (e.g. when blocked on
// capturer). To prevent PseudoTCP from resetting congestion window this value
@@ -50,6 +28,16 @@ static const int kKeepAlivePacketIntervalMs = 200;
static bool g_enable_timestamps = false;
+VideoFramePump::FrameTimestamps::FrameTimestamps() {}
+VideoFramePump::FrameTimestamps::~FrameTimestamps() {}
+
+VideoFramePump::PacketWithTimestamps::PacketWithTimestamps(
+ scoped_ptr<VideoPacket> packet,
+ scoped_ptr<FrameTimestamps> timestamps)
+ : packet(packet.Pass()), timestamps(timestamps.Pass()) {}
+
+VideoFramePump::PacketWithTimestamps::~PacketWithTimestamps() {}
+
// static
void VideoFramePump::EnableTimestampsForTests() {
g_enable_timestamps = true;
@@ -72,7 +60,6 @@ VideoFramePump::VideoFramePump(
false),
capture_scheduler_(base::Bind(&VideoFramePump::CaptureNextFrame,
base::Unretained(this))),
- latest_event_timestamp_(0),
weak_factory_(this) {
DCHECK(encoder_);
DCHECK(video_stub_);
@@ -91,10 +78,13 @@ void VideoFramePump::Pause(bool pause) {
capture_scheduler_.Pause(pause);
}
-void VideoFramePump::SetLatestEventTimestamp(int64 latest_event_timestamp) {
+void VideoFramePump::OnInputEventReceived(int64_t event_timestamp) {
DCHECK(thread_checker_.CalledOnValidThread());
- latest_event_timestamp_ = latest_event_timestamp;
+ if (!next_frame_timestamps_)
+ next_frame_timestamps_.reset(new FrameTimestamps());
+ next_frame_timestamps_->input_event_client_timestamp = event_timestamp;
+ next_frame_timestamps_->input_event_received_time = base::TimeTicks::Now();
}
void VideoFramePump::SetLosslessEncode(bool want_lossless) {
@@ -123,45 +113,126 @@ void VideoFramePump::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
capture_scheduler_.OnCaptureCompleted();
+ captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now();
+
// Even when |frame| is nullptr we still need to post it to the encode thread
// to make sure frames are freed in the same order they are received and
// that we don't start capturing frame n+2 before frame n is freed.
base::PostTaskAndReplyWithResult(
encode_task_runner_.get(), FROM_HERE,
- base::Bind(&EncodeFrame, encoder_.get(),
- base::Passed(make_scoped_ptr(frame))),
- base::Bind(&VideoFramePump::SendEncodedFrame, weak_factory_.GetWeakPtr(),
- latest_event_timestamp_, base::TimeTicks::Now()));
+ base::Bind(&VideoFramePump::EncodeFrame, encoder_.get(),
+ base::Passed(make_scoped_ptr(frame)),
+ base::Passed(&captured_frame_timestamps_)),
+ base::Bind(&VideoFramePump::OnFrameEncoded, weak_factory_.GetWeakPtr()));
}
void VideoFramePump::CaptureNextFrame() {
DCHECK(thread_checker_.CalledOnValidThread());
+ // |next_frame_timestamps_| is not set if no input events were received since
+ // the previous frame. In that case create FrameTimestamps instance without
+ // setting |input_event_client_timestamp| and |input_event_received_time|.
+ if (!next_frame_timestamps_)
+ next_frame_timestamps_.reset(new FrameTimestamps());
+
+ captured_frame_timestamps_ = next_frame_timestamps_.Pass();
+ captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now();
+
capturer_->Capture(webrtc::DesktopRegion());
}
-void VideoFramePump::SendEncodedFrame(int64 latest_event_timestamp,
- base::TimeTicks timestamp,
- scoped_ptr<VideoPacket> packet) {
+// static
+scoped_ptr<VideoFramePump::PacketWithTimestamps> VideoFramePump::EncodeFrame(
+ VideoEncoder* encoder,
+ scoped_ptr<webrtc::DesktopFrame> frame,
+ scoped_ptr<FrameTimestamps> timestamps) {
+ timestamps->encode_started_time = base::TimeTicks::Now();
+
+ scoped_ptr<VideoPacket> packet;
+ // If |frame| is non-NULL then let the encoder process it.
+ if (frame)
+ packet = encoder->Encode(*frame);
+
+ // If |frame| is NULL, or the encoder returned nothing, return an empty
+ // packet.
+ if (!packet)
+ packet.reset(new VideoPacket());
+
+ if (frame)
+ packet->set_capture_time_ms(frame->capture_time_ms());
+
+ timestamps->encode_ended_time = base::TimeTicks::Now();
+
+ return make_scoped_ptr(
+ new PacketWithTimestamps(packet.Pass(), timestamps.Pass()));
+}
+
+void VideoFramePump::OnFrameEncoded(scoped_ptr<PacketWithTimestamps> packet) {
DCHECK(thread_checker_.CalledOnValidThread());
- if (g_enable_timestamps)
- packet->set_timestamp(timestamp.ToInternalValue());
+ capture_scheduler_.OnFrameEncoded(packet->packet.get());
- packet->set_latest_event_timestamp(latest_event_timestamp);
+ if (send_pending_) {
+ pending_packets_.push_back(packet.Pass());
+ } else {
+ SendPacket(packet.Pass());
+ }
+}
- capture_scheduler_.OnFrameEncoded(packet.get());
+void VideoFramePump::SendPacket(scoped_ptr<PacketWithTimestamps> packet) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(!send_pending_);
- video_stub_->ProcessVideoPacket(packet.Pass(),
+ packet->timestamps->can_send_time = base::TimeTicks::Now();
+ UpdateFrameTimers(packet->packet.get(), packet->timestamps.get());
+
+ send_pending_ = true;
+ video_stub_->ProcessVideoPacket(packet->packet.Pass(),
base::Bind(&VideoFramePump::OnVideoPacketSent,
weak_factory_.GetWeakPtr()));
}
+void VideoFramePump::UpdateFrameTimers(VideoPacket* packet,
+ FrameTimestamps* timestamps) {
+ if (g_enable_timestamps)
+ packet->set_timestamp(timestamps->capture_ended_time.ToInternalValue());
+
+
+ if (!timestamps->input_event_received_time.is_null()) {
+ packet->set_capture_pending_time_ms((timestamps->capture_started_time -
+ timestamps->input_event_received_time)
+ .InMilliseconds());
+ packet->set_latest_event_timestamp(
+ timestamps->input_event_client_timestamp);
+ }
+
+ packet->set_capture_overhead_time_ms(
+ (timestamps->capture_ended_time - timestamps->capture_started_time)
+ .InMilliseconds() -
+ packet->capture_time_ms());
+
+ packet->set_encode_pending_time_ms(
+ (timestamps->encode_started_time - timestamps->capture_ended_time)
+ .InMilliseconds());
+
+ packet->set_send_pending_time_ms(
+ (timestamps->can_send_time - timestamps->encode_ended_time)
+ .InMilliseconds());
+}
+
void VideoFramePump::OnVideoPacketSent() {
DCHECK(thread_checker_.CalledOnValidThread());
+ send_pending_ = false;
capture_scheduler_.OnFrameSent();
keep_alive_timer_.Reset();
+
+ // Send next packet if any.
+ if (!pending_packets_.empty()) {
+ scoped_ptr<PacketWithTimestamps> next(pending_packets_.front());
+ pending_packets_.weak_erase(pending_packets_.begin());
+ SendPacket(next.Pass());
+ }
}
void VideoFramePump::SendKeepAlivePacket() {
diff --git a/remoting/host/video_frame_pump.h b/remoting/host/video_frame_pump.h
index 5bafa3e..f4542ca 100644
--- a/remoting/host/video_frame_pump.h
+++ b/remoting/host/video_frame_pump.h
@@ -8,6 +8,7 @@
#include "base/basictypes.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
+#include "base/memory/scoped_vector.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "base/timer/timer.h"
@@ -82,9 +83,8 @@ class VideoFramePump : public webrtc::DesktopCapturer::Callback {
// only affects capture scheduling and does not stop/start the capturer.
void Pause(bool pause);
- // Updates event timestamp from the last event received from the client. This
- // value is sent back to the client for roundtrip latency estimates.
- void SetLatestEventTimestamp(int64 latest_event_timestamp);
+  // Called whenever an input event is received from the client.
+ void OnInputEventReceived(int64_t event_timestamp);
// Sets whether the video encoder should be requested to encode losslessly,
// or to use a lossless color space (typically requiring higher bandwidth).
@@ -96,6 +96,33 @@ class VideoFramePump : public webrtc::DesktopCapturer::Callback {
}
private:
+ struct FrameTimestamps {
+ FrameTimestamps();
+ ~FrameTimestamps();
+
+ // The following two fields are set only for one frame after each incoming
+ // input event. |input_event_client_timestamp| is event timestamp
+ // received from the client. |input_event_received_time| is local time when
+ // the event was received.
+ int64_t input_event_client_timestamp = -1;
+ base::TimeTicks input_event_received_time;
+
+ base::TimeTicks capture_started_time;
+ base::TimeTicks capture_ended_time;
+ base::TimeTicks encode_started_time;
+ base::TimeTicks encode_ended_time;
+ base::TimeTicks can_send_time;
+ };
+
+ struct PacketWithTimestamps {
+ PacketWithTimestamps(scoped_ptr<VideoPacket> packet,
+ scoped_ptr<FrameTimestamps> timestamps);
+ ~PacketWithTimestamps();
+
+ scoped_ptr<VideoPacket> packet;
+ scoped_ptr<FrameTimestamps> timestamps;
+ };
+
// webrtc::DesktopCapturer::Callback interface.
webrtc::SharedMemory* CreateSharedMemory(size_t size) override;
void OnCaptureCompleted(webrtc::DesktopFrame* frame) override;
@@ -103,10 +130,21 @@ class VideoFramePump : public webrtc::DesktopCapturer::Callback {
// Callback for CaptureScheduler.
void CaptureNextFrame();
- // Sends encoded frame
- void SendEncodedFrame(int64 latest_event_timestamp,
- base::TimeTicks timestamp,
- scoped_ptr<VideoPacket> packet);
+ // Task running on the encoder thread to encode the |frame|.
+ static scoped_ptr<PacketWithTimestamps> EncodeFrame(
+ VideoEncoder* encoder,
+ scoped_ptr<webrtc::DesktopFrame> frame,
+ scoped_ptr<FrameTimestamps> timestamps);
+
+ // Task called when a frame has finished encoding.
+ void OnFrameEncoded(scoped_ptr<PacketWithTimestamps> packet);
+
+ // Sends |packet| to the client.
+ void SendPacket(scoped_ptr<PacketWithTimestamps> packet);
+
+ // Helper called from SendPacket() to calculate timing fields in the |packet|
+ // before sending it.
+ void UpdateFrameTimers(VideoPacket* packet, FrameTimestamps* timestamps);
// Callback passed to |video_stub_|.
void OnVideoPacketSent();
@@ -137,8 +175,15 @@ class VideoFramePump : public webrtc::DesktopCapturer::Callback {
// captured.
CaptureScheduler capture_scheduler_;
- // Number updated by the caller to trace performance.
- int64 latest_event_timestamp_;
+ // Timestamps for the frame to be captured next.
+ scoped_ptr<FrameTimestamps> next_frame_timestamps_;
+
+ // Timestamps for the frame that's being captured.
+ scoped_ptr<FrameTimestamps> captured_frame_timestamps_;
+
+ bool send_pending_ = false;
+
+ ScopedVector<PacketWithTimestamps> pending_packets_;
base::ThreadChecker thread_checker_;
diff --git a/remoting/proto/video.proto b/remoting/proto/video.proto
index b2b1258..66e7ce2 100644
--- a/remoting/proto/video.proto
+++ b/remoting/proto/video.proto
@@ -71,6 +71,19 @@ message VideoPacket {
// Frame identifier used to match VideoFrame and VideoAck.
optional int32 frame_id = 13;
+
+ // Time from when the last event was received until capturing has started.
+ optional int64 capture_pending_time_ms = 14;
+
+ // Total overhead time for IPC and threading when capturing frames.
+ optional int64 capture_overhead_time_ms = 15;
+
+ // Time between when the frame was captured and when encoder started encoding
+ // it.
+ optional int64 encode_pending_time_ms = 16;
+
+  // Time the encoded frame spent waiting in the send queue before being
+  // sent to the client.
+ optional int64 send_pending_time_ms = 17;
}
// VideoAck acknowledges that the frame in the VideoPacket with the same
diff --git a/remoting/protocol/connection_to_client.cc b/remoting/protocol/connection_to_client.cc
index fed09fbb..18943f4 100644
--- a/remoting/protocol/connection_to_client.cc
+++ b/remoting/protocol/connection_to_client.cc
@@ -46,9 +46,9 @@ void ConnectionToClient::Disconnect() {
session_->Close();
}
-void ConnectionToClient::OnEventTimestamp(int64 sequence_number) {
+void ConnectionToClient::OnInputEventReceived(int64_t timestamp) {
DCHECK(CalledOnValidThread());
- handler_->OnEventTimestamp(this, sequence_number);
+ handler_->OnInputEventReceived(this, timestamp);
}
VideoStub* ConnectionToClient::video_stub() {
@@ -112,8 +112,8 @@ void ConnectionToClient::OnSessionStateChange(Session::State state) {
event_dispatcher_.reset(new HostEventDispatcher());
event_dispatcher_->Init(session_.get(), session_->config().event_config(),
this);
- event_dispatcher_->set_event_timestamp_callback(base::Bind(
- &ConnectionToClient::OnEventTimestamp, base::Unretained(this)));
+ event_dispatcher_->set_on_input_event_callback(base::Bind(
+ &ConnectionToClient::OnInputEventReceived, base::Unretained(this)));
video_dispatcher_.reset(new HostVideoDispatcher());
video_dispatcher_->Init(session_.get(), session_->config().video_config(),
diff --git a/remoting/protocol/connection_to_client.h b/remoting/protocol/connection_to_client.h
index 43c0b1a..2425546 100644
--- a/remoting/protocol/connection_to_client.h
+++ b/remoting/protocol/connection_to_client.h
@@ -52,9 +52,9 @@ class ConnectionToClient : public base::NonThreadSafe,
virtual void OnConnectionClosed(ConnectionToClient* connection,
ErrorCode error) = 0;
- // Called when sequence number is updated.
- virtual void OnEventTimestamp(ConnectionToClient* connection,
- int64 timestamp) = 0;
+ // Called when a new input event is received.
+ virtual void OnInputEventReceived(ConnectionToClient* connection,
+ int64_t timestamp) = 0;
// Called on notification of a route change event, which happens when a
// channel is connected.
@@ -83,7 +83,7 @@ class ConnectionToClient : public base::NonThreadSafe,
// Callback for HostEventDispatcher to be called with a timestamp for each
// received event.
- virtual void OnEventTimestamp(int64 timestamp);
+ virtual void OnInputEventReceived(int64_t timestamp);
// Get the stubs used by the host to transmit messages to the client.
// The stubs must not be accessed before OnConnectionAuthenticated(), or
diff --git a/remoting/protocol/host_event_dispatcher.cc b/remoting/protocol/host_event_dispatcher.cc
index 8006eac..f3fa8d9 100644
--- a/remoting/protocol/host_event_dispatcher.cc
+++ b/remoting/protocol/host_event_dispatcher.cc
@@ -19,11 +19,9 @@ HostEventDispatcher::HostEventDispatcher()
input_stub_(nullptr),
parser_(base::Bind(&HostEventDispatcher::OnMessageReceived,
base::Unretained(this)),
- reader()) {
-}
+ reader()) {}
-HostEventDispatcher::~HostEventDispatcher() {
-}
+HostEventDispatcher::~HostEventDispatcher() {}
void HostEventDispatcher::OnMessageReceived(scoped_ptr<EventMessage> message,
const base::Closure& done_task) {
@@ -31,8 +29,8 @@ void HostEventDispatcher::OnMessageReceived(scoped_ptr<EventMessage> message,
base::ScopedClosureRunner done_runner(done_task);
- if (message->has_timestamp() && !event_timestamp_callback_.is_null())
- event_timestamp_callback_.Run(message->timestamp());
+ if (!on_input_event_callback_.is_null())
+ on_input_event_callback_.Run(message->timestamp());
if (message->has_key_event()) {
const KeyEvent& event = message->key_event();
diff --git a/remoting/protocol/host_event_dispatcher.h b/remoting/protocol/host_event_dispatcher.h
index 3f1bd88..8fa3564 100644
--- a/remoting/protocol/host_event_dispatcher.h
+++ b/remoting/protocol/host_event_dispatcher.h
@@ -18,7 +18,7 @@ class InputStub;
// channel to InputStub.
class HostEventDispatcher : public ChannelDispatcherBase {
public:
- typedef base::Callback<void(int64)> EventTimestampCallback;
+ typedef base::Callback<void(int64_t)> OnInputEventCallback;
HostEventDispatcher();
~HostEventDispatcher() override;
@@ -30,8 +30,8 @@ class HostEventDispatcher : public ChannelDispatcherBase {
// Sets the callback to be notified of each received input event, with the
// event timestamp supplied by the client. The
- void set_event_timestamp_callback(const EventTimestampCallback& value) {
- event_timestamp_callback_ = value;
+ void set_on_input_event_callback(const OnInputEventCallback& value) {
+ on_input_event_callback_ = value;
}
private:
@@ -39,7 +39,7 @@ class HostEventDispatcher : public ChannelDispatcherBase {
const base::Closure& done_task);
InputStub* input_stub_;
- EventTimestampCallback event_timestamp_callback_;
+ OnInputEventCallback on_input_event_callback_;
ProtobufMessageParser<EventMessage> parser_;
diff --git a/remoting/protocol/performance_tracker.cc b/remoting/protocol/performance_tracker.cc
index 271336b..283e59b 100644
--- a/remoting/protocol/performance_tracker.cc
+++ b/remoting/protocol/performance_tracker.cc
@@ -20,6 +20,13 @@ const char kVideoPaintLatencyHistogram[] = "Chromoting.Video.PaintLatency";
const char kVideoFrameRateHistogram[] = "Chromoting.Video.FrameRate";
const char kVideoPacketRateHistogram[] = "Chromoting.Video.PacketRate";
const char kVideoBandwidthHistogram[] = "Chromoting.Video.Bandwidth";
+const char kCapturePendingLatencyHistogram[] =
+ "Chromoting.Video.CapturePendingLatency";
+const char kCaptureOverheadHistogram[] = "Chromoting.Video.CaptureOverhead";
+const char kEncodePendingLatencyHistogram[] =
+ "Chromoting.Video.EncodePendingLatency";
+const char kSendPendingLatencyHistogram[] =
+ "Chromoting.Video.SendPendingLatency";
// Custom count and custom time histograms are log-scaled by default. This
// results in fine-grained buckets at lower values and wider-ranged buckets
@@ -54,6 +61,16 @@ const int kBandwidthHistogramBuckets = 100;
// boundary value, so set to 101.
const int kMaxFramesPerSec = 101;
+
+void UpdateUmaEnumHistogramStub(const std::string& histogram_name,
+ int64_t value,
+ int histogram_max) {}
+
+void UpdateUmaCustomHistogramStub(const std::string& histogram_name,
+ int64_t value,
+ int histogram_min,
+ int histogram_max,
+ int histogram_buckets) {}
} // namespace
namespace remoting {
@@ -69,7 +86,11 @@ PerformanceTracker::PerformanceTracker()
video_encode_ms_(kLatencySampleSize),
video_decode_ms_(kLatencySampleSize),
video_paint_ms_(kLatencySampleSize),
- round_trip_ms_(kLatencySampleSize) {}
+ round_trip_ms_(kLatencySampleSize) {
+ uma_custom_counts_updater_ = base::Bind(&UpdateUmaCustomHistogramStub);
+ uma_custom_times_updater_ = base::Bind(&UpdateUmaCustomHistogramStub);
+ uma_enum_histogram_updater_ = base::Bind(&UpdateUmaEnumHistogramStub);
+}
PerformanceTracker::~PerformanceTracker() {}
@@ -77,6 +98,10 @@ void PerformanceTracker::SetUpdateUmaCallbacks(
UpdateUmaCustomHistogramCallback update_uma_custom_counts_callback,
UpdateUmaCustomHistogramCallback update_uma_custom_times_callback,
UpdateUmaEnumHistogramCallback update_uma_enum_histogram_callback) {
+ DCHECK(!update_uma_custom_counts_callback.is_null());
+ DCHECK(!update_uma_custom_times_callback.is_null());
+ DCHECK(!update_uma_enum_histogram_callback.is_null());
+
uma_custom_counts_updater_ = update_uma_custom_counts_callback;
uma_custom_times_updater_ = update_uma_custom_times_callback;
uma_enum_histogram_updater_ = update_uma_enum_histogram_callback;
@@ -105,11 +130,10 @@ void PerformanceTracker::RecordVideoPacketStats(const VideoPacket& packet) {
round_trip_ms_.Record(round_trip_latency.InMilliseconds());
- if (!uma_custom_times_updater_.is_null())
- uma_custom_times_updater_.Run(
- kRoundTripLatencyHistogram, round_trip_latency.InMilliseconds(),
- kLatencyHistogramMinMs, kLatencyHistogramMaxMs,
- kLatencyHistogramBuckets);
+ uma_custom_times_updater_.Run(
+ kRoundTripLatencyHistogram, round_trip_latency.InMilliseconds(),
+ kLatencyHistogramMinMs, kLatencyHistogramMaxMs,
+ kLatencyHistogramBuckets);
}
// If the packet is empty, there are no other stats to update.
@@ -121,49 +145,71 @@ void PerformanceTracker::RecordVideoPacketStats(const VideoPacket& packet) {
if (packet.has_capture_time_ms()) {
video_capture_ms_.Record(packet.capture_time_ms());
- if (!uma_custom_times_updater_.is_null())
- uma_custom_times_updater_.Run(
- kVideoCaptureLatencyHistogram, packet.capture_time_ms(),
- kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
- kVideoActionsHistogramsBuckets);
+ uma_custom_times_updater_.Run(
+ kVideoCaptureLatencyHistogram, packet.capture_time_ms(),
+ kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
+ kVideoActionsHistogramsBuckets);
}
if (packet.has_encode_time_ms()) {
video_encode_ms_.Record(packet.encode_time_ms());
- if (!uma_custom_times_updater_.is_null())
- uma_custom_times_updater_.Run(
- kVideoEncodeLatencyHistogram, packet.encode_time_ms(),
- kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
- kVideoActionsHistogramsBuckets);
+ uma_custom_times_updater_.Run(
+ kVideoEncodeLatencyHistogram, packet.encode_time_ms(),
+ kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
+ kVideoActionsHistogramsBuckets);
+ }
+
+ if (packet.has_capture_pending_time_ms()) {
+ uma_custom_times_updater_.Run(
+ kCapturePendingLatencyHistogram, packet.capture_pending_time_ms(),
+ kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
+ kVideoActionsHistogramsBuckets);
+ }
+
+ if (packet.has_capture_overhead_time_ms()) {
+ uma_custom_times_updater_.Run(
+ kCaptureOverheadHistogram, packet.capture_overhead_time_ms(),
+ kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
+ kVideoActionsHistogramsBuckets);
+ }
+
+ if (packet.has_encode_pending_time_ms()) {
+ uma_custom_times_updater_.Run(
+ kEncodePendingLatencyHistogram, packet.encode_pending_time_ms(),
+ kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
+ kVideoActionsHistogramsBuckets);
+ }
+
+ if (packet.has_send_pending_time_ms()) {
+ uma_custom_times_updater_.Run(
+ kSendPendingLatencyHistogram, packet.send_pending_time_ms(),
+ kVideoActionsHistogramsMinMs, kVideoActionsHistogramsMaxMs,
+ kVideoActionsHistogramsBuckets);
}
}
void PerformanceTracker::RecordDecodeTime(double value) {
video_decode_ms_.Record(value);
- if (!uma_custom_times_updater_.is_null())
- uma_custom_times_updater_.Run(
- kVideoDecodeLatencyHistogram, value, kVideoActionsHistogramsMinMs,
- kVideoActionsHistogramsMaxMs, kVideoActionsHistogramsBuckets);
+ uma_custom_times_updater_.Run(
+ kVideoDecodeLatencyHistogram, value, kVideoActionsHistogramsMinMs,
+ kVideoActionsHistogramsMaxMs, kVideoActionsHistogramsBuckets);
}
void PerformanceTracker::RecordPaintTime(double value) {
video_paint_ms_.Record(value);
- if (!uma_custom_times_updater_.is_null())
- uma_custom_times_updater_.Run(
- kVideoPaintLatencyHistogram, value, kVideoActionsHistogramsMinMs,
- kVideoActionsHistogramsMaxMs, kVideoActionsHistogramsBuckets);
+ uma_custom_times_updater_.Run(
+ kVideoPaintLatencyHistogram, value, kVideoActionsHistogramsMinMs,
+ kVideoActionsHistogramsMaxMs, kVideoActionsHistogramsBuckets);
}
void PerformanceTracker::UploadRateStatsToUma() {
- if (!uma_enum_histogram_updater_.is_null()) {
- uma_enum_histogram_updater_.Run(kVideoFrameRateHistogram,
- video_frame_rate(), kMaxFramesPerSec);
- uma_enum_histogram_updater_.Run(kVideoPacketRateHistogram,
- video_packet_rate(), kMaxFramesPerSec);
- uma_custom_counts_updater_.Run(
- kVideoBandwidthHistogram, video_bandwidth(), kBandwidthHistogramMinBps,
- kBandwidthHistogramMaxBps, kBandwidthHistogramBuckets);
- }
+ uma_enum_histogram_updater_.Run(kVideoFrameRateHistogram, video_frame_rate(),
+ kMaxFramesPerSec);
+ uma_enum_histogram_updater_.Run(kVideoPacketRateHistogram,
+ video_packet_rate(), kMaxFramesPerSec);
+ uma_custom_counts_updater_.Run(
+ kVideoBandwidthHistogram, video_bandwidth(), kBandwidthHistogramMinBps,
+ kBandwidthHistogramMaxBps, kBandwidthHistogramBuckets);
}
void PerformanceTracker::OnPauseStateChanged(bool paused) {
diff --git a/remoting/protocol/performance_tracker.h b/remoting/protocol/performance_tracker.h
index af9e4c7..f1d9aa0 100644
--- a/remoting/protocol/performance_tracker.h
+++ b/remoting/protocol/performance_tracker.h
@@ -22,7 +22,7 @@ class PerformanceTracker {
public:
// Callback that updates UMA custom counts or custom times histograms.
typedef base::Callback<void(const std::string& histogram_name,
- int64 value,
+ int64_t value,
int histogram_min,
int histogram_max,
int histogram_buckets)>
@@ -30,7 +30,7 @@ class PerformanceTracker {
// Callback that updates UMA enumeration histograms.
typedef base::Callback<
- void(const std::string& histogram_name, int64 value, int histogram_max)>
+ void(const std::string& histogram_name, int64_t value, int histogram_max)>
UpdateUmaEnumHistogramCallback;
PerformanceTracker();
diff --git a/remoting/protocol/protocol_mock_objects.h b/remoting/protocol/protocol_mock_objects.h
index d37b575..ed5115f 100644
--- a/remoting/protocol/protocol_mock_objects.h
+++ b/remoting/protocol/protocol_mock_objects.h
@@ -81,8 +81,8 @@ class MockConnectionToClientEventHandler
void(ConnectionToClient* connection));
MOCK_METHOD2(OnConnectionClosed,
void(ConnectionToClient* connection, ErrorCode error));
- MOCK_METHOD2(OnEventTimestamp,
- void(ConnectionToClient* connection, int64 timestamp));
+ MOCK_METHOD2(OnInputEventReceived,
+ void(ConnectionToClient* connection, int64_t timestamp));
MOCK_METHOD3(OnRouteChange,
void(ConnectionToClient* connection,
const std::string& channel_name,