Diffstat (limited to 'remoting/base')
-rw-r--r--  remoting/base/capture_data.cc      |   2
-rw-r--r--  remoting/base/capture_data.h       |   8
-rw-r--r--  remoting/base/decoder.h            |  41
-rw-r--r--  remoting/base/decoder_row_based.cc | 103
-rw-r--r--  remoting/base/decoder_row_based.h  |  24
-rw-r--r--  remoting/base/decoder_vp8.cc       |  19
-rw-r--r--  remoting/base/decoder_vp8.h        |  14
-rw-r--r--  remoting/base/encoder.h            |   3
-rw-r--r--  remoting/base/encoder_verbatim.cc  |   6
-rw-r--r--  remoting/base/encoder_vp8.cc       |   9
-rw-r--r--  remoting/base/encoder_zlib.cc      |   5
-rw-r--r--  remoting/base/util.cc              |  12
-rw-r--r--  remoting/base/util.h               |   5
13 files changed, 155 insertions, 96 deletions
diff --git a/remoting/base/capture_data.cc b/remoting/base/capture_data.cc
index 2d96441..8bf464d 100644
--- a/remoting/base/capture_data.cc
+++ b/remoting/base/capture_data.cc
@@ -16,7 +16,7 @@ DataPlanes::DataPlanes() {
CaptureData::CaptureData(const DataPlanes &data_planes,
int width,
int height,
- PixelFormat format) :
+ media::VideoFrame::Format format) :
data_planes_(data_planes), dirty_rects_(),
width_(width), height_(height), pixel_format_(format) {
}
diff --git a/remoting/base/capture_data.h b/remoting/base/capture_data.h
index 97631ab..f399e70 100644
--- a/remoting/base/capture_data.h
+++ b/remoting/base/capture_data.h
@@ -9,8 +9,8 @@
#include "base/basictypes.h"
#include "base/ref_counted.h"
+#include "media/base/video_frame.h"
#include "remoting/base/types.h"
-#include "remoting/proto/video.pb.h"
namespace remoting {
@@ -29,7 +29,7 @@ class CaptureData : public base::RefCountedThreadSafe<CaptureData> {
CaptureData(const DataPlanes &data_planes,
int width,
int height,
- PixelFormat format);
+ media::VideoFrame::Format format);
// Get the data_planes data of the last capture.
const DataPlanes& data_planes() const { return data_planes_; }
@@ -45,7 +45,7 @@ class CaptureData : public base::RefCountedThreadSafe<CaptureData> {
int height() const { return height_; }
// Get the pixel format of the image captured.
- PixelFormat pixel_format() const { return pixel_format_; }
+ media::VideoFrame::Format pixel_format() const { return pixel_format_; }
// Mutating methods.
InvalidRects& mutable_dirty_rects() { return dirty_rects_; }
@@ -55,7 +55,7 @@ class CaptureData : public base::RefCountedThreadSafe<CaptureData> {
InvalidRects dirty_rects_;
int width_;
int height_;
- PixelFormat pixel_format_;
+ media::VideoFrame::Format pixel_format_;
friend class base::RefCountedThreadSafe<CaptureData>;
virtual ~CaptureData();
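
For context, a minimal sketch (not part of this change) of how a capturer might construct a CaptureData with the new format type; the buffer, width, and height here are hypothetical placeholders:

  #include "remoting/base/capture_data.h"

  // Sketch: the constructor now takes media::VideoFrame::Format directly
  // instead of the removed remoting PixelFormat enum.
  remoting::DataPlanes planes;
  planes.data[0] = rgb_buffer;      // hypothetical RGB32 pixel buffer
  planes.strides[0] = width * 4;    // RGB32 uses 4 bytes per pixel
  scoped_refptr<remoting::CaptureData> capture_data = new remoting::CaptureData(
      planes, width, height, media::VideoFrame::RGB32);
  DCHECK_EQ(media::VideoFrame::RGB32, capture_data->pixel_format());
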
diff --git a/remoting/base/decoder.h b/remoting/base/decoder.h
index 82211a7..bcb5a54 100644
--- a/remoting/base/decoder.h
+++ b/remoting/base/decoder.h
@@ -5,6 +5,8 @@
#ifndef REMOTING_BASE_DECODER_H_
#define REMOTING_BASE_DECODER_H_
+#include <vector>
+
#include "base/task.h"
#include "base/scoped_ptr.h"
#include "gfx/rect.h"
@@ -21,34 +23,39 @@ typedef std::vector<gfx::Rect> UpdatedRects;
// TODO(ajwong): Beef up this documentation once the API stabilizes.
class Decoder {
public:
+ // DecodeResult is returned from DecodePacket() and indicates the current
+ // state of the decoder. DECODE_DONE means that the last packet for the frame
+ // was processed, and the frame can be displayed now. DECODE_IN_PROGRESS
+ // indicates that the decoder must receive more data before the frame can be
+ // displayed. DECODE_ERROR is returned if there was an error in the stream.
+ enum DecodeResult {
+ DECODE_ERROR = -1,
+ DECODE_IN_PROGRESS,
+ DECODE_DONE,
+ };
+
Decoder() {}
virtual ~Decoder() {}
- // TODO(ajwong): This API is incorrect in the face of a streaming decode
- // protocol like VP8. However, it breaks the layering abstraction by
- // depending on the network packet protocol buffer type. I'm going to go
- // forward with it as is, and then refactor again to support streaming
- // decodes.
-
// Initializes the decoder to draw into the given |frame|. The |clip|
// specifies the region to draw into. The clip region must fit inside
- // the dimensions of frame. Failure to do so will CHECK Fail.
- //
- // TODO(ajwong): Should this take the source pixel format?
- // TODO(ajwong): Should the protocol be split into basic-types followed
- // by packet types? Basic types might include the format enum below.
- virtual void Initialize(scoped_refptr<media::VideoFrame> frame,
- const gfx::Rect& clip, int bytes_per_src_pixel) = 0;
+ // the dimensions of frame. Failure to do so will CHECK fail.
+ virtual void Initialize(scoped_refptr<media::VideoFrame> frame) = 0;
+
+ // Feeds more data into the decoder.
+ virtual DecodeResult DecodePacket(const VideoPacket* packet) = 0;
+
+ // Returns rects that were updated in the last frame. Can be called only
+ // after DecodePacket returned DECODE_DONE. Caller keeps ownership of
+ // |rects|. |rects| is left empty if the whole screen needs to be updated.
+ virtual void GetUpdatedRects(UpdatedRects* rects) = 0;
// Reset the decoder to an uninitialized state. Release all references to
// the initialized |frame|. Initialize() must be called before the decoder
// is used again.
virtual void Reset() = 0;
- // Feeds more data into the decoder.
- virtual void DecodeBytes(const std::string& encoded_bytes) = 0;
-
- // Returns true if decoder is ready to accept data via ProcessRectangleData.
+ // Returns true if decoder is ready to accept data via DecodePacket.
virtual bool IsReadyForData() = 0;
virtual VideoPacketFormat::Encoding Encoding() = 0;
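
A minimal usage sketch of the reworked interface, assuming a hypothetical packet source (GetNextPacket) and renderer (PaintRects) that are not part of remoting:

  // Sketch: driving the streaming Decoder API.
  decoder->Initialize(frame);                      // bind the output VideoFrame
  while (decoder->IsReadyForData()) {
    const VideoPacket* packet = GetNextPacket();   // hypothetical packet source
    Decoder::DecodeResult result = decoder->DecodePacket(packet);
    if (result == Decoder::DECODE_ERROR)
      break;                                       // corrupt stream
    if (result == Decoder::DECODE_DONE) {
      UpdatedRects rects;                          // empty => repaint whole frame
      decoder->GetUpdatedRects(&rects);
      PaintRects(rects);                           // hypothetical renderer call
    }
  }
  decoder->Reset();                                // release the reference to |frame|
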
diff --git a/remoting/base/decoder_row_based.cc b/remoting/base/decoder_row_based.cc
index d378e96..1a0efad 100644
--- a/remoting/base/decoder_row_based.cc
+++ b/remoting/base/decoder_row_based.cc
@@ -11,6 +11,11 @@
namespace remoting {
+namespace {
+// Both input and output data are assumed to be RGBA32.
+const int kBytesPerPixel = 4;
+}
+
DecoderRowBased* DecoderRowBased::CreateZlibDecoder() {
return new DecoderRowBased(new DecompressorZlib(),
VideoPacketFormat::ENCODING_ZLIB);
@@ -26,7 +31,6 @@ DecoderRowBased::DecoderRowBased(Decompressor* decompressor,
: state_(kUninitialized),
decompressor_(decompressor),
encoding_(encoding),
- bytes_per_src_pixel_(0),
row_pos_(0),
row_y_(0),
// TODO(hclam): We should use the information from the update stream
@@ -45,38 +49,35 @@ void DecoderRowBased::Reset() {
}
bool DecoderRowBased::IsReadyForData() {
- return state_ == kReady;
+ return state_ == kReady || state_ == kProcessing || state_ == kDone;
}
-void DecoderRowBased::Initialize(scoped_refptr<media::VideoFrame> frame,
- const gfx::Rect& clip,
- int bytes_per_src_pixel) {
+void DecoderRowBased::Initialize(scoped_refptr<media::VideoFrame> frame) {
// Make sure we are not currently initialized.
CHECK_EQ(kUninitialized, state_);
- if (static_cast<PixelFormat>(frame->format()) != PIXEL_FORMAT_RGB32) {
+ if (frame->format() != media::VideoFrame::RGB32) {
LOG(WARNING) << "DecoderRowBased only supports RGB32.";
state_ = kError;
return;
}
frame_ = frame;
-
- // Reset the buffer location status variables.
- clip_ = clip;
- row_pos_ = 0;
- row_y_ = 0;
- bytes_per_src_pixel_ = bytes_per_src_pixel;
-
state_ = kReady;
}
-void DecoderRowBased::DecodeBytes(const std::string& encoded_bytes) {
- DCHECK_EQ(kReady, state_);
+Decoder::DecodeResult DecoderRowBased::DecodePacket(
+ const VideoPacket* packet) {
+ UpdateStateForPacket(packet);
- const uint8* in = reinterpret_cast<const uint8*>(encoded_bytes.data());
- const int in_size = encoded_bytes.size();
- const int row_size = clip_.width() * bytes_per_src_pixel_;
+ if (state_ == kError) {
+ return DECODE_ERROR;
+ }
+
+ const uint8* in = reinterpret_cast<const uint8*>(packet->data().data());
+ const int in_size = packet->data().size();
+
+ const int row_size = clip_.width() * kBytesPerPixel;
int stride = frame_->stride(media::VideoFrame::kRGBPlane);
uint8* rect_begin = frame_->data(media::VideoFrame::kRGBPlane);
@@ -88,15 +89,19 @@ void DecoderRowBased::DecodeBytes(const std::string& encoded_bytes) {
stride = -stride;
}
- // TODO(ajwong): This should be bytes_per_dst_pixel shouldn't this.?
- uint8* out = rect_begin +
- stride * (clip_.y() + row_y_) +
- bytes_per_src_pixel_ * clip_.x();
+ uint8* out = rect_begin + stride * (clip_.y() + row_y_) +
+ kBytesPerPixel * clip_.x();
// Consume all the data in the message.
bool decompress_again = true;
int used = 0;
while (decompress_again && used < in_size) {
+ if (row_y_ >= clip_.height()) {
+ state_ = kError;
+ LOG(WARNING) << "Too much data is received for the given rectangle.";
+ return DECODE_ERROR;
+ }
+
int written = 0;
int consumed = 0;
// TODO(ajwong): This assume source and dest stride are the same, which is
@@ -114,6 +119,60 @@ void DecoderRowBased::DecodeBytes(const std::string& encoded_bytes) {
out += stride;
}
}
+
+ if (state_ == kDone && row_y_ < clip_.height()) {
+ state_ = kError;
+ LOG(WARNING) << "Received LAST_PACKET, but didn't get enough data.";
+ return DECODE_ERROR;
+ }
+
+ return state_ == kDone ? DECODE_DONE : DECODE_IN_PROGRESS;
+}
+
+void DecoderRowBased::UpdateStateForPacket(const VideoPacket* packet) {
+ if (state_ == kError) {
+ return;
+ }
+
+ if (packet->flags() & VideoPacket::FIRST_PACKET) {
+ if (state_ != kReady && state_ != kDone) {
+ state_ = kError;
+ LOG(WARNING) << "Received unexpected FIRST_PACKET.";
+ return;
+ }
+ state_ = kProcessing;
+
+ // Reset the buffer location status variables on the first packet.
+ clip_.SetRect(packet->format().x(), packet->format().y(),
+ packet->format().width(), packet->format().height());
+ row_pos_ = 0;
+ row_y_ = 0;
+ }
+
+ if (state_ != kProcessing) {
+ state_ = kError;
+ LOG(WARNING) << "Received unexpected packet.";
+ return;
+ }
+
+ if (packet->flags() & VideoPacket::LAST_PACKET) {
+ if (state_ != kProcessing) {
+ state_ = kError;
+ LOG(WARNING) << "Received unexpected LAST_PACKET.";
+ return;
+ }
+ state_ = kDone;
+ }
+
+ return;
+}
+
+void DecoderRowBased::GetUpdatedRects(UpdatedRects* rects) {
+ rects->push_back(clip_);
+}
+
+VideoPacketFormat::Encoding DecoderRowBased::Encoding() {
+ return encoding_;
}
} // namespace remoting
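
The packet flags drive a small state machine: FIRST_PACKET moves the decoder from kReady (or kDone) to kProcessing, LAST_PACKET moves it to kDone once the clip has been filled, and any unexpected input moves it to kError. Below is a sketch of a rectangle delivered in a single packet, so that DecodePacket() returns DECODE_DONE; the clip coordinates and payload are placeholders:

  // Sketch: one rectangle, one packet (both FIRST_PACKET and LAST_PACKET set).
  VideoPacket packet;
  packet.mutable_format()->set_x(0);
  packet.mutable_format()->set_y(0);
  packet.mutable_format()->set_width(64);
  packet.mutable_format()->set_height(64);
  packet.mutable_format()->set_encoding(VideoPacketFormat::ENCODING_ZLIB);
  packet.set_flags(VideoPacket::FIRST_PACKET | VideoPacket::LAST_PACKET);
  packet.set_data(compressed_rows);  // hypothetical zlib-compressed RGB32 rows
  Decoder::DecodeResult result = decoder->DecodePacket(&packet);
  // DECODE_DONE only if the payload decompressed to all 64 rows of the clip;
  // otherwise the decoder reports DECODE_ERROR and enters kError.
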
diff --git a/remoting/base/decoder_row_based.h b/remoting/base/decoder_row_based.h
index 2deb897..da05c05 100644
--- a/remoting/base/decoder_row_based.h
+++ b/remoting/base/decoder_row_based.h
@@ -19,12 +19,12 @@ class DecoderRowBased : public Decoder {
static DecoderRowBased* CreateVerbatimDecoder();
// Decoder implementation.
- virtual void Reset();
virtual bool IsReadyForData();
- virtual void Initialize(scoped_refptr<media::VideoFrame> frame,
- const gfx::Rect& clip, int bytes_per_src_pixel);
- virtual void DecodeBytes(const std::string& encoded_bytes);
- virtual VideoPacketFormat::Encoding Encoding() { return encoding_; }
+ virtual void Initialize(scoped_refptr<media::VideoFrame> frame);
+ virtual DecodeResult DecodePacket(const VideoPacket* packet);
+ virtual void GetUpdatedRects(UpdatedRects* rects);
+ virtual void Reset();
+ virtual VideoPacketFormat::Encoding Encoding();
// TODO(hclam): Should make this into the Decoder interface.
// TODO(ajwong): Before putting into the interface, we should decide if the
@@ -32,15 +32,20 @@ class DecoderRowBased : public Decoder {
void set_reverse_rows(bool reverse) { reverse_rows_ = reverse; }
private:
- DecoderRowBased(Decompressor* decompressor,
- VideoPacketFormat::Encoding encoding);
-
enum State {
kUninitialized,
kReady,
+ kProcessing,
+ kDone,
kError,
};
+ DecoderRowBased(Decompressor* decompressor,
+ VideoPacketFormat::Encoding encoding);
+
+ // Helper method. Called from DecodePacket to update the state of the decoder.
+ void UpdateStateForPacket(const VideoPacket* packet);
+
// The internal state of the decoder.
State state_;
@@ -56,9 +61,6 @@ class DecoderRowBased : public Decoder {
// The encoding of the incoming stream.
VideoPacketFormat::Encoding encoding_;
- // Number of bytes per pixel from source stream.
- int bytes_per_src_pixel_;
-
// The position in the row that we are updating.
int row_pos_;
diff --git a/remoting/base/decoder_vp8.cc b/remoting/base/decoder_vp8.cc
index 46a6d31..516e9f4 100644
--- a/remoting/base/decoder_vp8.cc
+++ b/remoting/base/decoder_vp8.cc
@@ -30,8 +30,7 @@ DecoderVp8::~DecoderVp8() {
delete codec_;
}
-void DecoderVp8::Initialize(scoped_refptr<media::VideoFrame> frame,
- const gfx::Rect& clip, int bytes_per_src_pixel) {
+void DecoderVp8::Initialize(scoped_refptr<media::VideoFrame> frame) {
DCHECK_EQ(kUninitialized, state_);
if (frame->format() != media::VideoFrame::RGB32) {
@@ -44,7 +43,7 @@ void DecoderVp8::Initialize(scoped_refptr<media::VideoFrame> frame,
state_ = kReady;
}
-void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) {
+Decoder::DecodeResult DecoderVp8::DecodePacket(const VideoPacket* packet) {
DCHECK_EQ(kReady, state_);
// Initialize the codec as needed.
@@ -59,19 +58,19 @@ void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) {
delete codec_;
codec_ = NULL;
state_ = kError;
- return;
+ return DECODE_ERROR;
}
}
// Do the actual decoding.
vpx_codec_err_t ret = vpx_codec_decode(
- codec_, reinterpret_cast<const uint8*>(encoded_bytes.data()),
- encoded_bytes.size(), NULL, 0);
+ codec_, reinterpret_cast<const uint8*>(packet->data().data()),
+ packet->data().size(), NULL, 0);
if (ret != VPX_CODEC_OK) {
LOG(INFO) << "Decoding failed:" << vpx_codec_err_to_string(ret) << "\n"
<< "Details: " << vpx_codec_error(codec_) << "\n"
<< vpx_codec_error_detail(codec_);
- return;
+ return DECODE_ERROR;
}
// Gets the decoded data.
@@ -79,7 +78,7 @@ void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) {
vpx_image_t* image = vpx_codec_get_frame(codec_, &iter);
if (!image) {
LOG(INFO) << "No video frame decoded";
- return;
+ return DECODE_ERROR;
}
// Perform YUV conversion.
@@ -89,6 +88,10 @@ void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) {
image->stride[0], image->stride[1],
frame_->stride(media::VideoFrame::kRGBPlane),
media::YV12);
+ return DECODE_DONE;
+}
+
+void DecoderVp8::GetUpdatedRects(UpdatedRects* rects) {
}
void DecoderVp8::Reset() {
diff --git a/remoting/base/decoder_vp8.h b/remoting/base/decoder_vp8.h
index dfef0b7..ebe082a 100644
--- a/remoting/base/decoder_vp8.h
+++ b/remoting/base/decoder_vp8.h
@@ -17,17 +17,11 @@ class DecoderVp8 : public Decoder {
virtual ~DecoderVp8();
// Decoder implementations.
- virtual void Initialize(scoped_refptr<media::VideoFrame> frame,
- const gfx::Rect& clip, int bytes_per_src_pixel);
-
- virtual void Reset();
-
- // Feeds more data into the decoder.
- virtual void DecodeBytes(const std::string& encoded_bytes);
-
- // Returns true if decoder is ready to accept data via ProcessRectangleData.
+ virtual void Initialize(scoped_refptr<media::VideoFrame> frame);
+ virtual DecodeResult DecodePacket(const VideoPacket* packet);
+ virtual void GetUpdatedRects(UpdatedRects* rects);
virtual bool IsReadyForData();
-
+ virtual void Reset();
virtual VideoPacketFormat::Encoding Encoding();
private:
diff --git a/remoting/base/encoder.h b/remoting/base/encoder.h
index 05e40ff..e34e155 100644
--- a/remoting/base/encoder.h
+++ b/remoting/base/encoder.h
@@ -8,8 +8,6 @@
#include "base/basictypes.h"
#include "base/callback.h"
#include "media/base/data_buffer.h"
-// TODO(hclam): Should not depend on internal.pb.h.
-#include "remoting/proto/internal.pb.h"
namespace media {
class DataBuffer;
@@ -18,6 +16,7 @@ namespace media {
namespace remoting {
class CaptureData;
+class VideoPacket;
// A class to perform the task of encoding a continous stream of
// images.
diff --git a/remoting/base/encoder_verbatim.cc b/remoting/base/encoder_verbatim.cc
index 0bbe4e8..1185bff 100644
--- a/remoting/base/encoder_verbatim.cc
+++ b/remoting/base/encoder_verbatim.cc
@@ -6,9 +6,10 @@
#include "base/logging.h"
#include "gfx/rect.h"
-#include "media/base/data_buffer.h"
+#include "net/base/io_buffer.h"
#include "remoting/base/capture_data.h"
#include "remoting/base/util.h"
+#include "remoting/proto/video.pb.h"
namespace remoting {
@@ -17,8 +18,6 @@ namespace remoting {
// Add support for splitting across packets and remove the 10*.
static const int kPacketSize = 10 * 1024 * 1024;
-using media::DataBuffer;
-
EncoderVerbatim::EncoderVerbatim()
: packet_size_(kPacketSize) {
}
@@ -87,7 +86,6 @@ void EncoderVerbatim::PrepareUpdateStart(const gfx::Rect& rect,
format->set_width(rect.width());
format->set_height(rect.height());
format->set_encoding(VideoPacketFormat::ENCODING_VERBATIM);
- format->set_pixel_format(capture_data_->pixel_format());
}
uint8* EncoderVerbatim::GetOutputBuffer(VideoPacket* packet, size_t size) {
diff --git a/remoting/base/encoder_vp8.cc b/remoting/base/encoder_vp8.cc
index 3a0f75b..dd85fb2 100644
--- a/remoting/base/encoder_vp8.cc
+++ b/remoting/base/encoder_vp8.cc
@@ -4,10 +4,10 @@
#include "base/logging.h"
#include "media/base/callback.h"
-#include "media/base/data_buffer.h"
#include "media/base/media.h"
#include "remoting/base/capture_data.h"
#include "remoting/base/encoder_vp8.h"
+#include "remoting/proto/video.pb.h"
extern "C" {
#define VPX_CODEC_DISABLE_COMPAT 1
@@ -114,7 +114,7 @@ bool EncoderVp8::PrepareImage(scoped_refptr<CaptureData> capture_data) {
// give a gray scale image after conversion.
// TODO(sergeyu): Move this code to a separate routine.
// TODO(sergeyu): Optimize this code.
- DCHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32)
+ DCHECK(capture_data->pixel_format() == media::VideoFrame::RGB32)
<< "Only RGB32 is supported";
uint8* in = capture_data->data_planes().data[0];
const int in_stride = capture_data->data_planes().strides[0];
@@ -197,11 +197,6 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data,
message->mutable_format()->set_encoding(VideoPacketFormat::ENCODING_VP8);
message->set_flags(VideoPacket::FIRST_PACKET | VideoPacket::LAST_PACKET);
- message->mutable_format()->set_pixel_format(PIXEL_FORMAT_RGB32);
- message->mutable_format()->set_x(0);
- message->mutable_format()->set_y(0);
- message->mutable_format()->set_width(capture_data->width());
- message->mutable_format()->set_height(capture_data->height());
data_available_callback->Run(message);
delete data_available_callback;
diff --git a/remoting/base/encoder_zlib.cc b/remoting/base/encoder_zlib.cc
index e184fd3..5fc1afe 100644
--- a/remoting/base/encoder_zlib.cc
+++ b/remoting/base/encoder_zlib.cc
@@ -6,10 +6,10 @@
#include "base/logging.h"
#include "gfx/rect.h"
-#include "media/base/data_buffer.h"
#include "remoting/base/capture_data.h"
#include "remoting/base/compressor_zlib.h"
#include "remoting/base/util.h"
+#include "remoting/proto/video.pb.h"
namespace remoting {
@@ -26,7 +26,7 @@ EncoderZlib::~EncoderZlib() {}
void EncoderZlib::Encode(scoped_refptr<CaptureData> capture_data,
bool key_frame,
DataAvailableCallback* data_available_callback) {
- CHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32)
+ CHECK(capture_data->pixel_format() == media::VideoFrame::RGB32)
<< "Zlib Encoder only works with RGB32. Got "
<< capture_data->pixel_format();
capture_data_ = capture_data;
@@ -118,7 +118,6 @@ void EncoderZlib::PrepareUpdateStart(const gfx::Rect& rect,
format->set_width(rect.width());
format->set_height(rect.height());
format->set_encoding(VideoPacketFormat::ENCODING_ZLIB);
- format->set_pixel_format(capture_data_->pixel_format());
}
uint8* EncoderZlib::GetOutputBuffer(VideoPacket* packet, size_t size) {
diff --git a/remoting/base/util.cc b/remoting/base/util.cc
index f24d322..5cf70fe 100644
--- a/remoting/base/util.cc
+++ b/remoting/base/util.cc
@@ -6,17 +6,19 @@
#include "base/logging.h"
+using media::VideoFrame;
+
namespace remoting {
-int GetBytesPerPixel(PixelFormat format) {
+int GetBytesPerPixel(VideoFrame::Format format) {
// Note: The order is important here for performance. This is sorted from the
// most common to the less common (PIXEL_FORMAT_ASCII is mostly used
// just for testing).
switch (format) {
- case PIXEL_FORMAT_RGB24: return 3;
- case PIXEL_FORMAT_RGB565: return 2;
- case PIXEL_FORMAT_RGB32: return 4;
- case PIXEL_FORMAT_ASCII: return 1;
+ case VideoFrame::RGB24: return 3;
+ case VideoFrame::RGB565: return 2;
+ case VideoFrame::RGB32: return 4;
+ case VideoFrame::ASCII: return 1;
default:
NOTREACHED() << "Pixel format not supported";
return 0;
diff --git a/remoting/base/util.h b/remoting/base/util.h
index d7f4128..bf5cff5 100644
--- a/remoting/base/util.h
+++ b/remoting/base/util.h
@@ -5,11 +5,12 @@
#ifndef REMOTING_BASE_UTIL_H_
#define REMOTING_BASE_UTIL_H_
-#include "remoting/proto/video.pb.h"
+#include "media/base/video_frame.h"
namespace remoting {
-int GetBytesPerPixel(PixelFormat format);
+// TODO(sergeyu): Move this to media::VideoFrame.
+int GetBytesPerPixel(media::VideoFrame::Format format);
} // namespace remoting
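
A small usage sketch for the relocated helper; the width value is arbitrary:

  #include "remoting/base/util.h"

  // Sketch: size in bytes of one row of RGB32 pixels.
  const int width = 640;
  const int row_bytes =
      width * remoting::GetBytesPerPixel(media::VideoFrame::RGB32);  // 640 * 4 = 2560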