summaryrefslogtreecommitdiffstats
path: root/remoting/base
diff options
context:
space:
mode:
Diffstat (limited to 'remoting/base')
-rw-r--r--remoting/base/capture_data.h2
-rw-r--r--remoting/base/codec_test.cc6
-rw-r--r--remoting/base/decoder.h4
-rw-r--r--remoting/base/decoder_row_based.cc10
-rw-r--r--remoting/base/decoder_row_based.h7
-rw-r--r--remoting/base/decoder_vp8.cc114
-rw-r--r--remoting/base/decoder_vp8.h39
-rw-r--r--remoting/base/encoder.h13
-rw-r--r--remoting/base/encoder_verbatim.cc44
-rw-r--r--remoting/base/encoder_verbatim.h7
-rw-r--r--remoting/base/encoder_vp8.cc77
-rw-r--r--remoting/base/encoder_zlib.cc56
-rw-r--r--remoting/base/encoder_zlib.h7
-rw-r--r--remoting/base/multiple_array_input_stream_unittest.cc4
-rw-r--r--remoting/base/util.cc10
-rw-r--r--remoting/base/util.h2
16 files changed, 159 insertions, 243 deletions
diff --git a/remoting/base/capture_data.h b/remoting/base/capture_data.h
index 0d707f9..97631ab 100644
--- a/remoting/base/capture_data.h
+++ b/remoting/base/capture_data.h
@@ -10,7 +10,7 @@
#include "base/basictypes.h"
#include "base/ref_counted.h"
#include "remoting/base/types.h"
-#include "remoting/proto/event.pb.h"
+#include "remoting/proto/video.pb.h"
namespace remoting {
diff --git a/remoting/base/codec_test.cc b/remoting/base/codec_test.cc
index 62b98607..dcc165e 100644
--- a/remoting/base/codec_test.cc
+++ b/remoting/base/codec_test.cc
@@ -317,7 +317,7 @@ class EncoderTester {
scoped_refptr<CaptureData> PrepareEncodeData(PixelFormat format,
uint8** memory) {
// TODO(hclam): Support also YUV format.
- CHECK(format == PixelFormatRgb32);
+ CHECK(format == PIXEL_FORMAT_RGB32);
int size = kWidth * kHeight * kBytesPerPixel;
*memory = new uint8[size];
@@ -360,7 +360,7 @@ void TestEncoder(Encoder* encoder, bool strict) {
uint8* memory;
scoped_refptr<CaptureData> data =
- PrepareEncodeData(PixelFormatRgb32, &memory);
+ PrepareEncodeData(PIXEL_FORMAT_RGB32, &memory);
TestEncodingRects(encoder, &tester, data, kTestRects, 1);
TestEncodingRects(encoder, &tester, data, kTestRects + 1, 1);
@@ -412,7 +412,7 @@ void TestEncoderDecoder(Encoder* encoder, Decoder* decoder, bool strict) {
uint8* memory;
scoped_refptr<CaptureData> data =
- PrepareEncodeData(PixelFormatRgb32, &memory);
+ PrepareEncodeData(PIXEL_FORMAT_RGB32, &memory);
DecoderTester decoder_tester(decoder);
decoder_tester.set_strict(strict);
decoder_tester.set_capture_data(data);
diff --git a/remoting/base/decoder.h b/remoting/base/decoder.h
index 0a6ceff..82211a7 100644
--- a/remoting/base/decoder.h
+++ b/remoting/base/decoder.h
@@ -9,7 +9,7 @@
#include "base/scoped_ptr.h"
#include "gfx/rect.h"
#include "media/base/video_frame.h"
-#include "remoting/proto/event.pb.h"
+#include "remoting/proto/video.pb.h"
namespace remoting {
@@ -51,7 +51,7 @@ class Decoder {
// Returns true if decoder is ready to accept data via ProcessRectangleData.
virtual bool IsReadyForData() = 0;
- virtual UpdateStreamEncoding Encoding() = 0;
+ virtual VideoPacketFormat::Encoding Encoding() = 0;
};
} // namespace remoting
diff --git a/remoting/base/decoder_row_based.cc b/remoting/base/decoder_row_based.cc
index 65c74c5..d378e96 100644
--- a/remoting/base/decoder_row_based.cc
+++ b/remoting/base/decoder_row_based.cc
@@ -12,15 +12,17 @@
namespace remoting {
DecoderRowBased* DecoderRowBased::CreateZlibDecoder() {
- return new DecoderRowBased(new DecompressorZlib(), EncodingZlib);
+ return new DecoderRowBased(new DecompressorZlib(),
+ VideoPacketFormat::ENCODING_ZLIB);
}
DecoderRowBased* DecoderRowBased::CreateVerbatimDecoder() {
- return new DecoderRowBased(new DecompressorVerbatim(), EncodingNone);
+ return new DecoderRowBased(new DecompressorVerbatim(),
+ VideoPacketFormat::ENCODING_VERBATIM);
}
DecoderRowBased::DecoderRowBased(Decompressor* decompressor,
- UpdateStreamEncoding encoding)
+ VideoPacketFormat::Encoding encoding)
: state_(kUninitialized),
decompressor_(decompressor),
encoding_(encoding),
@@ -52,7 +54,7 @@ void DecoderRowBased::Initialize(scoped_refptr<media::VideoFrame> frame,
// Make sure we are not currently initialized.
CHECK_EQ(kUninitialized, state_);
- if (static_cast<PixelFormat>(frame->format()) != PixelFormatRgb32) {
+ if (static_cast<PixelFormat>(frame->format()) != PIXEL_FORMAT_RGB32) {
LOG(WARNING) << "DecoderRowBased only supports RGB32.";
state_ = kError;
return;
diff --git a/remoting/base/decoder_row_based.h b/remoting/base/decoder_row_based.h
index c226db2..2deb897 100644
--- a/remoting/base/decoder_row_based.h
+++ b/remoting/base/decoder_row_based.h
@@ -24,7 +24,7 @@ class DecoderRowBased : public Decoder {
virtual void Initialize(scoped_refptr<media::VideoFrame> frame,
const gfx::Rect& clip, int bytes_per_src_pixel);
virtual void DecodeBytes(const std::string& encoded_bytes);
- virtual UpdateStreamEncoding Encoding() { return encoding_; }
+ virtual VideoPacketFormat::Encoding Encoding() { return encoding_; }
// TODO(hclam): Should make this into the Decoder interface.
// TODO(ajwong): Before putting into the interface, we should decide if the
@@ -32,7 +32,8 @@ class DecoderRowBased : public Decoder {
void set_reverse_rows(bool reverse) { reverse_rows_ = reverse; }
private:
- DecoderRowBased(Decompressor* decompressor, UpdateStreamEncoding encoding);
+ DecoderRowBased(Decompressor* decompressor,
+ VideoPacketFormat::Encoding encoding);
enum State {
kUninitialized,
@@ -53,7 +54,7 @@ class DecoderRowBased : public Decoder {
scoped_ptr<Decompressor> decompressor_;
// The encoding of the incoming stream.
- UpdateStreamEncoding encoding_;
+ VideoPacketFormat::Encoding encoding_;
// Number of bytes per pixel from source stream.
int bytes_per_src_pixel_;
diff --git a/remoting/base/decoder_vp8.cc b/remoting/base/decoder_vp8.cc
index 452cd1f..a8d5245 100644
--- a/remoting/base/decoder_vp8.cc
+++ b/remoting/base/decoder_vp8.cc
@@ -18,12 +18,7 @@ extern "C" {
namespace remoting {
DecoderVp8::DecoderVp8()
- : state_(kWaitingForBeginRect),
- rect_x_(0),
- rect_y_(0),
- rect_width_(0),
- rect_height_(0),
- updated_rects_(NULL),
+ : state_(kUninitialized),
codec_(NULL) {
}
@@ -35,71 +30,22 @@ DecoderVp8::~DecoderVp8() {
delete codec_;
}
-bool DecoderVp8::BeginDecode(scoped_refptr<media::VideoFrame> frame,
- UpdatedRects* updated_rects,
- Task* partial_decode_done,
- Task* decode_done) {
- DCHECK(!partial_decode_done_.get());
- DCHECK(!decode_done_.get());
- DCHECK(!updated_rects_);
- DCHECK_EQ(kWaitingForBeginRect, state_);
-
- partial_decode_done_.reset(partial_decode_done);
- decode_done_.reset(decode_done);
- updated_rects_ = updated_rects;
+void DecoderVp8::Initialize(scoped_refptr<media::VideoFrame> frame,
+ const gfx::Rect& clip, int bytes_per_src_pixel) {
+ DCHECK_EQ(kUninitialized, state_);
if (frame->format() != media::VideoFrame::RGB32) {
LOG(INFO) << "DecoderVp8 only supports RGB32 as output";
- return false;
+ state_ = kError;
+ return;
}
frame_ = frame;
- return true;
-}
-
-bool DecoderVp8::PartialDecode(ChromotingHostMessage* message) {
- scoped_ptr<ChromotingHostMessage> msg_deleter(message);
- DCHECK(message->has_update_stream_packet());
-
- bool ret = true;
- if (message->update_stream_packet().has_begin_rect())
- ret = HandleBeginRect(message);
- if (ret && message->update_stream_packet().has_rect_data())
- ret = HandleRectData(message);
- if (ret && message->update_stream_packet().has_end_rect())
- ret = HandleEndRect(message);
- return ret;
-}
-void DecoderVp8::EndDecode() {
- DCHECK_EQ(kWaitingForBeginRect, state_);
- decode_done_->Run();
-
- partial_decode_done_.reset();
- decode_done_.reset();
- frame_ = NULL;
- updated_rects_ = NULL;
+ state_ = kReady;
}
-bool DecoderVp8::HandleBeginRect(ChromotingHostMessage* message) {
- DCHECK_EQ(kWaitingForBeginRect, state_);
- state_ = kWaitingForRectData;
-
- rect_width_ = message->update_stream_packet().begin_rect().width();
- rect_height_ = message->update_stream_packet().begin_rect().height();
- rect_x_ = message->update_stream_packet().begin_rect().x();
- rect_y_ = message->update_stream_packet().begin_rect().y();
-
- PixelFormat pixel_format =
- message->update_stream_packet().begin_rect().pixel_format();
- if (pixel_format != PixelFormatYv12)
- return false;
- return true;
-}
-
-bool DecoderVp8::HandleRectData(ChromotingHostMessage* message) {
- DCHECK_EQ(kWaitingForRectData, state_);
- DCHECK_EQ(0,
- message->update_stream_packet().rect_data().sequence_number());
+void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) {
+ DCHECK_EQ(kReady, state_);
// Initialize the codec as needed.
if (!codec_) {
@@ -112,25 +58,21 @@ bool DecoderVp8::HandleRectData(ChromotingHostMessage* message) {
LOG(INFO) << "Cannot initialize codec.";
delete codec_;
codec_ = NULL;
- return false;
+ state_ = kError;
+ return;
}
}
+ LOG(WARNING) << "Decoding " << encoded_bytes.size();
+
// Do the actual decoding.
vpx_codec_err_t ret = vpx_codec_decode(
- codec_,
- (uint8_t*)message->update_stream_packet().rect_data().data().c_str(),
- message->update_stream_packet().rect_data().data().size(),
- NULL, 0);
+ codec_, reinterpret_cast<const uint8*>(encoded_bytes.data()),
+ encoded_bytes.size(), NULL, 0);
if (ret != VPX_CODEC_OK) {
- LOG(INFO) << "Decoding failed:"
- << vpx_codec_err_to_string(ret)
- << "\n"
- << "Details: "
- << vpx_codec_error(codec_)
- << "\n"
+ LOG(INFO) << "Decoding failed:" << vpx_codec_err_to_string(ret) << "\n"
+ << "Details: " << vpx_codec_error(codec_) << "\n"
<< vpx_codec_error_detail(codec_);
- return false;
}
// Gets the decoded data.
@@ -138,28 +80,28 @@ bool DecoderVp8::HandleRectData(ChromotingHostMessage* message) {
vpx_image_t* image = vpx_codec_get_frame(codec_, &iter);
if (!image) {
LOG(INFO) << "No video frame decoded";
- return false;
}
// Perform YUV conversion.
media::ConvertYUVToRGB32(image->planes[0], image->planes[1], image->planes[2],
frame_->data(media::VideoFrame::kRGBPlane),
- rect_width_, rect_height_,
+ frame_->width(), frame_->height(),
image->stride[0], image->stride[1],
frame_->stride(media::VideoFrame::kRGBPlane),
media::YV12);
+}
+
+void DecoderVp8::Reset() {
+ frame_ = NULL;
+ state_ = kUninitialized;
+}
- updated_rects_->clear();
- updated_rects_->push_back(gfx::Rect(rect_x_, rect_y_,
- rect_width_, rect_height_));
- partial_decode_done_->Run();
- return true;
+bool DecoderVp8::IsReadyForData() {
+ return state_ == kReady;
}
-bool DecoderVp8::HandleEndRect(ChromotingHostMessage* message) {
- DCHECK_EQ(kWaitingForRectData, state_);
- state_ = kWaitingForBeginRect;
- return true;
+VideoPacketFormat::Encoding DecoderVp8::Encoding() {
+ return VideoPacketFormat::ENCODING_VP8;
}
} // namespace remoting
diff --git a/remoting/base/decoder_vp8.h b/remoting/base/decoder_vp8.h
index 5fe0169..dfef0b7 100644
--- a/remoting/base/decoder_vp8.h
+++ b/remoting/base/decoder_vp8.h
@@ -14,37 +14,34 @@ namespace remoting {
class DecoderVp8 : public Decoder {
public:
DecoderVp8();
- ~DecoderVp8();
+ virtual ~DecoderVp8();
// Decoder implementations.
- virtual bool BeginDecode(scoped_refptr<media::VideoFrame> frame,
- UpdatedRects* update_rects,
- Task* partial_decode_done,
- Task* decode_done);
- virtual bool PartialDecode(ChromotingHostMessage* message);
- virtual void EndDecode();
+ virtual void Initialize(scoped_refptr<media::VideoFrame> frame,
+ const gfx::Rect& clip, int bytes_per_src_pixel);
+
+ virtual void Reset();
+
+ // Feeds more data into the decoder.
+ virtual void DecodeBytes(const std::string& encoded_bytes);
+
+ // Returns true if decoder is ready to accept data via ProcessRectangleData.
+ virtual bool IsReadyForData();
+
+ virtual VideoPacketFormat::Encoding Encoding();
private:
- bool HandleBeginRect(ChromotingHostMessage* message);
- bool HandleRectData(ChromotingHostMessage* message);
- bool HandleEndRect(ChromotingHostMessage* message);
+ enum State {
+ kUninitialized,
+ kReady,
+ kError,
+ };
// The internal state of the decoder.
State state_;
- // Keeps track of the updating rect.
- int rect_x_;
- int rect_y_;
- int rect_width_;
- int rect_height_;
-
- // Tasks to call when decode is done.
- scoped_ptr<Task> partial_decode_done_;
- scoped_ptr<Task> decode_done_;
-
// The video frame to write to.
scoped_refptr<media::VideoFrame> frame_;
- UpdatedRects* updated_rects_;
vpx_codec_ctx_t* codec_;
diff --git a/remoting/base/encoder.h b/remoting/base/encoder.h
index f9d4043..05e40ff 100644
--- a/remoting/base/encoder.h
+++ b/remoting/base/encoder.h
@@ -25,23 +25,12 @@ class CaptureData;
class Encoder {
public:
- // EncodingState is a bitfield that tracks the state of the encoding.
- // An encoding that consists of a single block could concievably be starting
- // inprogress and ended at the same time.
- enum {
- EncodingStarting = 1 << 0,
- EncodingInProgress = 1 << 1,
- EncodingEnded = 1 << 2
- };
- typedef int EncodingState;
-
// DataAvailableCallback is called as blocks of data are made available
// from the encoder. Data made available by the encoder is in the form
// of HostMessage to reduce the amount of memory copies.
// The callback takes ownership of the HostMessage and is responsible for
// deleting it.
- typedef Callback2<ChromotingHostMessage*,
- EncodingState>::Type DataAvailableCallback;
+ typedef Callback1<VideoPacket*>::Type DataAvailableCallback;
virtual ~Encoder() {}
diff --git a/remoting/base/encoder_verbatim.cc b/remoting/base/encoder_verbatim.cc
index 8c59a7d..0bbe4e8 100644
--- a/remoting/base/encoder_verbatim.cc
+++ b/remoting/base/encoder_verbatim.cc
@@ -52,15 +52,14 @@ void EncoderVerbatim::EncodeRect(const gfx::Rect& rect, size_t rect_index) {
const int bytes_per_pixel = GetBytesPerPixel(capture_data_->pixel_format());
const int row_size = bytes_per_pixel * rect.width();
- ChromotingHostMessage* message = new ChromotingHostMessage();
- RectangleUpdatePacket* update = message->mutable_rectangle_update();
- PrepareUpdateStart(rect, update);
+ VideoPacket* packet = new VideoPacket();
+ PrepareUpdateStart(rect, packet);
const uint8* in = capture_data_->data_planes().data[0] +
rect.y() * stride +
rect.x() * bytes_per_pixel;
// TODO(hclam): Fill in the sequence number.
- uint8* out = GetOutputBuffer(update, packet_size_);
+ uint8* out = GetOutputBuffer(packet, packet_size_);
int total_bytes = 0;
for (int y = 0; y < rect.height(); y++) {
memcpy(out, in, row_size);
@@ -70,49 +69,36 @@ void EncoderVerbatim::EncodeRect(const gfx::Rect& rect, size_t rect_index) {
}
// We have reached the end of stream.
- update->set_flags(update->flags() | RectangleUpdatePacket::LAST_PACKET);
+ packet->set_flags(packet->flags() | VideoPacket::LAST_PACKET);
// If we have filled the message or we have reached the end of stream.
- message->mutable_rectangle_update()->mutable_encoded_rect()->
- resize(total_bytes);
- SubmitMessage(message, rect_index);
+ packet->mutable_data()->resize(total_bytes);
+ SubmitMessage(packet, rect_index);
}
void EncoderVerbatim::PrepareUpdateStart(const gfx::Rect& rect,
- RectangleUpdatePacket* update) {
+ VideoPacket* packet) {
- update->set_flags(update->flags() | RectangleUpdatePacket::FIRST_PACKET);
- RectangleFormat* format = update->mutable_format();
+ packet->set_flags(packet->flags() | VideoPacket::FIRST_PACKET);
+ VideoPacketFormat* format = packet->mutable_format();
format->set_x(rect.x());
format->set_y(rect.y());
format->set_width(rect.width());
format->set_height(rect.height());
- format->set_encoding(EncodingNone);
+ format->set_encoding(VideoPacketFormat::ENCODING_VERBATIM);
format->set_pixel_format(capture_data_->pixel_format());
}
-uint8* EncoderVerbatim::GetOutputBuffer(RectangleUpdatePacket* update,
- size_t size) {
- update->mutable_encoded_rect()->resize(size);
+uint8* EncoderVerbatim::GetOutputBuffer(VideoPacket* packet, size_t size) {
+ packet->mutable_data()->resize(size);
// TODO(ajwong): Is there a better way to do this at all???
return const_cast<uint8*>(reinterpret_cast<const uint8*>(
- update->mutable_encoded_rect()->data()));
+ packet->mutable_data()->data()));
}
-void EncoderVerbatim::SubmitMessage(ChromotingHostMessage* message,
- size_t rect_index) {
- EncodingState state = EncodingInProgress;
- const RectangleUpdatePacket& update = message->rectangle_update();
- if (rect_index == 0 &&
- (update.flags() | RectangleUpdatePacket::FIRST_PACKET)) {
- state |= EncodingStarting;
- }
- if (rect_index == capture_data_->dirty_rects().size() - 1 &&
- (update.flags() | RectangleUpdatePacket::LAST_PACKET)) {
- state |= EncodingEnded;
- }
- callback_->Run(message, state);
+void EncoderVerbatim::SubmitMessage(VideoPacket* packet, size_t rect_index) {
+ callback_->Run(packet);
}
} // namespace remoting
diff --git a/remoting/base/encoder_verbatim.h b/remoting/base/encoder_verbatim.h
index 10fa7ac..81d109d 100644
--- a/remoting/base/encoder_verbatim.h
+++ b/remoting/base/encoder_verbatim.h
@@ -31,15 +31,14 @@ class EncoderVerbatim : public Encoder {
void EncodeRect(const gfx::Rect& rect, size_t rect_index);
// Marks a packets as the first in a series of rectangle updates.
- void PrepareUpdateStart(const gfx::Rect& rect,
- RectangleUpdatePacket* update);
+ void PrepareUpdateStart(const gfx::Rect& rect, VideoPacket* packet);
// Retrieves a pointer to the output buffer in |update| used for storing the
// encoded rectangle data. Will resize the buffer to |size|.
- uint8* GetOutputBuffer(RectangleUpdatePacket* update, size_t size);
+ uint8* GetOutputBuffer(VideoPacket* packet, size_t size);
// Submit |message| to |callback_|.
- void SubmitMessage(ChromotingHostMessage* message, size_t rect_index);
+ void SubmitMessage(VideoPacket* packet, size_t rect_index);
scoped_refptr<CaptureData> capture_data_;
scoped_ptr<DataAvailableCallback> callback_;
diff --git a/remoting/base/encoder_vp8.cc b/remoting/base/encoder_vp8.cc
index eec6ed5..7a58f7b 100644
--- a/remoting/base/encoder_vp8.cc
+++ b/remoting/base/encoder_vp8.cc
@@ -48,6 +48,7 @@ bool EncoderVp8::Init(int width, int height) {
vpx_codec_enc_cfg_t config;
const vpx_codec_iface_t* algo =
(const vpx_codec_iface_t*)media::GetVp8CxAlgoAddress();
+ CHECK(algo);
vpx_codec_err_t ret = vpx_codec_enc_config_default(algo, &config, 0);
if (ret != VPX_CODEC_OK)
return false;
@@ -70,9 +71,19 @@ bool EncoderVp8::Init(int width, int height) {
return true;
}
+static int clip_byte(int x) {
+ if (x > 255)
+ return 255;
+ else if (x < 0)
+ return 0;
+ else
+ return x;
+}
+
bool EncoderVp8::PrepareImage(scoped_refptr<CaptureData> capture_data) {
+ const int plane_size = capture_data->width() * capture_data->height();
+
if (!yuv_image_.get()) {
- const int plane_size = capture_data->width() * capture_data->height();
// YUV image size is 1.5 times of a plane. Multiplication is performed first
// to avoid rounding error.
@@ -100,22 +111,36 @@ bool EncoderVp8::PrepareImage(scoped_refptr<CaptureData> capture_data) {
// And then do RGB->YUV conversion.
// Currently we just produce the Y channel as the average of RGB. This will
- // give a gray scale image after conversion.
- // TODO(hclam): Implement the actual color space conversion.
- DCHECK(capture_data->pixel_format() == PixelFormatRgb32)
+ give a gray scale image after conversion.
+ // TODO(sergeyu): Move this code to a separate routine.
+ // TODO(sergeyu): Optimize this code.
+ DCHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32)
<< "Only RGB32 is supported";
uint8* in = capture_data->data_planes().data[0];
const int in_stride = capture_data->data_planes().strides[0];
- uint8* out = yuv_image_.get();
+ uint8* y_out = yuv_image_.get();
+ uint8* u_out = yuv_image_.get() + plane_size;
+ uint8* v_out = yuv_image_.get() + plane_size + plane_size / 4;
const int out_stride = image_->stride[0];
for (int i = 0; i < capture_data->height(); ++i) {
for (int j = 0; j < capture_data->width(); ++j) {
// Since the input pixel format is RGB32, there are 4 bytes per pixel.
uint8* pixel = in + 4 * j;
- out[j] = (pixel[0] + pixel[1] + pixel[2]) / 3;
+ y_out[j] = clip_byte(((pixel[0] * 66 + pixel[1] * 129 +
+ pixel[2] * 25 + 128) >> 8) + 16);
+ if (i % 2 == 0 && j % 2 == 0) {
+ u_out[j / 2] = clip_byte(((pixel[0] * -38 + pixel[1] * -74 +
+ pixel[2] * 112 + 128) >> 8) + 128);
+ v_out[j / 2] = clip_byte(((pixel[0] * 112 + pixel[1] * -94 +
+ pixel[2] * -18 + 128) >> 8) + 128);
+ }
}
in += in_stride;
- out += out_stride;
+ y_out += out_stride;
+ if (i % 2 == 0) {
+ u_out += out_stride / 2;
+ v_out += out_stride / 2;
+ }
}
return true;
}
@@ -138,13 +163,10 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data,
vpx_codec_err_t ret = vpx_codec_encode(codec_.get(), image_.get(),
last_timestamp_,
1, 0, VPX_DL_REALTIME);
- DCHECK(ret == VPX_CODEC_OK) << "Encoding error: "
- << vpx_codec_err_to_string(ret)
- << "\n"
- << "Details: "
- << vpx_codec_error(codec_.get())
- << "\n"
- << vpx_codec_error_detail(codec_.get());
+ DCHECK_EQ(ret, VPX_CODEC_OK)
+ << "Encoding error: " << vpx_codec_err_to_string(ret) << "\n"
+ << "Details: " << vpx_codec_error(codec_.get()) << "\n"
+ << vpx_codec_error_detail(codec_.get());
// TODO(hclam): fix this.
last_timestamp_ += 100;
@@ -155,16 +177,7 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data,
// TODO(hclam): Make sure we get exactly one frame from the packet.
// TODO(hclam): We should provide the output buffer to avoid one copy.
- ChromotingHostMessage* message = new ChromotingHostMessage();
- UpdateStreamPacketMessage* packet = message->mutable_update_stream_packet();
-
- // Prepare the begin rect.
- packet->mutable_begin_rect()->set_x(0);
- packet->mutable_begin_rect()->set_y(0);
- packet->mutable_begin_rect()->set_width(capture_data->width());
- packet->mutable_begin_rect()->set_height(capture_data->height());
- packet->mutable_begin_rect()->set_encoding(EncodingVp8);
- packet->mutable_begin_rect()->set_pixel_format(PixelFormatYv12);
+ VideoPacket* message = new VideoPacket();
while (!got_data) {
const vpx_codec_cx_pkt_t* packet = vpx_codec_get_cx_data(codec_.get(),
@@ -175,7 +188,7 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data,
switch (packet->kind) {
case VPX_CODEC_CX_FRAME_PKT:
got_data = true;
- message->mutable_update_stream_packet()->mutable_rect_data()->set_data(
+ message->set_data(
packet->data.frame.buf, packet->data.frame.sz);
break;
default:
@@ -183,11 +196,15 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data,
}
}
- // Enter the end rect.
- message->mutable_update_stream_packet()->mutable_end_rect();
- data_available_callback->Run(
- message,
- EncodingStarting | EncodingInProgress | EncodingEnded);
+ message->mutable_format()->set_encoding(VideoPacketFormat::ENCODING_VP8);
+ message->set_flags(VideoPacket::FIRST_PACKET | VideoPacket::LAST_PACKET);
+ message->mutable_format()->set_pixel_format(PIXEL_FORMAT_RGB32);
+ message->mutable_format()->set_x(0);
+ message->mutable_format()->set_y(0);
+ message->mutable_format()->set_width(capture_data->width());
+ message->mutable_format()->set_height(capture_data->height());
+
+ data_available_callback->Run(message);
delete data_available_callback;
}
diff --git a/remoting/base/encoder_zlib.cc b/remoting/base/encoder_zlib.cc
index ddbe923..e184fd3 100644
--- a/remoting/base/encoder_zlib.cc
+++ b/remoting/base/encoder_zlib.cc
@@ -26,7 +26,7 @@ EncoderZlib::~EncoderZlib() {}
void EncoderZlib::Encode(scoped_refptr<CaptureData> capture_data,
bool key_frame,
DataAvailableCallback* data_available_callback) {
- CHECK(capture_data->pixel_format() == PixelFormatRgb32)
+ CHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32)
<< "Zlib Encoder only works with RGB32. Got "
<< capture_data->pixel_format();
capture_data_ = capture_data;
@@ -51,24 +51,22 @@ void EncoderZlib::EncodeRect(CompressorZlib* compressor,
const int bytes_per_pixel = GetBytesPerPixel(capture_data_->pixel_format());
const int row_size = bytes_per_pixel * rect.width();
- ChromotingHostMessage* message = new ChromotingHostMessage();
- RectangleUpdatePacket* update = message->mutable_rectangle_update();
- PrepareUpdateStart(rect, update);
+ VideoPacket* packet = new VideoPacket();
+ PrepareUpdateStart(rect, packet);
const uint8* in = capture_data_->data_planes().data[0] +
rect.y() * strides +
rect.x() * bytes_per_pixel;
// TODO(hclam): Fill in the sequence number.
- uint8* out = GetOutputBuffer(update, packet_size_);
+ uint8* out = GetOutputBuffer(packet, packet_size_);
int filled = 0;
int row_x = 0;
int row_y = 0;
bool compress_again = true;
while (compress_again) {
// Prepare a message for sending out.
- if (!message) {
- message = new ChromotingHostMessage();
- update = message->mutable_rectangle_update();
- out = GetOutputBuffer(update, packet_size_);
+ if (!packet) {
+ packet = new VideoPacket();
+ out = GetOutputBuffer(packet, packet_size_);
filled = 0;
}
@@ -91,15 +89,14 @@ void EncoderZlib::EncodeRect(CompressorZlib* compressor,
// We have reached the end of stream.
if (!compress_again) {
- update->set_flags(update->flags() | RectangleUpdatePacket::LAST_PACKET);
+ packet->set_flags(packet->flags() | VideoPacket::LAST_PACKET);
}
// If we have filled the message or we have reached the end of stream.
if (filled == packet_size_ || !compress_again) {
- message->mutable_rectangle_update()->mutable_encoded_rect()->
- resize(filled);
- SubmitMessage(message, rect_index);
- message = NULL;
+ packet->mutable_data()->resize(filled);
+ SubmitMessage(packet, rect_index);
+ packet = NULL;
}
// Reached the end of input row and we're not at the last row.
@@ -112,40 +109,27 @@ void EncoderZlib::EncodeRect(CompressorZlib* compressor,
}
void EncoderZlib::PrepareUpdateStart(const gfx::Rect& rect,
- RectangleUpdatePacket* update) {
-
- update->set_flags(update->flags() | RectangleUpdatePacket::FIRST_PACKET);
- RectangleFormat* format = update->mutable_format();
+ VideoPacket* packet) {
+ packet->set_flags(packet->flags() | VideoPacket::FIRST_PACKET);
+ VideoPacketFormat* format = packet->mutable_format();
format->set_x(rect.x());
format->set_y(rect.y());
format->set_width(rect.width());
format->set_height(rect.height());
- format->set_encoding(EncodingZlib);
+ format->set_encoding(VideoPacketFormat::ENCODING_ZLIB);
format->set_pixel_format(capture_data_->pixel_format());
}
-uint8* EncoderZlib::GetOutputBuffer(RectangleUpdatePacket* update,
- size_t size) {
- update->mutable_encoded_rect()->resize(size);
+uint8* EncoderZlib::GetOutputBuffer(VideoPacket* packet, size_t size) {
+ packet->mutable_data()->resize(size);
// TODO(ajwong): Is there a better way to do this at all???
return const_cast<uint8*>(reinterpret_cast<const uint8*>(
- update->mutable_encoded_rect()->data()));
+ packet->mutable_data()->data()));
}
-void EncoderZlib::SubmitMessage(ChromotingHostMessage* message,
- size_t rect_index) {
- EncodingState state = EncodingInProgress;
- const RectangleUpdatePacket& update = message->rectangle_update();
- if (rect_index == 0 &&
- (update.flags() | RectangleUpdatePacket::FIRST_PACKET)) {
- state |= EncodingStarting;
- }
- if (rect_index == capture_data_->dirty_rects().size() - 1 &&
- (update.flags() | RectangleUpdatePacket::LAST_PACKET)) {
- state |= EncodingEnded;
- }
- callback_->Run(message, state);
+void EncoderZlib::SubmitMessage(VideoPacket* packet, size_t rect_index) {
+ callback_->Run(packet);
}
} // namespace remoting
diff --git a/remoting/base/encoder_zlib.h b/remoting/base/encoder_zlib.h
index 3d0a13e..6699f03 100644
--- a/remoting/base/encoder_zlib.h
+++ b/remoting/base/encoder_zlib.h
@@ -32,15 +32,14 @@ class EncoderZlib : public Encoder {
size_t rect_index);
// Marks a packets as the first in a series of rectangle updates.
- void PrepareUpdateStart(const gfx::Rect& rect,
- RectangleUpdatePacket* update);
+ void PrepareUpdateStart(const gfx::Rect& rect, VideoPacket* packet);
// Retrieves a pointer to the output buffer in |update| used for storing the
// encoded rectangle data. Will resize the buffer to |size|.
- uint8* GetOutputBuffer(RectangleUpdatePacket* update, size_t size);
+ uint8* GetOutputBuffer(VideoPacket* packet, size_t size);
// Submit |message| to |callback_|.
- void SubmitMessage(ChromotingHostMessage* message, size_t rect_index);
+ void SubmitMessage(VideoPacket* packet, size_t rect_index);
scoped_refptr<CaptureData> capture_data_;
scoped_ptr<DataAvailableCallback> callback_;
diff --git a/remoting/base/multiple_array_input_stream_unittest.cc b/remoting/base/multiple_array_input_stream_unittest.cc
index 4a840e4..1a21add 100644
--- a/remoting/base/multiple_array_input_stream_unittest.cc
+++ b/remoting/base/multiple_array_input_stream_unittest.cc
@@ -46,7 +46,7 @@ static void ReadString(MultipleArrayInputStream* input,
scoped_array<char> buffer(new char[str.size() + 1]);
buffer[str.size()] = '\0';
EXPECT_EQ(ReadFromInput(input, buffer.get(), str.size()), str.size());
- EXPECT_STREQ(str.c_str(), buffer.get());
+ EXPECT_STREQ(str.data(), buffer.get());
}
// Construct and prepare data in the |output_stream|.
@@ -69,7 +69,7 @@ static void PrepareData(scoped_ptr<MultipleArrayInputStream>* stream) {
}
MultipleArrayInputStream* mstream = new MultipleArrayInputStream();
- const char* data = kTestData.c_str();
+ const char* data = kTestData.data();
for (int i = 0; i < segments; ++i) {
int size = i % 2 == 0 ? 1 : 2;
mstream->AddBuffer(new net::StringIOBuffer(std::string(data, size)), size);
diff --git a/remoting/base/util.cc b/remoting/base/util.cc
index 8ccbd23..f24d322 100644
--- a/remoting/base/util.cc
+++ b/remoting/base/util.cc
@@ -10,13 +10,13 @@ namespace remoting {
int GetBytesPerPixel(PixelFormat format) {
// Note: The order is important here for performance. This is sorted from the
- // most common to the less common (PixelFormatAscii is mostly used
+ // most common to the less common (PIXEL_FORMAT_ASCII is mostly used
// just for testing).
switch (format) {
- case PixelFormatRgb24: return 3;
- case PixelFormatRgb565: return 2;
- case PixelFormatRgb32: return 4;
- case PixelFormatAscii: return 1;
+ case PIXEL_FORMAT_RGB24: return 3;
+ case PIXEL_FORMAT_RGB565: return 2;
+ case PIXEL_FORMAT_RGB32: return 4;
+ case PIXEL_FORMAT_ASCII: return 1;
default:
NOTREACHED() << "Pixel format not supported";
return 0;
diff --git a/remoting/base/util.h b/remoting/base/util.h
index e3db289..d7f4128 100644
--- a/remoting/base/util.h
+++ b/remoting/base/util.h
@@ -5,7 +5,7 @@
#ifndef REMOTING_BASE_UTIL_H_
#define REMOTING_BASE_UTIL_H_
-#include "remoting/proto/event.pb.h"
+#include "remoting/proto/video.pb.h"
namespace remoting {