diff options
42 files changed, 510 insertions, 424 deletions
diff --git a/remoting/base/capture_data.h b/remoting/base/capture_data.h index 97631ab..0d707f9 100644 --- a/remoting/base/capture_data.h +++ b/remoting/base/capture_data.h @@ -10,7 +10,7 @@ #include "base/basictypes.h" #include "base/ref_counted.h" #include "remoting/base/types.h" -#include "remoting/proto/video.pb.h" +#include "remoting/proto/event.pb.h" namespace remoting { diff --git a/remoting/base/codec_test.cc b/remoting/base/codec_test.cc index dcc165e..62b98607 100644 --- a/remoting/base/codec_test.cc +++ b/remoting/base/codec_test.cc @@ -317,7 +317,7 @@ class EncoderTester { scoped_refptr<CaptureData> PrepareEncodeData(PixelFormat format, uint8** memory) { // TODO(hclam): Support also YUV format. - CHECK(format == PIXEL_FORMAT_RGB32); + CHECK(format == PixelFormatRgb32); int size = kWidth * kHeight * kBytesPerPixel; *memory = new uint8[size]; @@ -360,7 +360,7 @@ void TestEncoder(Encoder* encoder, bool strict) { uint8* memory; scoped_refptr<CaptureData> data = - PrepareEncodeData(PIXEL_FORMAT_RGB32, &memory); + PrepareEncodeData(PixelFormatRgb32, &memory); TestEncodingRects(encoder, &tester, data, kTestRects, 1); TestEncodingRects(encoder, &tester, data, kTestRects + 1, 1); @@ -412,7 +412,7 @@ void TestEncoderDecoder(Encoder* encoder, Decoder* decoder, bool strict) { uint8* memory; scoped_refptr<CaptureData> data = - PrepareEncodeData(PIXEL_FORMAT_RGB32, &memory); + PrepareEncodeData(PixelFormatRgb32, &memory); DecoderTester decoder_tester(decoder); decoder_tester.set_strict(strict); decoder_tester.set_capture_data(data); diff --git a/remoting/base/decoder.h b/remoting/base/decoder.h index 82211a7..0a6ceff 100644 --- a/remoting/base/decoder.h +++ b/remoting/base/decoder.h @@ -9,7 +9,7 @@ #include "base/scoped_ptr.h" #include "gfx/rect.h" #include "media/base/video_frame.h" -#include "remoting/proto/video.pb.h" +#include "remoting/proto/event.pb.h" namespace remoting { @@ -51,7 +51,7 @@ class Decoder { // Returns true if decoder is ready to accept data via 
ProcessRectangleData. virtual bool IsReadyForData() = 0; - virtual VideoPacketFormat::Encoding Encoding() = 0; + virtual UpdateStreamEncoding Encoding() = 0; }; } // namespace remoting diff --git a/remoting/base/decoder_row_based.cc b/remoting/base/decoder_row_based.cc index d378e96..65c74c5 100644 --- a/remoting/base/decoder_row_based.cc +++ b/remoting/base/decoder_row_based.cc @@ -12,17 +12,15 @@ namespace remoting { DecoderRowBased* DecoderRowBased::CreateZlibDecoder() { - return new DecoderRowBased(new DecompressorZlib(), - VideoPacketFormat::ENCODING_ZLIB); + return new DecoderRowBased(new DecompressorZlib(), EncodingZlib); } DecoderRowBased* DecoderRowBased::CreateVerbatimDecoder() { - return new DecoderRowBased(new DecompressorVerbatim(), - VideoPacketFormat::ENCODING_VERBATIM); + return new DecoderRowBased(new DecompressorVerbatim(), EncodingNone); } DecoderRowBased::DecoderRowBased(Decompressor* decompressor, - VideoPacketFormat::Encoding encoding) + UpdateStreamEncoding encoding) : state_(kUninitialized), decompressor_(decompressor), encoding_(encoding), @@ -54,7 +52,7 @@ void DecoderRowBased::Initialize(scoped_refptr<media::VideoFrame> frame, // Make sure we are not currently initialized. 
CHECK_EQ(kUninitialized, state_); - if (static_cast<PixelFormat>(frame->format()) != PIXEL_FORMAT_RGB32) { + if (static_cast<PixelFormat>(frame->format()) != PixelFormatRgb32) { LOG(WARNING) << "DecoderRowBased only supports RGB32."; state_ = kError; return; diff --git a/remoting/base/decoder_row_based.h b/remoting/base/decoder_row_based.h index 2deb897..c226db2 100644 --- a/remoting/base/decoder_row_based.h +++ b/remoting/base/decoder_row_based.h @@ -24,7 +24,7 @@ class DecoderRowBased : public Decoder { virtual void Initialize(scoped_refptr<media::VideoFrame> frame, const gfx::Rect& clip, int bytes_per_src_pixel); virtual void DecodeBytes(const std::string& encoded_bytes); - virtual VideoPacketFormat::Encoding Encoding() { return encoding_; } + virtual UpdateStreamEncoding Encoding() { return encoding_; } // TODO(hclam): Should make this into the Decoder interface. // TODO(ajwong): Before putting into the interface, we should decide if the @@ -32,8 +32,7 @@ class DecoderRowBased : public Decoder { void set_reverse_rows(bool reverse) { reverse_rows_ = reverse; } private: - DecoderRowBased(Decompressor* decompressor, - VideoPacketFormat::Encoding encoding); + DecoderRowBased(Decompressor* decompressor, UpdateStreamEncoding encoding); enum State { kUninitialized, @@ -54,7 +53,7 @@ class DecoderRowBased : public Decoder { scoped_ptr<Decompressor> decompressor_; // The encoding of the incoming stream. - VideoPacketFormat::Encoding encoding_; + UpdateStreamEncoding encoding_; // Number of bytes per pixel from source stream. 
int bytes_per_src_pixel_; diff --git a/remoting/base/decoder_vp8.cc b/remoting/base/decoder_vp8.cc index a8d5245..452cd1f 100644 --- a/remoting/base/decoder_vp8.cc +++ b/remoting/base/decoder_vp8.cc @@ -18,7 +18,12 @@ extern "C" { namespace remoting { DecoderVp8::DecoderVp8() - : state_(kUninitialized), + : state_(kWaitingForBeginRect), + rect_x_(0), + rect_y_(0), + rect_width_(0), + rect_height_(0), + updated_rects_(NULL), codec_(NULL) { } @@ -30,22 +35,71 @@ DecoderVp8::~DecoderVp8() { delete codec_; } -void DecoderVp8::Initialize(scoped_refptr<media::VideoFrame> frame, - const gfx::Rect& clip, int bytes_per_src_pixel) { - DCHECK_EQ(kUninitialized, state_); +bool DecoderVp8::BeginDecode(scoped_refptr<media::VideoFrame> frame, + UpdatedRects* updated_rects, + Task* partial_decode_done, + Task* decode_done) { + DCHECK(!partial_decode_done_.get()); + DCHECK(!decode_done_.get()); + DCHECK(!updated_rects_); + DCHECK_EQ(kWaitingForBeginRect, state_); + + partial_decode_done_.reset(partial_decode_done); + decode_done_.reset(decode_done); + updated_rects_ = updated_rects; if (frame->format() != media::VideoFrame::RGB32) { LOG(INFO) << "DecoderVp8 only supports RGB32 as output"; - state_ = kError; - return; + return false; } frame_ = frame; + return true; +} + +bool DecoderVp8::PartialDecode(ChromotingHostMessage* message) { + scoped_ptr<ChromotingHostMessage> msg_deleter(message); + DCHECK(message->has_update_stream_packet()); + + bool ret = true; + if (message->update_stream_packet().has_begin_rect()) + ret = HandleBeginRect(message); + if (ret && message->update_stream_packet().has_rect_data()) + ret = HandleRectData(message); + if (ret && message->update_stream_packet().has_end_rect()) + ret = HandleEndRect(message); + return ret; +} - state_ = kReady; +void DecoderVp8::EndDecode() { + DCHECK_EQ(kWaitingForBeginRect, state_); + decode_done_->Run(); + + partial_decode_done_.reset(); + decode_done_.reset(); + frame_ = NULL; + updated_rects_ = NULL; } -void 
DecoderVp8::DecodeBytes(const std::string& encoded_bytes) { - DCHECK_EQ(kReady, state_); +bool DecoderVp8::HandleBeginRect(ChromotingHostMessage* message) { + DCHECK_EQ(kWaitingForBeginRect, state_); + state_ = kWaitingForRectData; + + rect_width_ = message->update_stream_packet().begin_rect().width(); + rect_height_ = message->update_stream_packet().begin_rect().height(); + rect_x_ = message->update_stream_packet().begin_rect().x(); + rect_y_ = message->update_stream_packet().begin_rect().y(); + + PixelFormat pixel_format = + message->update_stream_packet().begin_rect().pixel_format(); + if (pixel_format != PixelFormatYv12) + return false; + return true; +} + +bool DecoderVp8::HandleRectData(ChromotingHostMessage* message) { + DCHECK_EQ(kWaitingForRectData, state_); + DCHECK_EQ(0, + message->update_stream_packet().rect_data().sequence_number()); // Initialize the codec as needed. if (!codec_) { @@ -58,21 +112,25 @@ void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) { LOG(INFO) << "Cannot initialize codec."; delete codec_; codec_ = NULL; - state_ = kError; - return; + return false; } } - LOG(WARNING) << "Decoding " << encoded_bytes.size(); - // Do the actual decoding. vpx_codec_err_t ret = vpx_codec_decode( - codec_, reinterpret_cast<const uint8*>(encoded_bytes.data()), - encoded_bytes.size(), NULL, 0); + codec_, + (uint8_t*)message->update_stream_packet().rect_data().data().c_str(), + message->update_stream_packet().rect_data().data().size(), + NULL, 0); if (ret != VPX_CODEC_OK) { - LOG(INFO) << "Decoding failed:" << vpx_codec_err_to_string(ret) << "\n" - << "Details: " << vpx_codec_error(codec_) << "\n" + LOG(INFO) << "Decoding failed:" + << vpx_codec_err_to_string(ret) + << "\n" + << "Details: " + << vpx_codec_error(codec_) + << "\n" << vpx_codec_error_detail(codec_); + return false; } // Gets the decoded data. 
@@ -80,28 +138,28 @@ void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) { vpx_image_t* image = vpx_codec_get_frame(codec_, &iter); if (!image) { LOG(INFO) << "No video frame decoded"; + return false; } // Perform YUV conversion. media::ConvertYUVToRGB32(image->planes[0], image->planes[1], image->planes[2], frame_->data(media::VideoFrame::kRGBPlane), - frame_->width(), frame_->height(), + rect_width_, rect_height_, image->stride[0], image->stride[1], frame_->stride(media::VideoFrame::kRGBPlane), media::YV12); -} - -void DecoderVp8::Reset() { - frame_ = NULL; - state_ = kUninitialized; -} -bool DecoderVp8::IsReadyForData() { - return state_ == kReady; + updated_rects_->clear(); + updated_rects_->push_back(gfx::Rect(rect_x_, rect_y_, + rect_width_, rect_height_)); + partial_decode_done_->Run(); + return true; } -VideoPacketFormat::Encoding DecoderVp8::Encoding() { - return VideoPacketFormat::ENCODING_VP8; +bool DecoderVp8::HandleEndRect(ChromotingHostMessage* message) { + DCHECK_EQ(kWaitingForRectData, state_); + state_ = kWaitingForBeginRect; + return true; } } // namespace remoting diff --git a/remoting/base/decoder_vp8.h b/remoting/base/decoder_vp8.h index dfef0b7..5fe0169 100644 --- a/remoting/base/decoder_vp8.h +++ b/remoting/base/decoder_vp8.h @@ -14,34 +14,37 @@ namespace remoting { class DecoderVp8 : public Decoder { public: DecoderVp8(); - virtual ~DecoderVp8(); + ~DecoderVp8(); // Decoder implementations. - virtual void Initialize(scoped_refptr<media::VideoFrame> frame, - const gfx::Rect& clip, int bytes_per_src_pixel); - - virtual void Reset(); - - // Feeds more data into the decoder. - virtual void DecodeBytes(const std::string& encoded_bytes); - - // Returns true if decoder is ready to accept data via ProcessRectangleData. 
- virtual bool IsReadyForData(); - - virtual VideoPacketFormat::Encoding Encoding(); + virtual bool BeginDecode(scoped_refptr<media::VideoFrame> frame, + UpdatedRects* update_rects, + Task* partial_decode_done, + Task* decode_done); + virtual bool PartialDecode(ChromotingHostMessage* message); + virtual void EndDecode(); private: - enum State { - kUninitialized, - kReady, - kError, - }; + bool HandleBeginRect(ChromotingHostMessage* message); + bool HandleRectData(ChromotingHostMessage* message); + bool HandleEndRect(ChromotingHostMessage* message); // The internal state of the decoder. State state_; + // Keeps track of the updating rect. + int rect_x_; + int rect_y_; + int rect_width_; + int rect_height_; + + // Tasks to call when decode is done. + scoped_ptr<Task> partial_decode_done_; + scoped_ptr<Task> decode_done_; + // The video frame to write to. scoped_refptr<media::VideoFrame> frame_; + UpdatedRects* updated_rects_; vpx_codec_ctx_t* codec_; diff --git a/remoting/base/encoder.h b/remoting/base/encoder.h index 05e40ff..f9d4043 100644 --- a/remoting/base/encoder.h +++ b/remoting/base/encoder.h @@ -25,12 +25,23 @@ class CaptureData; class Encoder { public: + // EncodingState is a bitfield that tracks the state of the encoding. + // An encoding that consists of a single block could concievably be starting + // inprogress and ended at the same time. + enum { + EncodingStarting = 1 << 0, + EncodingInProgress = 1 << 1, + EncodingEnded = 1 << 2 + }; + typedef int EncodingState; + // DataAvailableCallback is called as blocks of data are made available // from the encoder. Data made available by the encoder is in the form // of HostMessage to reduce the amount of memory copies. // The callback takes ownership of the HostMessage and is responsible for // deleting it. 
- typedef Callback1<VideoPacket*>::Type DataAvailableCallback; + typedef Callback2<ChromotingHostMessage*, + EncodingState>::Type DataAvailableCallback; virtual ~Encoder() {} diff --git a/remoting/base/encoder_verbatim.cc b/remoting/base/encoder_verbatim.cc index 0bbe4e8..8c59a7d 100644 --- a/remoting/base/encoder_verbatim.cc +++ b/remoting/base/encoder_verbatim.cc @@ -52,14 +52,15 @@ void EncoderVerbatim::EncodeRect(const gfx::Rect& rect, size_t rect_index) { const int bytes_per_pixel = GetBytesPerPixel(capture_data_->pixel_format()); const int row_size = bytes_per_pixel * rect.width(); - VideoPacket* packet = new VideoPacket(); - PrepareUpdateStart(rect, packet); + ChromotingHostMessage* message = new ChromotingHostMessage(); + RectangleUpdatePacket* update = message->mutable_rectangle_update(); + PrepareUpdateStart(rect, update); const uint8* in = capture_data_->data_planes().data[0] + rect.y() * stride + rect.x() * bytes_per_pixel; // TODO(hclam): Fill in the sequence number. - uint8* out = GetOutputBuffer(packet, packet_size_); + uint8* out = GetOutputBuffer(update, packet_size_); int total_bytes = 0; for (int y = 0; y < rect.height(); y++) { memcpy(out, in, row_size); @@ -69,36 +70,49 @@ void EncoderVerbatim::EncodeRect(const gfx::Rect& rect, size_t rect_index) { } // We have reached the end of stream. - packet->set_flags(packet->flags() | VideoPacket::LAST_PACKET); + update->set_flags(update->flags() | RectangleUpdatePacket::LAST_PACKET); // If we have filled the message or we have reached the end of stream. 
- packet->mutable_data()->resize(total_bytes); - SubmitMessage(packet, rect_index); + message->mutable_rectangle_update()->mutable_encoded_rect()-> + resize(total_bytes); + SubmitMessage(message, rect_index); } void EncoderVerbatim::PrepareUpdateStart(const gfx::Rect& rect, - VideoPacket* packet) { + RectangleUpdatePacket* update) { - packet->set_flags(packet->flags() | VideoPacket::FIRST_PACKET); - VideoPacketFormat* format = packet->mutable_format(); + update->set_flags(update->flags() | RectangleUpdatePacket::FIRST_PACKET); + RectangleFormat* format = update->mutable_format(); format->set_x(rect.x()); format->set_y(rect.y()); format->set_width(rect.width()); format->set_height(rect.height()); - format->set_encoding(VideoPacketFormat::ENCODING_VERBATIM); + format->set_encoding(EncodingNone); format->set_pixel_format(capture_data_->pixel_format()); } -uint8* EncoderVerbatim::GetOutputBuffer(VideoPacket* packet, size_t size) { - packet->mutable_data()->resize(size); +uint8* EncoderVerbatim::GetOutputBuffer(RectangleUpdatePacket* update, + size_t size) { + update->mutable_encoded_rect()->resize(size); // TODO(ajwong): Is there a better way to do this at all??? 
return const_cast<uint8*>(reinterpret_cast<const uint8*>( - packet->mutable_data()->data())); + update->mutable_encoded_rect()->data())); } -void EncoderVerbatim::SubmitMessage(VideoPacket* packet, size_t rect_index) { - callback_->Run(packet); +void EncoderVerbatim::SubmitMessage(ChromotingHostMessage* message, + size_t rect_index) { + EncodingState state = EncodingInProgress; + const RectangleUpdatePacket& update = message->rectangle_update(); + if (rect_index == 0 && + (update.flags() | RectangleUpdatePacket::FIRST_PACKET)) { + state |= EncodingStarting; + } + if (rect_index == capture_data_->dirty_rects().size() - 1 && + (update.flags() | RectangleUpdatePacket::LAST_PACKET)) { + state |= EncodingEnded; + } + callback_->Run(message, state); } } // namespace remoting diff --git a/remoting/base/encoder_verbatim.h b/remoting/base/encoder_verbatim.h index 81d109d..10fa7ac 100644 --- a/remoting/base/encoder_verbatim.h +++ b/remoting/base/encoder_verbatim.h @@ -31,14 +31,15 @@ class EncoderVerbatim : public Encoder { void EncodeRect(const gfx::Rect& rect, size_t rect_index); // Marks a packets as the first in a series of rectangle updates. - void PrepareUpdateStart(const gfx::Rect& rect, VideoPacket* packet); + void PrepareUpdateStart(const gfx::Rect& rect, + RectangleUpdatePacket* update); // Retrieves a pointer to the output buffer in |update| used for storing the // encoded rectangle data. Will resize the buffer to |size|. - uint8* GetOutputBuffer(VideoPacket* packet, size_t size); + uint8* GetOutputBuffer(RectangleUpdatePacket* update, size_t size); // Submit |message| to |callback_|. 
- void SubmitMessage(VideoPacket* packet, size_t rect_index); + void SubmitMessage(ChromotingHostMessage* message, size_t rect_index); scoped_refptr<CaptureData> capture_data_; scoped_ptr<DataAvailableCallback> callback_; diff --git a/remoting/base/encoder_vp8.cc b/remoting/base/encoder_vp8.cc index 7a58f7b..eec6ed5 100644 --- a/remoting/base/encoder_vp8.cc +++ b/remoting/base/encoder_vp8.cc @@ -48,7 +48,6 @@ bool EncoderVp8::Init(int width, int height) { vpx_codec_enc_cfg_t config; const vpx_codec_iface_t* algo = (const vpx_codec_iface_t*)media::GetVp8CxAlgoAddress(); - CHECK(algo); vpx_codec_err_t ret = vpx_codec_enc_config_default(algo, &config, 0); if (ret != VPX_CODEC_OK) return false; @@ -71,19 +70,9 @@ bool EncoderVp8::Init(int width, int height) { return true; } -static int clip_byte(int x) { - if (x > 255) - return 255; - else if (x < 0) - return 0; - else - return x; -} - bool EncoderVp8::PrepareImage(scoped_refptr<CaptureData> capture_data) { - const int plane_size = capture_data->width() * capture_data->height(); - if (!yuv_image_.get()) { + const int plane_size = capture_data->width() * capture_data->height(); // YUV image size is 1.5 times of a plane. Multiplication is performed first // to avoid rounding error. @@ -111,36 +100,22 @@ bool EncoderVp8::PrepareImage(scoped_refptr<CaptureData> capture_data) { // And then do RGB->YUV conversion. // Currently we just produce the Y channel as the average of RGB. This will - // giv ae gray scale image after conversion. - // TODO(sergeyu): Move this code to a separate routine. - // TODO(sergeyu): Optimize this code. - DCHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32) + // give a gray scale image after conversion. + // TODO(hclam): Implement the actual color space conversion. 
+ DCHECK(capture_data->pixel_format() == PixelFormatRgb32) << "Only RGB32 is supported"; uint8* in = capture_data->data_planes().data[0]; const int in_stride = capture_data->data_planes().strides[0]; - uint8* y_out = yuv_image_.get(); - uint8* u_out = yuv_image_.get() + plane_size; - uint8* v_out = yuv_image_.get() + plane_size + plane_size / 4; + uint8* out = yuv_image_.get(); const int out_stride = image_->stride[0]; for (int i = 0; i < capture_data->height(); ++i) { for (int j = 0; j < capture_data->width(); ++j) { // Since the input pixel format is RGB32, there are 4 bytes per pixel. uint8* pixel = in + 4 * j; - y_out[j] = clip_byte(((pixel[0] * 66 + pixel[1] * 129 + - pixel[2] * 25 + 128) >> 8) + 16); - if (i % 2 == 0 && j % 2 == 0) { - u_out[j / 2] = clip_byte(((pixel[0] * -38 + pixel[1] * -74 + - pixel[2] * 112 + 128) >> 8) + 128); - v_out[j / 2] = clip_byte(((pixel[0] * 112 + pixel[1] * -94 + - pixel[2] * -18 + 128) >> 8) + 128); - } + out[j] = (pixel[0] + pixel[1] + pixel[2]) / 3; } in += in_stride; - y_out += out_stride; - if (i % 2 == 0) { - u_out += out_stride / 2; - v_out += out_stride / 2; - } + out += out_stride; } return true; } @@ -163,10 +138,13 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data, vpx_codec_err_t ret = vpx_codec_encode(codec_.get(), image_.get(), last_timestamp_, 1, 0, VPX_DL_REALTIME); - DCHECK_EQ(ret, VPX_CODEC_OK) - << "Encoding error: " << vpx_codec_err_to_string(ret) << "\n" - << "Details: " << vpx_codec_error(codec_.get()) << "\n" - << vpx_codec_error_detail(codec_.get()); + DCHECK(ret == VPX_CODEC_OK) << "Encoding error: " + << vpx_codec_err_to_string(ret) + << "\n" + << "Details: " + << vpx_codec_error(codec_.get()) + << "\n" + << vpx_codec_error_detail(codec_.get()); // TODO(hclam): fix this. last_timestamp_ += 100; @@ -177,7 +155,16 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data, // TODO(hclam): Make sure we get exactly one frame from the packet. 
// TODO(hclam): We should provide the output buffer to avoid one copy. - VideoPacket* message = new VideoPacket(); + ChromotingHostMessage* message = new ChromotingHostMessage(); + UpdateStreamPacketMessage* packet = message->mutable_update_stream_packet(); + + // Prepare the begin rect. + packet->mutable_begin_rect()->set_x(0); + packet->mutable_begin_rect()->set_y(0); + packet->mutable_begin_rect()->set_width(capture_data->width()); + packet->mutable_begin_rect()->set_height(capture_data->height()); + packet->mutable_begin_rect()->set_encoding(EncodingVp8); + packet->mutable_begin_rect()->set_pixel_format(PixelFormatYv12); while (!got_data) { const vpx_codec_cx_pkt_t* packet = vpx_codec_get_cx_data(codec_.get(), @@ -188,7 +175,7 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data, switch (packet->kind) { case VPX_CODEC_CX_FRAME_PKT: got_data = true; - message->set_data( + message->mutable_update_stream_packet()->mutable_rect_data()->set_data( packet->data.frame.buf, packet->data.frame.sz); break; default: @@ -196,15 +183,11 @@ void EncoderVp8::Encode(scoped_refptr<CaptureData> capture_data, } } - message->mutable_format()->set_encoding(VideoPacketFormat::ENCODING_VP8); - message->set_flags(VideoPacket::FIRST_PACKET | VideoPacket::LAST_PACKET); - message->mutable_format()->set_pixel_format(PIXEL_FORMAT_RGB32); - message->mutable_format()->set_x(0); - message->mutable_format()->set_y(0); - message->mutable_format()->set_width(capture_data->width()); - message->mutable_format()->set_height(capture_data->height()); - - data_available_callback->Run(message); + // Enter the end rect. 
+ message->mutable_update_stream_packet()->mutable_end_rect(); + data_available_callback->Run( + message, + EncodingStarting | EncodingInProgress | EncodingEnded); delete data_available_callback; } diff --git a/remoting/base/encoder_zlib.cc b/remoting/base/encoder_zlib.cc index e184fd3..ddbe923 100644 --- a/remoting/base/encoder_zlib.cc +++ b/remoting/base/encoder_zlib.cc @@ -26,7 +26,7 @@ EncoderZlib::~EncoderZlib() {} void EncoderZlib::Encode(scoped_refptr<CaptureData> capture_data, bool key_frame, DataAvailableCallback* data_available_callback) { - CHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32) + CHECK(capture_data->pixel_format() == PixelFormatRgb32) << "Zlib Encoder only works with RGB32. Got " << capture_data->pixel_format(); capture_data_ = capture_data; @@ -51,22 +51,24 @@ void EncoderZlib::EncodeRect(CompressorZlib* compressor, const int bytes_per_pixel = GetBytesPerPixel(capture_data_->pixel_format()); const int row_size = bytes_per_pixel * rect.width(); - VideoPacket* packet = new VideoPacket(); - PrepareUpdateStart(rect, packet); + ChromotingHostMessage* message = new ChromotingHostMessage(); + RectangleUpdatePacket* update = message->mutable_rectangle_update(); + PrepareUpdateStart(rect, update); const uint8* in = capture_data_->data_planes().data[0] + rect.y() * strides + rect.x() * bytes_per_pixel; // TODO(hclam): Fill in the sequence number. - uint8* out = GetOutputBuffer(packet, packet_size_); + uint8* out = GetOutputBuffer(update, packet_size_); int filled = 0; int row_x = 0; int row_y = 0; bool compress_again = true; while (compress_again) { // Prepare a message for sending out. 
- if (!packet) { - packet = new VideoPacket(); - out = GetOutputBuffer(packet, packet_size_); + if (!message) { + message = new ChromotingHostMessage(); + update = message->mutable_rectangle_update(); + out = GetOutputBuffer(update, packet_size_); filled = 0; } @@ -89,14 +91,15 @@ void EncoderZlib::EncodeRect(CompressorZlib* compressor, // We have reached the end of stream. if (!compress_again) { - packet->set_flags(packet->flags() | VideoPacket::LAST_PACKET); + update->set_flags(update->flags() | RectangleUpdatePacket::LAST_PACKET); } // If we have filled the message or we have reached the end of stream. if (filled == packet_size_ || !compress_again) { - packet->mutable_data()->resize(filled); - SubmitMessage(packet, rect_index); - packet = NULL; + message->mutable_rectangle_update()->mutable_encoded_rect()-> + resize(filled); + SubmitMessage(message, rect_index); + message = NULL; } // Reached the end of input row and we're not at the last row. @@ -109,27 +112,40 @@ void EncoderZlib::EncodeRect(CompressorZlib* compressor, } void EncoderZlib::PrepareUpdateStart(const gfx::Rect& rect, - VideoPacket* packet) { - packet->set_flags(packet->flags() | VideoPacket::FIRST_PACKET); - VideoPacketFormat* format = packet->mutable_format(); + RectangleUpdatePacket* update) { + + update->set_flags(update->flags() | RectangleUpdatePacket::FIRST_PACKET); + RectangleFormat* format = update->mutable_format(); format->set_x(rect.x()); format->set_y(rect.y()); format->set_width(rect.width()); format->set_height(rect.height()); - format->set_encoding(VideoPacketFormat::ENCODING_ZLIB); + format->set_encoding(EncodingZlib); format->set_pixel_format(capture_data_->pixel_format()); } -uint8* EncoderZlib::GetOutputBuffer(VideoPacket* packet, size_t size) { - packet->mutable_data()->resize(size); +uint8* EncoderZlib::GetOutputBuffer(RectangleUpdatePacket* update, + size_t size) { + update->mutable_encoded_rect()->resize(size); // TODO(ajwong): Is there a better way to do this at all??? 
return const_cast<uint8*>(reinterpret_cast<const uint8*>( - packet->mutable_data()->data())); + update->mutable_encoded_rect()->data())); } -void EncoderZlib::SubmitMessage(VideoPacket* packet, size_t rect_index) { - callback_->Run(packet); +void EncoderZlib::SubmitMessage(ChromotingHostMessage* message, + size_t rect_index) { + EncodingState state = EncodingInProgress; + const RectangleUpdatePacket& update = message->rectangle_update(); + if (rect_index == 0 && + (update.flags() | RectangleUpdatePacket::FIRST_PACKET)) { + state |= EncodingStarting; + } + if (rect_index == capture_data_->dirty_rects().size() - 1 && + (update.flags() | RectangleUpdatePacket::LAST_PACKET)) { + state |= EncodingEnded; + } + callback_->Run(message, state); } } // namespace remoting diff --git a/remoting/base/encoder_zlib.h b/remoting/base/encoder_zlib.h index 6699f03..3d0a13e 100644 --- a/remoting/base/encoder_zlib.h +++ b/remoting/base/encoder_zlib.h @@ -32,14 +32,15 @@ class EncoderZlib : public Encoder { size_t rect_index); // Marks a packets as the first in a series of rectangle updates. - void PrepareUpdateStart(const gfx::Rect& rect, VideoPacket* packet); + void PrepareUpdateStart(const gfx::Rect& rect, + RectangleUpdatePacket* update); // Retrieves a pointer to the output buffer in |update| used for storing the // encoded rectangle data. Will resize the buffer to |size|. - uint8* GetOutputBuffer(VideoPacket* packet, size_t size); + uint8* GetOutputBuffer(RectangleUpdatePacket* update, size_t size); // Submit |message| to |callback_|. 
- void SubmitMessage(VideoPacket* packet, size_t rect_index); + void SubmitMessage(ChromotingHostMessage* message, size_t rect_index); scoped_refptr<CaptureData> capture_data_; scoped_ptr<DataAvailableCallback> callback_; diff --git a/remoting/base/multiple_array_input_stream_unittest.cc b/remoting/base/multiple_array_input_stream_unittest.cc index 1a21add..4a840e4 100644 --- a/remoting/base/multiple_array_input_stream_unittest.cc +++ b/remoting/base/multiple_array_input_stream_unittest.cc @@ -46,7 +46,7 @@ static void ReadString(MultipleArrayInputStream* input, scoped_array<char> buffer(new char[str.size() + 1]); buffer[str.size()] = '\0'; EXPECT_EQ(ReadFromInput(input, buffer.get(), str.size()), str.size()); - EXPECT_STREQ(str.data(), buffer.get()); + EXPECT_STREQ(str.c_str(), buffer.get()); } // Construct and prepare data in the |output_stream|. @@ -69,7 +69,7 @@ static void PrepareData(scoped_ptr<MultipleArrayInputStream>* stream) { } MultipleArrayInputStream* mstream = new MultipleArrayInputStream(); - const char* data = kTestData.data(); + const char* data = kTestData.c_str(); for (int i = 0; i < segments; ++i) { int size = i % 2 == 0 ? 1 : 2; mstream->AddBuffer(new net::StringIOBuffer(std::string(data, size)), size); diff --git a/remoting/base/util.cc b/remoting/base/util.cc index f24d322..8ccbd23 100644 --- a/remoting/base/util.cc +++ b/remoting/base/util.cc @@ -10,13 +10,13 @@ namespace remoting { int GetBytesPerPixel(PixelFormat format) { // Note: The order is important here for performance. This is sorted from the - // most common to the less common (PIXEL_FORMAT_ASCII is mostly used + // most common to the less common (PixelFormatAscii is mostly used // just for testing). 
switch (format) { - case PIXEL_FORMAT_RGB24: return 3; - case PIXEL_FORMAT_RGB565: return 2; - case PIXEL_FORMAT_RGB32: return 4; - case PIXEL_FORMAT_ASCII: return 1; + case PixelFormatRgb24: return 3; + case PixelFormatRgb565: return 2; + case PixelFormatRgb32: return 4; + case PixelFormatAscii: return 1; default: NOTREACHED() << "Pixel format not supported"; return 0; diff --git a/remoting/base/util.h b/remoting/base/util.h index d7f4128..e3db289 100644 --- a/remoting/base/util.h +++ b/remoting/base/util.h @@ -5,7 +5,7 @@ #ifndef REMOTING_BASE_UTIL_H_ #define REMOTING_BASE_UTIL_H_ -#include "remoting/proto/video.pb.h" +#include "remoting/proto/event.pb.h" namespace remoting { diff --git a/remoting/client/chromoting_client.cc b/remoting/client/chromoting_client.cc index 8b46a9d..95271be 100644 --- a/remoting/client/chromoting_client.cc +++ b/remoting/client/chromoting_client.cc @@ -131,10 +131,10 @@ void ChromotingClient::DispatchMessage() { // TODO(ajwong): Change this to use a done callback. 
InitClient(msg->init_client(), NewTracedMethod(this, &ChromotingClient::OnMessageDone, msg)); - } else if (msg->has_video_packet()) { + } else if (msg->has_rectangle_update()) { ScopedTracer tracer("Handle Rectangle Update"); rectangle_decoder_->DecodePacket( - msg->video_packet(), + msg->rectangle_update(), NewTracedMethod(this, &ChromotingClient::OnMessageDone, msg)); } else { NOTREACHED() << "Unknown message received"; diff --git a/remoting/client/chromoting_view_unittest.cc b/remoting/client/chromoting_view_unittest.cc index a57962e..4b31435 100644 --- a/remoting/client/chromoting_view_unittest.cc +++ b/remoting/client/chromoting_view_unittest.cc @@ -26,7 +26,7 @@ class MockDecoder : public Decoder { MOCK_METHOD1(PartialDecode, bool(ChromotingHostMessage* message)); MOCK_METHOD0(EndDecode, void()); - MOCK_METHOD0(Encoding, VideoPacketFormat::Encoding()); + MOCK_METHOD0(Encoding, UpdateStreamEncoding()); MOCK_METHOD0(IsStarted, bool()); private: @@ -64,7 +64,7 @@ class FakeView : public ChromotingView { } // Testing wrappers for private setup/startup decoder routines. 
- bool setup_decoder(VideoPacketFormat::Encoding encoding) { + bool setup_decoder(UpdateStreamEncoding encoding) { return SetupDecoder(encoding); } bool begin_decoding(Task* partial_decode_done, Task* decode_done) { diff --git a/remoting/client/rectangle_update_decoder.cc b/remoting/client/rectangle_update_decoder.cc index 9a77860..8471991 100644 --- a/remoting/client/rectangle_update_decoder.cc +++ b/remoting/client/rectangle_update_decoder.cc @@ -9,7 +9,6 @@ #include "media/base/callback.h" #include "remoting/base/decoder.h" #include "remoting/base/decoder_row_based.h" -#include "remoting/base/decoder_vp8.h" #include "remoting/base/tracer.h" #include "remoting/base/util.h" #include "remoting/client/frame_consumer.h" @@ -47,7 +46,7 @@ RectangleUpdateDecoder::RectangleUpdateDecoder(MessageLoop* message_loop, RectangleUpdateDecoder::~RectangleUpdateDecoder() { } -void RectangleUpdateDecoder::DecodePacket(const VideoPacket& packet, +void RectangleUpdateDecoder::DecodePacket(const RectangleUpdatePacket& packet, Task* done) { if (message_loop_ != MessageLoop::current()) { message_loop_->PostTask( @@ -71,8 +70,8 @@ void RectangleUpdateDecoder::DecodePacket(const VideoPacket& packet, &RectangleUpdateDecoder::ProcessPacketData, packet, done_runner.release()); - if (packet.flags() | VideoPacket::FIRST_PACKET) { - const VideoPacketFormat& format = packet.format(); + if (packet.flags() | RectangleUpdatePacket::FIRST_PACKET) { + const RectangleFormat& format = packet.format(); InitializeDecoder(format, process_packet_data); } else { @@ -82,7 +81,8 @@ void RectangleUpdateDecoder::DecodePacket(const VideoPacket& packet, } void RectangleUpdateDecoder::ProcessPacketData( - const VideoPacket& packet, Task* done) { + const RectangleUpdatePacket& packet, + Task* done) { AutoTaskRunner done_runner(done); if (!decoder_->IsReadyForData()) { @@ -92,9 +92,9 @@ void RectangleUpdateDecoder::ProcessPacketData( } TraceContext::tracer()->PrintString("Executing Decode."); - 
decoder_->DecodeBytes(packet.data()); + decoder_->DecodeBytes(packet.encoded_rect()); - if (packet.flags() | VideoPacket::LAST_PACKET) { + if (packet.flags() | RectangleUpdatePacket::LAST_PACKET) { decoder_->Reset(); UpdatedRects* rects = new UpdatedRects(); @@ -109,38 +109,39 @@ void RectangleUpdateDecoder::ProcessPacketData( } // static -bool RectangleUpdateDecoder::IsValidPacket(const VideoPacket& packet) { +bool RectangleUpdateDecoder::IsValidPacket( + const RectangleUpdatePacket& packet) { if (!packet.IsInitialized()) { LOG(WARNING) << "Protobuf consistency checks fail."; return false; } // First packet must have a format. - if (packet.flags() | VideoPacket::FIRST_PACKET) { + if (packet.flags() | RectangleUpdatePacket::FIRST_PACKET) { if (!packet.has_format()) { LOG(WARNING) << "First packet must have format."; return false; } // TODO(ajwong): Verify that we don't need to whitelist encodings. - const VideoPacketFormat& format = packet.format(); + const RectangleFormat& format = packet.format(); if (!format.has_encoding() || - format.encoding() == VideoPacketFormat::ENCODING_INVALID) { + format.encoding() == EncodingInvalid) { LOG(WARNING) << "Invalid encoding specified."; return false; } } // We shouldn't generate null packets. - if (!packet.has_data()) { - LOG(WARNING) << "Packet w/o data received."; + if (!packet.has_encoded_rect()) { + LOG(WARNING) << "Packet w/o an encoded rectangle received."; return false; } return true; } -void RectangleUpdateDecoder::InitializeDecoder(const VideoPacketFormat& format, +void RectangleUpdateDecoder::InitializeDecoder(const RectangleFormat& format, Task* done) { if (message_loop_ != MessageLoop::current()) { message_loop_->PostTask( @@ -191,15 +192,12 @@ void RectangleUpdateDecoder::InitializeDecoder(const VideoPacketFormat& format, CHECK(decoder_->Encoding() == format.encoding()); } else { // Initialize a new decoder based on this message encoding. 
- if (format.encoding() == VideoPacketFormat::ENCODING_VERBATIM) { + if (format.encoding() == EncodingNone) { TraceContext::tracer()->PrintString("Creating Verbatim decoder."); decoder_.reset(DecoderRowBased::CreateVerbatimDecoder()); - } else if (format.encoding() == VideoPacketFormat::ENCODING_ZLIB) { + } else if (format.encoding() == EncodingZlib) { TraceContext::tracer()->PrintString("Creating Zlib decoder"); decoder_.reset(DecoderRowBased::CreateZlibDecoder()); - } else if (format.encoding() == VideoPacketFormat::ENCODING_VP8) { - TraceContext::tracer()->PrintString("Creating VP8 decoder"); - decoder_.reset(new DecoderVp8()); } else { NOTREACHED() << "Invalid Encoding found: " << format.encoding(); } diff --git a/remoting/client/rectangle_update_decoder.h b/remoting/client/rectangle_update_decoder.h index e6c344e..b383c20 100644 --- a/remoting/client/rectangle_update_decoder.h +++ b/remoting/client/rectangle_update_decoder.h @@ -16,8 +16,8 @@ namespace remoting { class Decoder; class FrameConsumer; -class VideoPacketFormat; -class VideoPacket; +class RectangleFormat; +class RectangleUpdatePacket; // TODO(ajwong): Re-examine this API, especially with regards to how error // conditions on each step are reported. Should they be CHECKs? Logs? Other? @@ -34,14 +34,14 @@ class RectangleUpdateDecoder { // // TODO(ajwong): Should packet be a const pointer to make the lifetime // more clear? - void DecodePacket(const VideoPacket& packet, Task* done); + void DecodePacket(const RectangleUpdatePacket& packet, Task* done); private: - static bool IsValidPacket(const VideoPacket& packet); + static bool IsValidPacket(const RectangleUpdatePacket& packet); - void InitializeDecoder(const VideoPacketFormat& format, Task* done); + void InitializeDecoder(const RectangleFormat& format, Task* done); - void ProcessPacketData(const VideoPacket& packet, Task* done); + void ProcessPacketData(const RectangleUpdatePacket& packet, Task* done); // Pointers to infrastructure objects. 
Not owned. MessageLoop* message_loop_; diff --git a/remoting/host/capturer.cc b/remoting/host/capturer.cc index 9e63041..61148e6 100644 --- a/remoting/host/capturer.cc +++ b/remoting/host/capturer.cc @@ -13,7 +13,7 @@ namespace remoting { Capturer::Capturer() : width_(0), height_(0), - pixel_format_(PIXEL_FORMAT_INVALID), + pixel_format_(PixelFormatInvalid), bytes_per_row_(0), current_buffer_(0) { } diff --git a/remoting/host/capturer_fake.cc b/remoting/host/capturer_fake.cc index 072f19a..56b43cf 100644 --- a/remoting/host/capturer_fake.cc +++ b/remoting/host/capturer_fake.cc @@ -8,27 +8,13 @@ namespace remoting { -// CapturerFake generates a white picture of size kWidth x kHeight with a -// rectangle of size kBoxWidth x kBoxHeight. The rectangle moves kSpeed pixels -// per frame along both axes, and bounces off the sides of the screen. -static const int kWidth = 800; -static const int kHeight = 600; -static const int kBoxWidth = 140; -static const int kBoxHeight = 140; -static const int kSpeed = 20; - -COMPILE_ASSERT(kBoxWidth < kWidth && kBoxHeight < kHeight, bad_box_size); -COMPILE_ASSERT((kBoxWidth % kSpeed == 0) && (kWidth % kSpeed == 0) && - (kBoxHeight % kSpeed == 0) && (kHeight % kSpeed == 0), - sizes_must_be_multiple_of_kSpeed); - +static const int kWidth = 320; +static const int kHeight = 240; static const int kBytesPerPixel = 4; // 32 bit RGB is 4 bytes per pixel. +static const int kMaxColorChannelValue = 255; CapturerFake::CapturerFake() - : box_pos_x_(0), - box_pos_y_(0), - box_speed_x_(kSpeed), - box_speed_y_(kSpeed) { + : seed_(0) { ScreenConfigurationChanged(); } @@ -38,7 +24,7 @@ CapturerFake::~CapturerFake() { void CapturerFake::ScreenConfigurationChanged() { width_ = kWidth; height_ = kHeight; - pixel_format_ = PIXEL_FORMAT_RGB32; + pixel_format_ = PixelFormatRgb32; bytes_per_row_ = width_ * kBytesPerPixel; // Create memory for the buffers. 
@@ -68,36 +54,16 @@ void CapturerFake::CaptureRects(const InvalidRects& rects, } void CapturerFake::GenerateImage() { - memset(buffers_[current_buffer_].get(), 0xff, - width_ * height_ * kBytesPerPixel); - - uint8* row = buffers_[current_buffer_].get() + - (box_pos_y_ * width_ + box_pos_x_) * kBytesPerPixel; - - box_pos_x_ += box_speed_x_; - if (box_pos_x_ + kBoxWidth >= width_ || box_pos_x_ == 0) - box_speed_x_ = -box_speed_x_; - - box_pos_y_ += box_speed_y_; - if (box_pos_y_ + kBoxHeight >= height_ || box_pos_y_ == 0) - box_speed_y_ = -box_speed_y_; - - // Draw rectangle with the following colors in it's corners: - // cyan....yellow - // .............. - // blue.......red - for (int y = 0; y < kBoxHeight; ++y) { - for (int x = 0; x < kBoxWidth; ++x) { - int r = x * 255 / kBoxWidth; - int g = y * 255 / kBoxHeight; - int b = 255 - (x * 255 / kBoxWidth); - row[x * kBytesPerPixel] = r; - row[x * kBytesPerPixel+1] = g; - row[x * kBytesPerPixel+2] = b; - row[x * kBytesPerPixel+3] = 0xff; + uint8* row = buffers_[current_buffer_].get(); + for (int y = 0; y < height_; ++y) { + int offset = y % 3; + for (int x = 0; x < width_; ++x) { + row[x * kBytesPerPixel + offset] = seed_++; + seed_ &= kMaxColorChannelValue; } row += bytes_per_row_; } + ++seed_; } } // namespace remoting diff --git a/remoting/host/capturer_fake.h b/remoting/host/capturer_fake.h index 46e0266..84cc7ba 100644 --- a/remoting/host/capturer_fake.h +++ b/remoting/host/capturer_fake.h @@ -10,7 +10,8 @@ namespace remoting { -// A CapturerFake generates artificial image for testing purpose. +// A CapturerFake always outputs an image of 320x240 in 32bit RGB. The image +// is artificially generated for testing purposes. // // CapturerFake is doubled buffered as required by Capturer. See // remoting/host/capturer.h. @@ -29,10 +30,8 @@ class CapturerFake : public Capturer { // Generates an image in the front buffer. 
void GenerateImage(); - int box_pos_x_; - int box_pos_y_; - int box_speed_x_; - int box_speed_y_; + // The seed for generating the image. + int seed_; // We have two buffers for the screen images as required by Capturer. scoped_array<uint8> buffers_[kNumBuffers]; diff --git a/remoting/host/capturer_fake_ascii.cc b/remoting/host/capturer_fake_ascii.cc index 4b259a9..1bb9d44 100644 --- a/remoting/host/capturer_fake_ascii.cc +++ b/remoting/host/capturer_fake_ascii.cc @@ -21,7 +21,7 @@ CapturerFakeAscii::~CapturerFakeAscii() { void CapturerFakeAscii::ScreenConfigurationChanged() { width_ = kWidth; height_ = kHeight; - pixel_format_ = PIXEL_FORMAT_ASCII; + pixel_format_ = PixelFormatAscii; bytes_per_row_ = width_ * kBytesPerPixel; // Create memory for the buffers. diff --git a/remoting/host/capturer_gdi.cc b/remoting/host/capturer_gdi.cc index 7742ff9..209eea4 100644 --- a/remoting/host/capturer_gdi.cc +++ b/remoting/host/capturer_gdi.cc @@ -57,7 +57,7 @@ void CapturerGdi::ScreenConfigurationChanged() { int rounded_width = (width_ + 3) & (~3); // Dimensions of screen. - pixel_format_ = PIXEL_FORMAT_RGB32; + pixel_format_ = PixelFormatRgb32; bytes_per_row_ = rounded_width * kBytesPerPixel; // Create a differ for this screen size. 
diff --git a/remoting/host/capturer_linux.cc b/remoting/host/capturer_linux.cc index 99013d0..fcf7a01 100644 --- a/remoting/host/capturer_linux.cc +++ b/remoting/host/capturer_linux.cc @@ -235,7 +235,7 @@ void CapturerLinuxPimpl::CaptureRects( scoped_refptr<CaptureData> capture_data( new CaptureData(planes, capturer_->width(), capturer_->height(), - PIXEL_FORMAT_RGB32)); + PixelFormatRgb32)); for (InvalidRects::const_iterator it = rects.begin(); it != rects.end(); diff --git a/remoting/host/capturer_mac.cc b/remoting/host/capturer_mac.cc index 3eafa34..6d6b6cc 100644 --- a/remoting/host/capturer_mac.cc +++ b/remoting/host/capturer_mac.cc @@ -47,7 +47,7 @@ void CapturerMac::ScreenConfigurationChanged() { width_ = CGDisplayPixelsWide(mainDevice); height_ = CGDisplayPixelsHigh(mainDevice); bytes_per_row_ = width_ * sizeof(uint32_t); - pixel_format_ = PIXEL_FORMAT_RGB32; + pixel_format_ = PixelFormatRgb32; size_t buffer_size = height() * bytes_per_row_; for (int i = 0; i < kNumBuffers; ++i) { buffers_[i].reset(new uint8[buffer_size]); diff --git a/remoting/host/client_connection.cc b/remoting/host/client_connection.cc index e325bf7..8063d04 100644 --- a/remoting/host/client_connection.cc +++ b/remoting/host/client_connection.cc @@ -55,20 +55,15 @@ void ClientConnection::SendInitClientMessage(int width, int height) { video_writer_.SendMessage(msg); } -void ClientConnection::SendVideoPacket(const VideoPacket& packet) { +void ClientConnection::SendUpdateStreamPacketMessage( + const ChromotingHostMessage& message) { DCHECK_EQ(loop_, MessageLoop::current()); // If we are disconnected then return. if (!connection_) return; - ChromotingHostMessage* message = new ChromotingHostMessage(); - // TODO(sergeyu): avoid memcopy here. 
- *message->mutable_video_packet() = packet; - - video_writer_.SendMessage(*message); - - delete message; + video_writer_.SendMessage(message); } int ClientConnection::GetPendingUpdateStreamMessages() { diff --git a/remoting/host/client_connection.h b/remoting/host/client_connection.h index 38e383c..69283e2 100644 --- a/remoting/host/client_connection.h +++ b/remoting/host/client_connection.h @@ -64,7 +64,8 @@ class ClientConnection : public base::RefCountedThreadSafe<ClientConnection> { virtual void SendInitClientMessage(int width, int height); // Send encoded update stream data to the viewer. - virtual void SendVideoPacket(const VideoPacket& packet); + virtual void SendUpdateStreamPacketMessage( + const ChromotingHostMessage& message); // Gets the number of update stream messages not yet transmitted. // Note that the value returned is an estimate using average size of the diff --git a/remoting/host/client_connection_unittest.cc b/remoting/host/client_connection_unittest.cc index b5802cf..240d8f4 100644 --- a/remoting/host/client_connection_unittest.cc +++ b/remoting/host/client_connection_unittest.cc @@ -46,8 +46,8 @@ class ClientConnectionTest : public testing::Test { TEST_F(ClientConnectionTest, SendUpdateStream) { // Then send the actual data. - VideoPacket packet; - viewer_->SendVideoPacket(packet); + ChromotingHostMessage message; + viewer_->SendUpdateStreamPacketMessage(message); // And then close the connection to ClientConnection. 
viewer_->Disconnect(); @@ -76,8 +76,8 @@ TEST_F(ClientConnectionTest, Close) { message_loop_.RunAllPending(); EXPECT_TRUE(connection_->is_closed()); - VideoPacket packet; - viewer_->SendVideoPacket(packet); + ChromotingHostMessage message; + viewer_->SendUpdateStreamPacketMessage(message); viewer_->Disconnect(); message_loop_.RunAllPending(); diff --git a/remoting/host/mock_objects.h b/remoting/host/mock_objects.h index 428bd74..50c94ef 100644 --- a/remoting/host/mock_objects.h +++ b/remoting/host/mock_objects.h @@ -47,7 +47,10 @@ class MockClientConnection : public ClientConnection { MOCK_METHOD1(Init, void(ChromotingConnection* connection)); MOCK_METHOD2(SendInitClientMessage, void(int width, int height)); - MOCK_METHOD1(SendVideoPacket, void(const VideoPacket& packet)); + MOCK_METHOD0(SendBeginUpdateStreamMessage, void()); + MOCK_METHOD1(SendUpdateStreamPacketMessage, + void(const ChromotingHostMessage& message)); + MOCK_METHOD0(SendEndUpdateStreamMessage, void()); MOCK_METHOD0(GetPendingUpdateStreamMessages, int()); MOCK_METHOD0(Disconnect, void()); diff --git a/remoting/host/session_manager.cc b/remoting/host/session_manager.cc index 1867f07..a61a6bdd 100644 --- a/remoting/host/session_manager.cc +++ b/remoting/host/session_manager.cc @@ -328,16 +328,18 @@ void SessionManager::DoRateControl() { ScheduleNextRateControl(); } -void SessionManager::DoSendVideoPacket(VideoPacket* packet) { +void SessionManager::DoSendUpdate(ChromotingHostMessage* message, + Encoder::EncodingState state) { DCHECK_EQ(network_loop_, MessageLoop::current()); TraceContext::tracer()->PrintString("DoSendUpdate"); for (ClientConnectionList::const_iterator i = clients_.begin(); i < clients_.end(); ++i) { - (*i)->SendVideoPacket(*packet); + (*i)->SendUpdateStreamPacketMessage(*message); } - delete packet; + + delete message; TraceContext::tracer()->PrintString("DoSendUpdate done"); } @@ -397,20 +399,19 @@ void SessionManager::DoEncode( TraceContext::tracer()->PrintString("Encode Done"); } 
-void SessionManager::EncodeDataAvailableTask(VideoPacket* packet) { +void SessionManager::EncodeDataAvailableTask( + ChromotingHostMessage* message, Encoder::EncodingState state) { DCHECK_EQ(encode_loop_, MessageLoop::current()); - bool last = (packet->flags() & VideoPacket::LAST_PACKET) != 0; - // Before a new encode task starts, notify clients a new update // stream is coming. // Notify this will keep a reference to the DataBuffer in the // task. The ownership will eventually pass to the ClientConnections. network_loop_->PostTask( FROM_HERE, - NewTracedMethod(this, &SessionManager::DoSendVideoPacket, packet)); + NewTracedMethod(this, &SessionManager::DoSendUpdate, message, state)); - if (last) { + if (state & Encoder::EncodingEnded) { capture_loop_->PostTask( FROM_HERE, NewTracedMethod(this, &SessionManager::DoFinishEncode)); } diff --git a/remoting/host/session_manager.h b/remoting/host/session_manager.h index fedfc4f..46e02df 100644 --- a/remoting/host/session_manager.h +++ b/remoting/host/session_manager.h @@ -14,7 +14,8 @@ #include "base/time.h" #include "remoting/base/encoder.h" #include "remoting/host/capturer.h" -#include "remoting/proto/video.pb.h" +// TODO(hclam): This class should not know the internal protobuf types. +#include "remoting/proto/internal.pb.h" namespace remoting { @@ -125,7 +126,8 @@ class SessionManager : public base::RefCountedThreadSafe<SessionManager> { void DoRateControl(); // DoSendUpdate takes ownership of header and is responsible for deleting it. - void DoSendVideoPacket(VideoPacket* packet); + void DoSendUpdate(ChromotingHostMessage* message, + Encoder::EncodingState state); void DoSendInit(scoped_refptr<ClientConnection> client, int width, int height); @@ -139,7 +141,8 @@ class SessionManager : public base::RefCountedThreadSafe<SessionManager> { // EncodeDataAvailableTask takes ownership of header and is responsible for // deleting it. 
- void EncodeDataAvailableTask(VideoPacket* packet); + void EncodeDataAvailableTask(ChromotingHostMessage* message, + Encoder::EncodingState state); // Message loops used by this class. MessageLoop* capture_loop_; diff --git a/remoting/host/session_manager_unittest.cc b/remoting/host/session_manager_unittest.cc index f3870e2..9aa34d8 100644 --- a/remoting/host/session_manager_unittest.cc +++ b/remoting/host/session_manager_unittest.cc @@ -19,9 +19,8 @@ namespace remoting { static const int kWidth = 640; static const int kHeight = 480; -static const PixelFormat kFormat = PIXEL_FORMAT_RGB32; -static const VideoPacketFormat::Encoding kEncoding = - VideoPacketFormat::ENCODING_VERBATIM; +static const PixelFormat kFormat = PixelFormatRgb32; +static const UpdateStreamEncoding kEncoding = EncodingNone; class SessionManagerTest : public testing::Test { public: @@ -65,7 +64,10 @@ ACTION_P2(RunCallback, rects, data) { } ACTION_P(FinishEncode, msg) { - arg2->Run(msg); + Encoder::EncodingState state = (Encoder::EncodingStarting | + Encoder::EncodingInProgress | + Encoder::EncodingEnded); + arg2->Run(msg, state); delete arg2; } @@ -96,12 +98,14 @@ TEST_F(SessionManagerTest, DISABLED_OneRecordCycle) { .WillOnce(RunCallback(update_rects, data)); // Expect the encoder be called. - VideoPacket* packet = new VideoPacket(); + ChromotingHostMessage* msg = new ChromotingHostMessage(); EXPECT_CALL(*encoder_, Encode(data, false, NotNull())) - .WillOnce(FinishEncode(packet)); + .WillOnce(FinishEncode(msg)); // Expect the client be notified. 
- EXPECT_CALL(*client_, SendVideoPacket(_)); + EXPECT_CALL(*client_, SendBeginUpdateStreamMessage()); + EXPECT_CALL(*client_, SendUpdateStreamPacketMessage(_)); + EXPECT_CALL(*client_, SendEndUpdateStreamMessage()); EXPECT_CALL(*client_, GetPendingUpdateStreamMessages()) .Times(AtLeast(0)) .WillRepeatedly(Return(0)); diff --git a/remoting/host/simple_host_process.cc b/remoting/host/simple_host_process.cc index 2733850..d998ef9 100644 --- a/remoting/host/simple_host_process.cc +++ b/remoting/host/simple_host_process.cc @@ -25,18 +25,14 @@ #include "base/logging.h" #include "base/mac/scoped_nsautorelease_pool.h" #include "base/nss_util.h" -#include "base/path_service.h" #include "base/thread.h" -#include "media/base/media.h" #include "remoting/base/encoder_verbatim.h" -#include "remoting/base/encoder_vp8.h" #include "remoting/base/encoder_zlib.h" -#include "remoting/base/tracer.h" #include "remoting/host/capturer_fake.h" #include "remoting/host/chromoting_host.h" #include "remoting/host/chromoting_host_context.h" #include "remoting/host/json_host_config.h" -#include "remoting/proto/video.pb.h" +#include "remoting/base/tracer.h" #if defined(OS_WIN) #include "remoting/host/capturer_gdi.h" @@ -66,7 +62,6 @@ void ShutdownTask(MessageLoop* message_loop) { const std::string kFakeSwitchName = "fake"; const std::string kConfigSwitchName = "config"; const std::string kVerbatimSwitchName = "verbatim"; -const std::string kVp8SwitchName = "vp8"; int main(int argc, char** argv) { // Needed for the Mac, so we don't leak objects when threads are created. @@ -97,15 +92,14 @@ int main(int argc, char** argv) { // Check the argument to see if we should use a fake capturer and encoder. 
bool fake = cmd_line->HasSwitch(kFakeSwitchName); bool verbatim = cmd_line->HasSwitch(kVerbatimSwitchName); - bool vp8 = cmd_line->HasSwitch(kVp8SwitchName); #if defined(OS_WIN) - std::wstring home_path = GetEnvironmentVar(kHomeDrive); - home_path += GetEnvironmentVar(kHomePath); + std::wstring path = GetEnvironmentVar(kHomeDrive); + path += GetEnvironmentVar(kHomePath); #else - std::string home_path = GetEnvironmentVar(base::env_vars::kHome); + std::string path = GetEnvironmentVar(base::env_vars::kHome); #endif - FilePath config_path(home_path); + FilePath config_path(path); config_path = config_path.Append(kDefaultConfigPath); if (cmd_line->HasSwitch(kConfigSwitchName)) { config_path = cmd_line->GetSwitchValuePath(kConfigSwitchName); @@ -122,14 +116,6 @@ int main(int argc, char** argv) { encoder.reset(new remoting::EncoderVerbatim()); } - // TODO(sergeyu): Enable VP8 on ARM builds. -#if !defined(ARCH_CPU_ARM_FAMILY) - if (vp8) { - LOG(INFO) << "Using the verbatim encoder."; - encoder.reset(new remoting::EncoderVp8()); - } -#endif - base::Thread file_io_thread("FileIO"); file_io_thread.Start(); @@ -146,11 +132,6 @@ int main(int argc, char** argv) { remoting::ChromotingHostContext context; context.Start(); - FilePath module_path; - PathService::Get(base::DIR_MODULE, &module_path); - CHECK(media::InitializeMediaLibrary(module_path)) - << "Cannot load media library"; - // Construct a chromoting host. 
scoped_refptr<remoting::ChromotingHost> host( new remoting::ChromotingHost(&context, diff --git a/remoting/proto/chromotocol.gyp b/remoting/proto/chromotocol.gyp index faab3e7..3d55d21 100644 --- a/remoting/proto/chromotocol.gyp +++ b/remoting/proto/chromotocol.gyp @@ -16,7 +16,6 @@ 'control.proto', 'event.proto', 'internal.proto', - 'video.proto', ], 'rules': [ { @@ -72,8 +71,6 @@ '<(out_dir)/event.pb.h', '<(out_dir)/internal.pb.cc', '<(out_dir)/internal.pb.h', - '<(out_dir)/video.pb.cc', - '<(out_dir)/video.pb.h', ], }, ], diff --git a/remoting/proto/event.proto b/remoting/proto/event.proto index 2526d78..96c8fce 100644 --- a/remoting/proto/event.proto +++ b/remoting/proto/event.proto @@ -1,7 +1,7 @@ // Copyright (c) 2010 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. - +// // Protocol for event messages. syntax = "proto2"; @@ -10,6 +10,141 @@ option optimize_for = LITE_RUNTIME; package remoting; +// A message that gets sent to the client after the client is connected to the +// host. It contains information that the client needs to know about the host. +// NEXT ID: 3 +message InitClientMessage { + required int32 width = 1; + required int32 height = 2; +} + +// A message to denote the beginning of an update stream. It will be followed +// by 0 or more UpdateStreamPacketMessages and then a EndUpdateStreamMessage. +// NEXT ID: 1 +message BeginUpdateStreamMessage { +} + +// A message to denote the end of an update stream. +// NEXT ID: 1 +message EndUpdateStreamMessage { +} + +// Identifies how the image was encoded. +enum UpdateStreamEncoding { + EncodingInvalid = -1; + EncodingNone = 0; + EncodingZlib = 1; + EncodingVp8 = 2; +} + +// Identifies the pixel format. +// Note that this list should match exactly the same as +// media::VideoFrame::Format in media/base/video_frame.h. 
+enum PixelFormat { + PixelFormatInvalid = 0; + PixelFormatRgb555 = 1; + PixelFormatRgb565 = 2; + PixelFormatRgb24 = 3; + PixelFormatRgb32 = 4; + PixelFormatRgba = 5; + PixelFormatYv12 = 6; + PixelFormatYv16 = 7; + PixelFormatNv12 = 8; + PixelFormatEmpty = 9; + PixelFormatAscii = 10; +} + +// A message that denotes the beginning of an updating rectangle in an update +// stream packet. +// NEXT ID: 7 +message UpdateStreamBeginRect { + // X,Y coordinates (in screen pixels) for origin of this update. + required int32 x = 1; + required int32 y = 2; + + // Width, height (in screen pixels) for this update. + required int32 width = 3; + required int32 height = 4; + + // The encoding used for this image update. + optional UpdateStreamEncoding encoding = 5 [default=EncodingNone]; + + // The pixel format of this image. + optional PixelFormat pixel_format = 6 [default=PixelFormatRgb24]; +} + +// A message that contains partial data for updating a rectangle in an +// update stream packet. +// NEXT ID: 3 +message UpdateStreamRectData { + // The sequence number of the partial data for updating a rectangle. + optional int32 sequence_number = 1 [default=0]; + + // The partial data for updating a rectangle. + required bytes data = 2; +} + +// A message that denotes the end of an updating rectangle. +// NEXT ID: 1 +message UpdateStreamEndRect { +} + +// A message to denote a partial update stream. +// NEXT ID: 4 +message UpdateStreamPacketMessage { + optional UpdateStreamBeginRect begin_rect = 1; + optional UpdateStreamRectData rect_data = 2; + optional UpdateStreamEndRect end_rect = 3; +} + +// TODO(ajwong): Determine if these fields should be optional or required. +message RectangleFormat { + // X,Y coordinates (in screen pixels) for origin of this update. + required int32 x = 1; + required int32 y = 2; + + // Width, height (in screen pixels) for this update. + required int32 width = 3; + required int32 height = 4; + + // The encoding used for this image update. 
+ optional UpdateStreamEncoding encoding = 5 [default = EncodingInvalid]; + + // The pixel format of this image. + optional PixelFormat pixel_format = 6 [default = PixelFormatRgb24]; +} + +message RectangleUpdatePacket { + // Bitmasks for use in the flags field below. + // + // The encoder may fragment one update into multiple packets depending on + // how the encoder outputs data. Thus, one update can logically consist of + // multiple packets. The FIRST_PACKET and LAST_PACKET flags are used to + // indicate the start and end of a logical update. Here are notable + // consequences: + // * Both FIRST_PACKET and LAST_PACKET may be set if an update is only + // one packet long. + // * The RectangleFormat is only supplied in a FIRST_PACKET. + // * A local update cannot change format between a FIRST_PACKET and + // a LAST_PACKET. + // * All packets in one logical update must be processed in order, and + // packets may not be skipped. + enum Flags { + FIRST_PACKET = 1; + LAST_PACKET = 2; + } + optional int32 flags = 1 [default = 0]; + + // The sequence number of the partial data for updating a rectangle. + optional int32 sequence_number = 2 [default = 0]; + + // This is provided on the first packet of the rectangle data, when + // the flags field has FIRST_PACKET set. + optional RectangleFormat format = 3; + + optional bytes encoded_rect = 4; +} + // Defines a keyboard event. // NEXT ID: 3 message KeyEvent { diff --git a/remoting/proto/internal.proto b/remoting/proto/internal.proto index 3319420..2e74501 100644 --- a/remoting/proto/internal.proto +++ b/remoting/proto/internal.proto @@ -1,14 +1,13 @@ // Copyright (c) 2010 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. - +// // Internal messages as a unit for transmission in the wire. 
syntax = "proto2"; import "control.proto"; import "event.proto"; -import "video.proto"; option optimize_for = LITE_RUNTIME; @@ -19,7 +18,10 @@ package remoting; // NEXT ID: 5 message ChromotingHostMessage { optional InitClientMessage init_client= 1; - optional VideoPacket video_packet = 2; + optional BeginUpdateStreamMessage begin_update_stream = 2; + optional EndUpdateStreamMessage end_update_stream = 3; + optional UpdateStreamPacketMessage update_stream_packet = 4; + optional RectangleUpdatePacket rectangle_update = 5; } // Defines the message that is sent from the client to the host. diff --git a/remoting/proto/video.proto b/remoting/proto/video.proto deleted file mode 100644 index c91a0a3..0000000 --- a/remoting/proto/video.proto +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright (c) 2010 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// Protocol for video messages. - -syntax = "proto2"; - -option optimize_for = LITE_RUNTIME; - -package remoting; - -// A message that gets sent to the client after the client is connected to the -// host. It contains information that the client needs to know about the host. -// NEXT ID: 3 -// TODO(sergeyu): Move to the control channel. -message InitClientMessage { - required int32 width = 1; - required int32 height = 2; -} - -// Identifies the pixel format. -// Note that this list should match exactly the same as -// media::VideoFrame::Format in media/base/video_frame.h. -enum PixelFormat { - PIXEL_FORMAT_INVALID = 0; - PIXEL_FORMAT_RGB555 = 1; - PIXEL_FORMAT_RGB565 = 2; - PIXEL_FORMAT_RGB24 = 3; - PIXEL_FORMAT_RGB32 = 4; - PIXEL_FORMAT_RGBA = 5; - PIXEL_FORMAT_YV12 = 6; - PIXEL_FORMAT_YV16 = 7; - PIXEL_FORMAT_NV12 = 8; - PIXEL_FORMAT_EMPTY = 9; - PIXEL_FORMAT_ASCII = 10; -} - -// TODO(ajwong): Determine if these fields should be optional or required. -message VideoPacketFormat { - // Identifies how the image was encoded. 
- enum Encoding { - ENCODING_INVALID = -1; - ENCODING_VERBATIM = 0; - ENCODING_ZLIB = 1; - ENCODING_VP8 = 2; - }; - - // X,Y coordinates (in screen pixels) for origin of this update. - optional int32 x = 1; - optional int32 y = 2; - - // Width, height (in screen pixels) for this update. - optional int32 width = 3; - optional int32 height = 4; - - // The encoding used for this image update. - optional Encoding encoding = 5 [default = ENCODING_INVALID]; - - // The pixel format of this image. - optional PixelFormat pixel_format = 6 [default = PIXEL_FORMAT_RGB24]; -} - -message VideoPacket { - // Bitmasks for use in the flags field below. - // - // The encoder may fragment one update into multiple packets depending on - // how the encoder outputs data. Thus, one update can logically consist of - // multiple packets. The FIRST_PACKET and LAST_PACKET flags are used to - // indicate the start and end of a logical update. Here are notable - // consequences: - // * Both FIRST_PACKET and LAST_PACKET may be set if an update is only - // one packet long. - // * The VideoPacketFormat is only supplied in a FIRST_PACKET. - // * An local update cannot change format between a FIRST_PACKET and - // a LAST_PACKET. - // * All packets in one logical update must be processed in order, and - // packets may not be skipped. - enum Flags { - FIRST_PACKET = 1; - LAST_PACKET = 2; - } - optional int32 flags = 1 [default = 0]; - - // The sequence number of the partial data for updating a rectangle. - optional int32 sequence_number = 2 [default = 0]; - - // This is provided on the first packet of the rectangle data, when - // the flags has FIRST_PACKET set. 
- optional VideoPacketFormat format = 3; - - optional bytes data = 4; -} diff --git a/remoting/protocol/host_message_dispatcher.cc b/remoting/protocol/host_message_dispatcher.cc index 148dccf..768789e 100644 --- a/remoting/protocol/host_message_dispatcher.cc +++ b/remoting/protocol/host_message_dispatcher.cc @@ -6,7 +6,6 @@ #include "remoting/base/multiple_array_input_stream.h" #include "remoting/proto/control.pb.h" #include "remoting/proto/event.pb.h" -#include "remoting/proto/video.pb.h" #include "remoting/protocol/chromotocol_connection.h" #include "remoting/protocol/host_message_dispatcher.h" #include "remoting/protocol/host_control_message_handler.h" diff --git a/remoting/protocol/message_decoder_unittest.cc b/remoting/protocol/message_decoder_unittest.cc index 90c6680..a2d4b20 100644 --- a/remoting/protocol/message_decoder_unittest.cc +++ b/remoting/protocol/message_decoder_unittest.cc @@ -41,8 +41,18 @@ static void PrepareData(uint8** buffer, int* size) { // Then append 10 update sequences to the data. for (int i = 0; i < 10; ++i) { - msg.mutable_video_packet()->set_sequence_number(0); - msg.mutable_video_packet()->set_data(kTestData); + msg.mutable_begin_update_stream(); + AppendMessage(msg, &encoded_data); + msg.Clear(); + + msg.mutable_update_stream_packet()->mutable_rect_data()-> + set_sequence_number(0); + msg.mutable_update_stream_packet()->mutable_rect_data()-> + set_data(kTestData); + AppendMessage(msg, &encoded_data); + msg.Clear(); + + msg.mutable_end_update_stream(); AppendMessage(msg, &encoded_data); msg.Clear(); } @@ -80,19 +90,30 @@ void SimulateReadSequence(const int read_sequence[], int sequence_size) { } // Then verify the decoded messages. 
- EXPECT_EQ(11u, message_list.size()); + EXPECT_EQ(31u, message_list.size()); EXPECT_TRUE(message_list.front()->has_init_client()); delete message_list.front(); message_list.pop_front(); + int index = 0; for (std::list<ChromotingHostMessage*>::iterator it = message_list.begin(); it != message_list.end(); ++it) { ChromotingHostMessage* message = *it; - // Partial update stream. - EXPECT_TRUE(message->has_video_packet()); - EXPECT_EQ(kTestData, - message->video_packet().data().data()); + int type = index % 3; + ++index; + if (type == 0) { + // Begin update stream. + EXPECT_TRUE(message->has_begin_update_stream()); + } else if (type == 1) { + // Partial update stream. + EXPECT_TRUE(message->has_update_stream_packet()); + EXPECT_EQ(kTestData, + message->update_stream_packet().rect_data().data()); + } else if (type == 2) { + // End update stream. + EXPECT_TRUE(message->has_end_update_stream()); + } } STLDeleteElements(&message_list); } diff --git a/remoting/remoting.gyp b/remoting/remoting.gyp index 3f5c750..ec32165 100644 --- a/remoting/remoting.gyp +++ b/remoting/remoting.gyp @@ -138,8 +138,8 @@ 'base/constants.cc', 'base/constants.h', 'base/decoder.h', - 'base/decoder_vp8.cc', - 'base/decoder_vp8.h', +# BUG57374,BUG57266 'base/decoder_vp8.cc', +# BUG57374,BUG57266 'base/decoder_vp8.h', 'base/decoder_row_based.cc', 'base/decoder_row_based.h', 'base/decompressor.h', @@ -150,8 +150,8 @@ 'base/encoder.h', 'base/encoder_verbatim.cc', 'base/encoder_verbatim.h', - 'base/encoder_vp8.cc', - 'base/encoder_vp8.h', +# BUG57374 'base/encoder_vp8.cc', +# BUG57374 'base/encoder_vp8.h', 'base/encoder_zlib.cc', 'base/encoder_zlib.h', 'base/multiple_array_input_stream.cc', @@ -162,16 +162,6 @@ 'base/util.cc', 'base/util.h', ], - 'conditions': [ - ['target_arch=="arm"', { - '!sources': [ - 'base/decoder_vp8.cc', - 'base/decoder_vp8.h', - 'base/encoder_vp8.cc', - 'base/encoder_vp8.h', - ], - }], - ], }, # end of target 'chromoting_base' { @@ -430,11 +420,11 @@ # BUG57351 
'base/codec_test.cc', # BUG57351 'base/codec_test.h', 'base/compressor_zlib_unittest.cc', -# BUG57351 'base/decoder_vp8_unittest.cc', +# BUG57374 'base/decoder_vp8_unittest.cc', 'base/decompressor_zlib_unittest.cc', # BUG57351 'base/encode_decode_unittest.cc', # BUG57351 'base/encoder_verbatim_unittest.cc', -# BUG57351 'base/encoder_vp8_unittest.cc', +# BUG57374 'base/encoder_vp8_unittest.cc', # BUG57351 'base/encoder_zlib_unittest.cc', 'base/mock_objects.h', 'base/multiple_array_input_stream_unittest.cc', |