summaryrefslogtreecommitdiffstats
path: root/media/cast
diff options
context:
space:
mode:
authorszym@chromium.org <szym@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-12-14 01:17:17 +0000
committerszym@chromium.org <szym@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-12-14 01:17:17 +0000
commitb613f8389d2143412f7a639f4eaf900f340c2fed (patch)
tree50f7cd146dacfeca4b7b1a8d497cabb83ad37569 /media/cast
parent328d281aa89646eaf87a3442b30e7d9c5bfd1e1b (diff)
downloadchromium_src-b613f8389d2143412f7a639f4eaf900f340c2fed.zip
chromium_src-b613f8389d2143412f7a639f4eaf900f340c2fed.tar.gz
chromium_src-b613f8389d2143412f7a639f4eaf900f340c2fed.tar.bz2
Revert 240828 "Revert of https://codereview.chromium.org/104403010/"
Bad auto-revert deleted the file instead of removing a few lines. Broke compile on main waterfall. > Revert of https://codereview.chromium.org/104403010/ > Reason for revert: Possible breakage of unittest (due to a possible bad rebase) > > TBR=hclam@chromium.org,hguihot@chromium.org,hguihot@google.com > NOTREECHECKS=true > NOTRY=true > > Review URL: https://codereview.chromium.org/111503007 TBR=mikhal@chromium.org Review URL: https://codereview.chromium.org/99033006 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@240831 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media/cast')
-rw-r--r--media/cast/audio_receiver/audio_receiver.cc498
-rw-r--r--media/cast/audio_sender/audio_sender.cc210
-rw-r--r--media/cast/framer/cast_message_builder.cc192
-rw-r--r--media/cast/framer/frame_id_map.cc256
-rw-r--r--media/cast/framer/framer.cc139
-rw-r--r--media/cast/net/rtp_sender/packet_storage/packet_storage.cc174
-rw-r--r--media/cast/net/rtp_sender/rtp_sender.cc145
-rw-r--r--media/cast/rtcp/rtcp_sender_unittest.cc590
-rw-r--r--media/cast/test/receiver.cc264
-rw-r--r--media/cast/test/sender.cc365
-rw-r--r--media/cast/test/transport/transport.cc218
-rw-r--r--media/cast/video_receiver/codecs/vp8/vp8_decoder.cc106
-rw-r--r--media/cast/video_receiver/video_receiver.cc471
-rw-r--r--media/cast/video_sender/codecs/vp8/vp8_encoder.cc372
-rw-r--r--media/cast/video_sender/video_sender.cc455
15 files changed, 4455 insertions, 0 deletions
diff --git a/media/cast/audio_receiver/audio_receiver.cc b/media/cast/audio_receiver/audio_receiver.cc
new file mode 100644
index 0000000..39c02c9
--- /dev/null
+++ b/media/cast/audio_receiver/audio_receiver.cc
@@ -0,0 +1,498 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/audio_receiver/audio_receiver.h"
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/cast/audio_receiver/audio_decoder.h"
+#include "media/cast/framer/framer.h"
+#include "media/cast/rtcp/rtcp.h"
+#include "media/cast/rtp_receiver/rtp_receiver.h"
+
+// Max time we wait, for an audio frame that is due to be played out, before
+// the frame is released anyway.
+static const int64 kMaxAudioFrameWaitMs = 20;
+// Lower bound used when scheduling delayed tasks, so a computed delay that is
+// zero or negative still yields a valid PostDelayedTask interval.
+static const int64 kMinSchedulingDelayMs = 1;
+
+namespace media {
+namespace cast {
+
+// A queued decode request: how much audio was asked for, at what frequency,
+// and the callback to run once a frame can actually be decoded. Defaults are
+// zeroed; fields are filled in when a decode attempt has to wait for data.
+DecodedAudioCallbackData::DecodedAudioCallbackData()
+ : number_of_10ms_blocks(0),
+ desired_frequency(0),
+ callback() {}
+
+DecodedAudioCallbackData::~DecodedAudioCallbackData() {}
+
+// Local implementation of RtpData (defined in rtp_rtcp_defines.h).
+// Used to pass payload data into the audio receiver.
+class LocalRtpAudioData : public RtpData {
+ public:
+ explicit LocalRtpAudioData(AudioReceiver* audio_receiver)
+ : audio_receiver_(audio_receiver) {}
+
+ // Forwards each parsed RTP payload straight to the owning AudioReceiver.
+ virtual void OnReceivedPayloadData(
+ const uint8* payload_data,
+ size_t payload_size,
+ const RtpCastHeader* rtp_header) OVERRIDE {
+ audio_receiver_->IncomingParsedRtpPacket(payload_data, payload_size,
+ *rtp_header);
+ }
+
+ private:
+ // Not owned; the AudioReceiver owns this adapter, so it outlives us.
+ AudioReceiver* audio_receiver_;
+};
+
+// Local implementation of RtpPayloadFeedback (defined in rtp_defines.h)
+// Used to convey cast-specific feedback from receiver to sender.
+class LocalRtpAudioFeedback : public RtpPayloadFeedback {
+ public:
+ explicit LocalRtpAudioFeedback(AudioReceiver* audio_receiver)
+ : audio_receiver_(audio_receiver) {
+ }
+
+ // Forwards ACK/NACK cast messages to the owning AudioReceiver.
+ virtual void CastFeedback(const RtcpCastMessage& cast_message) OVERRIDE {
+ audio_receiver_->CastFeedback(cast_message);
+ }
+
+ private:
+ // Not owned; see LocalRtpAudioData above.
+ AudioReceiver* audio_receiver_;
+};
+
+// Adapter exposing the RtpReceiver's receive statistics through the
+// RtpReceiverStatistics interface that Rtcp expects.
+class LocalRtpReceiverStatistics : public RtpReceiverStatistics {
+ public:
+ explicit LocalRtpReceiverStatistics(RtpReceiver* rtp_receiver)
+ : rtp_receiver_(rtp_receiver) {
+ }
+
+ virtual void GetStatistics(uint8* fraction_lost,
+ uint32* cumulative_lost, // 24 bits valid.
+ uint32* extended_high_sequence_number,
+ uint32* jitter) OVERRIDE {
+ rtp_receiver_->GetStatistics(fraction_lost,
+ cumulative_lost,
+ extended_high_sequence_number,
+ jitter);
+ }
+
+ private:
+ // Not owned; the AudioReceiver owns both this adapter and the RtpReceiver.
+ RtpReceiver* rtp_receiver_;
+};
+
+// Builds the receive pipeline. Depending on the config, incoming audio either
+// goes through an internal AudioDecoder (decoded-PCM API) or is only
+// re-assembled into encoded frames by a Framer (external-decoder API); the
+// two members are mutually exclusive. Optionally sets up AES-CTR decryption
+// when a key and IV mask of the expected size are supplied.
+AudioReceiver::AudioReceiver(scoped_refptr<CastEnvironment> cast_environment,
+ const AudioReceiverConfig& audio_config,
+ PacedPacketSender* const packet_sender)
+ : cast_environment_(cast_environment),
+ codec_(audio_config.codec),
+ frequency_(audio_config.frequency),
+ audio_buffer_(),
+ audio_decoder_(),
+ time_offset_(),
+ weak_factory_(this) {
+ target_delay_delta_ =
+ base::TimeDelta::FromMilliseconds(audio_config.rtp_max_delay_ms);
+ incoming_payload_callback_.reset(new LocalRtpAudioData(this));
+ incoming_payload_feedback_.reset(new LocalRtpAudioFeedback(this));
+ if (audio_config.use_external_decoder) {
+ audio_buffer_.reset(new Framer(cast_environment->Clock(),
+ incoming_payload_feedback_.get(),
+ audio_config.incoming_ssrc,
+ true,
+ 0));
+ } else {
+ audio_decoder_.reset(new AudioDecoder(cast_environment,
+ audio_config,
+ incoming_payload_feedback_.get()));
+ }
+ // Crypto is all-or-nothing: both key and IV mask must be exactly
+ // kAesKeySize bytes; a partial configuration is a programming error.
+ if (audio_config.aes_iv_mask.size() == kAesKeySize &&
+ audio_config.aes_key.size() == kAesKeySize) {
+ iv_mask_ = audio_config.aes_iv_mask;
+ // NOTE(review): SymmetricKey::Import returns a caller-owned object and
+ // Encryptor::Init does not take ownership, so |key| appears to leak here.
+ // Confirm against crypto/ API docs.
+ crypto::SymmetricKey* key = crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES, audio_config.aes_key);
+ decryptor_.reset(new crypto::Encryptor());
+ decryptor_->Init(key, crypto::Encryptor::CTR, std::string());
+ } else if (audio_config.aes_iv_mask.size() != 0 ||
+ audio_config.aes_key.size() != 0) {
+ DCHECK(false) << "Invalid crypto configuration";
+ }
+
+ rtp_receiver_.reset(new RtpReceiver(cast_environment->Clock(),
+ &audio_config,
+ NULL,
+ incoming_payload_callback_.get()));
+ rtp_audio_receiver_statistics_.reset(
+ new LocalRtpReceiverStatistics(rtp_receiver_.get()));
+ base::TimeDelta rtcp_interval_delta =
+ base::TimeDelta::FromMilliseconds(audio_config.rtcp_interval);
+ rtcp_.reset(new Rtcp(cast_environment,
+ NULL,
+ packet_sender,
+ NULL,
+ rtp_audio_receiver_statistics_.get(),
+ audio_config.rtcp_mode,
+ rtcp_interval_delta,
+ audio_config.feedback_ssrc,
+ audio_config.incoming_ssrc,
+ audio_config.rtcp_c_name));
+}
+
+AudioReceiver::~AudioReceiver() {}
+
+// Kicks off the periodic RTCP-report and cast-message timers. Called lazily
+// from IncomingParsedRtpPacket on the first received packet.
+void AudioReceiver::InitializeTimers() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ ScheduleNextRtcpReport();
+ ScheduleNextCastMessage();
+}
+
+// Entry point for every parsed RTP payload (called via LocalRtpAudioData).
+// Records the packet event, starts the timers on the very first packet, then
+// routes the payload either into the internal decoder path (decrypting in
+// place first) or into the Framer used by the external-decoder path. In both
+// paths, a pending caller request that was queued while data was missing is
+// re-triggered once new data arrives.
+void AudioReceiver::IncomingParsedRtpPacket(const uint8* payload_data,
+ size_t payload_size,
+ const RtpCastHeader& rtp_header) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ cast_environment_->Logging()->InsertPacketEvent(kPacketReceived,
+ rtp_header.webrtc.header.timestamp, rtp_header.frame_id,
+ rtp_header.packet_id, rtp_header.max_packet_id, payload_size);
+
+ // TODO(pwestin): update this as video to refresh over time.
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (time_first_incoming_packet_.is_null()) {
+ // First packet ever: remember its RTP timestamp and arrival time; these
+ // anchor the playout-time calculation in GetPlayoutTime().
+ InitializeTimers();
+ first_incoming_rtp_timestamp_ = rtp_header.webrtc.header.timestamp;
+ time_first_incoming_packet_ = cast_environment_->Clock()->NowTicks();
+ }
+
+ if (audio_decoder_) {
+ DCHECK(!audio_buffer_) << "Invalid internal state";
+ // Without crypto, |plaintext| is simply a copy of the payload.
+ std::string plaintext(reinterpret_cast<const char*>(payload_data),
+ payload_size);
+ if (decryptor_) {
+ plaintext.clear();
+ // The AES-CTR counter is derived from the frame id and the IV mask, so
+ // it must be reset per packet/frame before decrypting.
+ if (!decryptor_->SetCounter(GetAesNonce(rtp_header.frame_id, iv_mask_))) {
+ NOTREACHED() << "Failed to set counter";
+ return;
+ }
+ if (!decryptor_->Decrypt(base::StringPiece(reinterpret_cast<const char*>(
+ payload_data), payload_size), &plaintext)) {
+ LOG(ERROR) << "Decryption error";
+ return;
+ }
+ }
+ audio_decoder_->IncomingParsedRtpPacket(
+ reinterpret_cast<const uint8*>(plaintext.data()), plaintext.size(),
+ rtp_header);
+ if (!queued_decoded_callbacks_.empty()) {
+ // A GetRawAudioFrame request was parked waiting for data; retry it now
+ // on the decoder thread.
+ DecodedAudioCallbackData decoded_data = queued_decoded_callbacks_.front();
+ queued_decoded_callbacks_.pop_front();
+ cast_environment_->PostTask(CastEnvironment::AUDIO_DECODER, FROM_HERE,
+ base::Bind(&AudioReceiver::DecodeAudioFrameThread,
+ base::Unretained(this),
+ decoded_data.number_of_10ms_blocks,
+ decoded_data.desired_frequency,
+ decoded_data.callback));
+ }
+ return;
+ }
+
+ DCHECK(audio_buffer_) << "Invalid internal state";
+ DCHECK(!audio_decoder_) << "Invalid internal state";
+
+ bool complete = audio_buffer_->InsertPacket(payload_data, payload_size,
+ rtp_header);
+ if (!complete) return; // Audio frame not complete; wait for more packets.
+ if (queued_encoded_callbacks_.empty()) return;
+
+ // A frame just completed and a caller is waiting for one: satisfy the
+ // oldest queued GetEncodedAudioFrame request.
+ AudioFrameEncodedCallback callback = queued_encoded_callbacks_.front();
+ queued_encoded_callbacks_.pop_front();
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&AudioReceiver::GetEncodedAudioFrame,
+ weak_factory_.GetWeakPtr(), callback));
+}
+
+// Public decoded-audio API: asks for |number_of_10ms_blocks| of PCM at
+// |desired_frequency|. The actual decode runs on the AUDIO_DECODER thread;
+// |callback| is eventually invoked on the MAIN thread with the frame and its
+// playout time.
+void AudioReceiver::GetRawAudioFrame(int number_of_10ms_blocks,
+ int desired_frequency, const AudioFrameDecodedCallback& callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(audio_decoder_) << "Invalid function call in this configuration";
+ // TODO(pwestin): we can skip this function by posting direct to the decoder.
+ cast_environment_->PostTask(CastEnvironment::AUDIO_DECODER, FROM_HERE,
+ base::Bind(&AudioReceiver::DecodeAudioFrameThread,
+ base::Unretained(this),
+ number_of_10ms_blocks,
+ desired_frequency,
+ callback));
+}
+
+// Runs on the AUDIO_DECODER thread. Attempts one decode; if the decoder has
+// no data yet, the request is queued and retried from
+// IncomingParsedRtpPacket when more packets arrive.
+// NOTE(review): |queued_decoded_callbacks_| is pushed here (decoder thread)
+// and popped on MAIN in IncomingParsedRtpPacket with no visible locking —
+// confirm thread-safety expectations.
+void AudioReceiver::DecodeAudioFrameThread(
+ int number_of_10ms_blocks,
+ int desired_frequency,
+ const AudioFrameDecodedCallback callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::AUDIO_DECODER));
+ // TODO(mikhal): Allow the application to allocate this memory.
+ scoped_ptr<PcmAudioFrame> audio_frame(new PcmAudioFrame());
+
+ uint32 rtp_timestamp = 0;
+ if (!audio_decoder_->GetRawAudioFrame(number_of_10ms_blocks,
+ desired_frequency,
+ audio_frame.get(),
+ &rtp_timestamp)) {
+ // Not enough data; park the request until the next packet arrives.
+ DecodedAudioCallbackData callback_data;
+ callback_data.number_of_10ms_blocks = number_of_10ms_blocks;
+ callback_data.desired_frequency = desired_frequency;
+ callback_data.callback = callback;
+ queued_decoded_callbacks_.push_back(callback_data);
+ return;
+ }
+ // NOTE(review): |now| is never used below — dead local.
+ base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&AudioReceiver::ReturnDecodedFrameWithPlayoutDelay,
+ base::Unretained(this), base::Passed(&audio_frame), rtp_timestamp,
+ callback));
+}
+
+// Runs on MAIN: logs the decode, computes the frame's playout time, logs the
+// playout delay, then hands the frame and its playout time to the caller.
+void AudioReceiver::ReturnDecodedFrameWithPlayoutDelay(
+ scoped_ptr<PcmAudioFrame> audio_frame, uint32 rtp_timestamp,
+ const AudioFrameDecodedCallback callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ cast_environment_->Logging()->InsertFrameEvent(kAudioFrameDecoded,
+ rtp_timestamp, kFrameIdUnknown);
+
+ base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+ base::TimeTicks playout_time = GetPlayoutTime(now, rtp_timestamp);
+
+ cast_environment_->Logging()->InsertFrameEventWithDelay(kAudioPlayoutDelay,
+ rtp_timestamp, kFrameIdUnknown, playout_time - now);
+
+ // Frame is ready - Send back to the caller.
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(callback, base::Passed(&audio_frame), playout_time));
+}
+
+// Fires when a frame that was held back (waiting for missing packets) reaches
+// its release deadline. Retries delivery of the next frame to the oldest
+// queued encoded-frame callback, if any.
+void AudioReceiver::PlayoutTimeout() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(audio_buffer_) << "Invalid function call in this configuration";
+ if (queued_encoded_callbacks_.empty()) {
+ // Already released by incoming packet.
+ return;
+ }
+ uint32 rtp_timestamp = 0;
+ bool next_frame = false;
+ scoped_ptr<EncodedAudioFrame> encoded_frame(new EncodedAudioFrame());
+
+ if (!audio_buffer_->GetEncodedAudioFrame(encoded_frame.get(),
+ &rtp_timestamp, &next_frame)) {
+ // We have no audio frames. Wait for new packet(s).
+ // Since the application can post multiple AudioFrameEncodedCallback and
+ // we only check the next frame to play out we might have multiple timeout
+ // events firing after each other; however this should be a rare event.
+ VLOG(2) << "Failed to retrieved a complete frame at this point in time";
+ return;
+ }
+
+ if (decryptor_ && !DecryptAudioFrame(&encoded_frame)) {
+ // Logging already done.
+ return;
+ }
+
+ if (PostEncodedAudioFrame(queued_encoded_callbacks_.front(), rtp_timestamp,
+ next_frame, &encoded_frame)) {
+ // Call succeeded; remove the satisfied callback from the queue.
+ queued_encoded_callbacks_.pop_front();
+ }
+}
+
+// Public encoded-audio API (external-decoder configuration): delivers the
+// next complete frame via |callback|, or queues the callback if no frame is
+// ready yet or the frame should still wait for missing packets.
+void AudioReceiver::GetEncodedAudioFrame(
+ const AudioFrameEncodedCallback& callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(audio_buffer_) << "Invalid function call in this configuration";
+
+ uint32 rtp_timestamp = 0;
+ bool next_frame = false;
+ scoped_ptr<EncodedAudioFrame> encoded_frame(new EncodedAudioFrame());
+
+ if (!audio_buffer_->GetEncodedAudioFrame(encoded_frame.get(),
+ &rtp_timestamp, &next_frame)) {
+ // We have no audio frames. Wait for new packet(s).
+ VLOG(2) << "Wait for more audio packets in frame";
+ queued_encoded_callbacks_.push_back(callback);
+ return;
+ }
+ if (decryptor_ && !DecryptAudioFrame(&encoded_frame)) {
+ // Logging already done.
+ queued_encoded_callbacks_.push_back(callback);
+ return;
+ }
+ if (!PostEncodedAudioFrame(callback, rtp_timestamp, next_frame,
+ &encoded_frame)) {
+ // We have an audio frame; however we are missing packets and we have time
+ // to wait for new packet(s).
+ queued_encoded_callbacks_.push_back(callback);
+ }
+}
+
+// Decides whether |encoded_frame| should be delivered now or held back.
+// Returns true (and posts |callback| with the frame and its playout time) if
+// the frame is the next continuous frame or its playout deadline is close;
+// returns false after scheduling a PlayoutTimeout retry otherwise.
+bool AudioReceiver::PostEncodedAudioFrame(
+ const AudioFrameEncodedCallback& callback,
+ uint32 rtp_timestamp,
+ bool next_frame,
+ scoped_ptr<EncodedAudioFrame>* encoded_frame) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(audio_buffer_) << "Invalid function call in this configuration";
+
+ base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+ base::TimeTicks playout_time = GetPlayoutTime(now, rtp_timestamp);
+ base::TimeDelta time_until_playout = playout_time - now;
+ base::TimeDelta min_wait_delta =
+ base::TimeDelta::FromMilliseconds(kMaxAudioFrameWaitMs);
+
+ if (!next_frame && (time_until_playout > min_wait_delta)) {
+ // Incomplete/out-of-order frame with time to spare: give missing packets
+ // a chance to arrive before forcing the frame out.
+ base::TimeDelta time_until_release = time_until_playout - min_wait_delta;
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&AudioReceiver::PlayoutTimeout, weak_factory_.GetWeakPtr()),
+ time_until_release);
+ VLOG(2) << "Wait until time to playout:"
+ << time_until_release.InMilliseconds();
+ return false;
+ }
+ (*encoded_frame)->codec = codec_;
+ // Frame is being consumed; drop it from the jitter buffer.
+ audio_buffer_->ReleaseFrame((*encoded_frame)->frame_id);
+
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(callback, base::Passed(encoded_frame), playout_time));
+ return true;
+}
+
+// Raw packet entry point: demultiplexes RTCP from RTP by inspecting the
+// packet, then runs |callback| (packet-release) on the MAIN thread.
+void AudioReceiver::IncomingPacket(const uint8* packet, size_t length,
+ const base::Closure callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ bool rtcp_packet = Rtcp::IsRtcpPacket(packet, length);
+ if (!rtcp_packet) {
+ rtp_receiver_->ReceivedPacket(packet, length);
+ } else {
+ rtcp_->IncomingRtcpPacket(packet, length);
+ }
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE, callback);
+}
+
+// Sends an RTCP packet carrying the cast ACK/NACK message back to the sender.
+void AudioReceiver::CastFeedback(const RtcpCastMessage& cast_message) {
+ // TODO(pwestin): add logging.
+ rtcp_->SendRtcpFromRtpReceiver(&cast_message, NULL);
+}
+
+// Maps |rtp_timestamp| to a local playout deadline. Until RTCP lock is
+// achieved the first packet's arrival time is used as the reference;
+// afterwards the sender-clock offset plus the configured target delay is
+// applied. The result is clamped so playout time never goes backwards.
+base::TimeTicks AudioReceiver::GetPlayoutTime(base::TimeTicks now,
+ uint32 rtp_timestamp) {
+ base::TimeTicks playout_time;
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ // Senders time in ms when this frame was recorded.
+ // Note: the senders clock and our local clock might not be synced.
+ base::TimeTicks rtp_timestamp_in_ticks;
+ if (time_offset_ == base::TimeDelta()) {
+ if (rtcp_->RtpTimestampInSenderTime(frequency_,
+ first_incoming_rtp_timestamp_,
+ &rtp_timestamp_in_ticks)) {
+ time_offset_ = time_first_incoming_packet_ - rtp_timestamp_in_ticks;
+ } else {
+ // We have not received any RTCP to sync the stream play it out as soon as
+ // possible.
+ uint32 rtp_timestamp_diff = rtp_timestamp - first_incoming_rtp_timestamp_;
+
+ int frequency_khz = frequency_ / 1000;
+ base::TimeDelta rtp_time_diff_delta =
+ base::TimeDelta::FromMilliseconds(rtp_timestamp_diff / frequency_khz);
+ base::TimeDelta time_diff_delta = now - time_first_incoming_packet_;
+
+ playout_time = now + std::max(rtp_time_diff_delta - time_diff_delta,
+ base::TimeDelta());
+ }
+ }
+ // NOTE(review): this branch runs only when the fallback above already set a
+ // playout time, so the offset-based path never computes one — the condition
+ // looks inverted (should presumably be playout_time.is_null()). Confirm
+ // against later revisions of this file.
+ if (!playout_time.is_null()) {
+ // This can fail if we have not received any RTCP packets in a long time.
+ playout_time = rtcp_->RtpTimestampInSenderTime(frequency_, rtp_timestamp,
+ &rtp_timestamp_in_ticks) ?
+ rtp_timestamp_in_ticks + time_offset_ + target_delay_delta_ : now;
+ }
+ // Don't allow the playout time to go backwards.
+ if (last_playout_time_ > playout_time)
+ playout_time = last_playout_time_;
+ last_playout_time_ = playout_time;
+ return playout_time;
+}
+
+// Decrypts |audio_frame|'s payload in place (AES-CTR, counter derived from
+// the frame id and IV mask). On decryption failure the frame is dropped from
+// the jitter buffer so it is not retried forever. Returns true on success.
+bool AudioReceiver::DecryptAudioFrame(
+ scoped_ptr<EncodedAudioFrame>* audio_frame) {
+ DCHECK(decryptor_) << "Invalid state";
+
+ if (!decryptor_->SetCounter(GetAesNonce((*audio_frame)->frame_id,
+ iv_mask_))) {
+ NOTREACHED() << "Failed to set counter";
+ return false;
+ }
+ std::string decrypted_audio_data;
+ if (!decryptor_->Decrypt((*audio_frame)->data, &decrypted_audio_data)) {
+ LOG(ERROR) << "Decryption error";
+ // Give up on this frame, release it from jitter buffer.
+ audio_buffer_->ReleaseFrame((*audio_frame)->frame_id);
+ return false;
+ }
+ (*audio_frame)->data.swap(decrypted_audio_data);
+ return true;
+}
+
+// Self-rescheduling timer: posts SendNextRtcpReport at the time the Rtcp
+// object says the next report is due, clamped to kMinSchedulingDelayMs.
+void AudioReceiver::ScheduleNextRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeDelta time_to_send = rtcp_->TimeToSendNextRtcpReport() -
+ cast_environment_->Clock()->NowTicks();
+
+ time_to_send = std::max(time_to_send,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&AudioReceiver::SendNextRtcpReport,
+ weak_factory_.GetWeakPtr()), time_to_send);
+}
+
+// Sends a receiver report (no cast message attached) and re-arms the timer.
+void AudioReceiver::SendNextRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ // TODO(pwestin): add logging.
+ rtcp_->SendRtcpFromRtpReceiver(NULL, NULL);
+ ScheduleNextRtcpReport();
+}
+
+// Cast messages should be sent within a maximum interval. Schedule a call
+// if not triggered elsewhere, e.g. by the cast message_builder.
+void AudioReceiver::ScheduleNextCastMessage() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeTicks send_time;
+ // Exactly one of the two members exists, depending on the configuration.
+ if (audio_buffer_) {
+ audio_buffer_->TimeToSendNextCastMessage(&send_time);
+ } else if (audio_decoder_) {
+ audio_decoder_->TimeToSendNextCastMessage(&send_time);
+ } else {
+ NOTREACHED();
+ }
+ base::TimeDelta time_to_send = send_time -
+ cast_environment_->Clock()->NowTicks();
+ time_to_send = std::max(time_to_send,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&AudioReceiver::SendNextCastMessage,
+ weak_factory_.GetWeakPtr()), time_to_send);
+}
+
+// Asks whichever pipeline is active to emit a cast message if due, then
+// re-arms the timer.
+void AudioReceiver::SendNextCastMessage() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+ if (audio_buffer_) {
+ // Will only send a message if it is time.
+ audio_buffer_->SendCastMessage();
+ }
+ if (audio_decoder_) {
+ // Will only send a message if it is time.
+ audio_decoder_->SendCastMessage();
+ }
+ ScheduleNextCastMessage();
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/audio_sender/audio_sender.cc b/media/cast/audio_sender/audio_sender.cc
new file mode 100644
index 0000000..713f5b9
--- /dev/null
+++ b/media/cast/audio_sender/audio_sender.cc
@@ -0,0 +1,210 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/audio_sender/audio_sender.h"
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/cast/audio_sender/audio_encoder.h"
+#include "media/cast/cast_environment.h"
+#include "media/cast/net/rtp_sender/rtp_sender.h"
+#include "media/cast/rtcp/rtcp.h"
+
+namespace media {
+namespace cast {
+
+// Lower bound for delayed-task scheduling (see ScheduleNextRtcpReport).
+const int64 kMinSchedulingDelayMs = 1;
+
+// Receives RTCP cast feedback on behalf of the AudioSender: triggers packet
+// retransmission for reported losses and logs the ACKed frame id.
+class LocalRtcpAudioSenderFeedback : public RtcpSenderFeedback {
+ public:
+ explicit LocalRtcpAudioSenderFeedback(AudioSender* audio_sender)
+ : audio_sender_(audio_sender) {
+ }
+
+ virtual void OnReceivedCastFeedback(
+ const RtcpCastMessage& cast_feedback) OVERRIDE {
+ if (!cast_feedback.missing_frames_and_packets_.empty()) {
+ audio_sender_->ResendPackets(cast_feedback.missing_frames_and_packets_);
+ }
+ VLOG(2) << "Received audio ACK "
+ << static_cast<int>(cast_feedback.ack_frame_id_);
+ }
+
+ private:
+ // Not owned; the AudioSender owns this adapter.
+ AudioSender* audio_sender_;
+};
+
+// Adapter exposing the RtpSender's send statistics through the
+// RtpSenderStatistics interface that Rtcp expects.
+class LocalRtpSenderStatistics : public RtpSenderStatistics {
+ public:
+ explicit LocalRtpSenderStatistics(RtpSender* rtp_sender)
+ : rtp_sender_(rtp_sender) {
+ }
+
+ virtual void GetStatistics(const base::TimeTicks& now,
+ RtcpSenderInfo* sender_info) OVERRIDE {
+ rtp_sender_->RtpStatistics(now, sender_info);
+ }
+
+ private:
+ // Not owned; the AudioSender owns this adapter and the RtpSender.
+ RtpSender* rtp_sender_;
+};
+
+// Builds the audio send pipeline: RTP sender, RTCP session, optional AES-CTR
+// encryption, and (unless an external encoder is used) an internal
+// AudioEncoder whose output is routed back via SendEncodedAudioFrame.
+AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
+ const AudioSenderConfig& audio_config,
+ PacedPacketSender* const paced_packet_sender)
+ : cast_environment_(cast_environment),
+ rtp_sender_(cast_environment, &audio_config, NULL,
+ paced_packet_sender),
+ rtcp_feedback_(new LocalRtcpAudioSenderFeedback(this)),
+ rtp_audio_sender_statistics_(
+ new LocalRtpSenderStatistics(&rtp_sender_)),
+ rtcp_(cast_environment,
+ rtcp_feedback_.get(),
+ paced_packet_sender,
+ rtp_audio_sender_statistics_.get(),
+ NULL,
+ audio_config.rtcp_mode,
+ base::TimeDelta::FromMilliseconds(audio_config.rtcp_interval),
+ audio_config.sender_ssrc,
+ audio_config.incoming_feedback_ssrc,
+ audio_config.rtcp_c_name),
+ initialized_(false),
+ weak_factory_(this) {
+ // Crypto is all-or-nothing: both key and IV mask must be exactly
+ // kAesKeySize bytes; a partial configuration is a programming error.
+ if (audio_config.aes_iv_mask.size() == kAesKeySize &&
+ audio_config.aes_key.size() == kAesKeySize) {
+ iv_mask_ = audio_config.aes_iv_mask;
+ // NOTE(review): as in audio_receiver.cc, |key| from SymmetricKey::Import
+ // appears to be leaked — Encryptor::Init does not take ownership. Confirm.
+ crypto::SymmetricKey* key = crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES, audio_config.aes_key);
+ encryptor_.reset(new crypto::Encryptor());
+ encryptor_->Init(key, crypto::Encryptor::CTR, std::string());
+ } else if (audio_config.aes_iv_mask.size() != 0 ||
+ audio_config.aes_key.size() != 0) {
+ DCHECK(false) << "Invalid crypto configuration";
+ }
+ if (!audio_config.use_external_encoder) {
+ audio_encoder_ = new AudioEncoder(
+ cast_environment, audio_config,
+ base::Bind(&AudioSender::SendEncodedAudioFrame,
+ weak_factory_.GetWeakPtr()));
+ }
+}
+
+AudioSender::~AudioSender() {}
+
+// Idempotent: arms the periodic RTCP report timer on first use.
+void AudioSender::InitializeTimers() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (!initialized_) {
+ initialized_ = true;
+ ScheduleNextRtcpReport();
+ }
+}
+
+// Public API for raw (unencoded) audio; only valid when the internal encoder
+// is in use. |done_callback| fires when |audio_bus| may be reused.
+void AudioSender::InsertAudio(const AudioBus* audio_bus,
+ const base::TimeTicks& recorded_time,
+ const base::Closure& done_callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(audio_encoder_.get()) << "Invalid internal state";
+ // TODO(mikhal): Resolve calculation of the audio rtp_timestamp for logging.
+ // This is a tmp solution to allow the code to build.
+ cast_environment_->Logging()->InsertFrameEvent(kAudioFrameReceived,
+ GetVideoRtpTimestamp(recorded_time), kFrameIdUnknown);
+ audio_encoder_->InsertAudio(audio_bus, recorded_time, done_callback);
+}
+
+// Public API for pre-encoded audio (external-encoder configuration):
+// optionally encrypts, then hands the frame to the RTP sender and runs
+// |callback| when the frame has been passed on.
+void AudioSender::InsertCodedAudioFrame(const EncodedAudioFrame* audio_frame,
+ const base::TimeTicks& recorded_time,
+ const base::Closure callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(audio_encoder_.get() == NULL) << "Invalid internal state";
+
+ cast_environment_->Logging()->InsertFrameEvent(kAudioFrameReceived,
+ GetVideoRtpTimestamp(recorded_time), kFrameIdUnknown);
+
+ if (encryptor_) {
+ EncodedAudioFrame encrypted_frame;
+ if (!EncryptAudioFrame(*audio_frame, &encrypted_frame)) {
+ // Logging already done.
+ // NOTE(review): on encryption failure |callback| is never run — confirm
+ // whether callers rely on it firing unconditionally.
+ return;
+ }
+ rtp_sender_.IncomingEncodedAudioFrame(&encrypted_frame, recorded_time);
+ } else {
+ rtp_sender_.IncomingEncodedAudioFrame(audio_frame, recorded_time);
+ }
+ callback.Run();
+}
+
+// Callback target for the internal AudioEncoder: encrypts if configured and
+// forwards the encoded frame to the RTP sender. Also lazily starts the RTCP
+// timer on the first frame.
+void AudioSender::SendEncodedAudioFrame(
+ scoped_ptr<EncodedAudioFrame> audio_frame,
+ const base::TimeTicks& recorded_time) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ InitializeTimers();
+ if (encryptor_) {
+ EncodedAudioFrame encrypted_frame;
+ if (!EncryptAudioFrame(*audio_frame.get(), &encrypted_frame)) {
+ // Logging already done.
+ return;
+ }
+ rtp_sender_.IncomingEncodedAudioFrame(&encrypted_frame, recorded_time);
+ } else {
+ rtp_sender_.IncomingEncodedAudioFrame(audio_frame.get(), recorded_time);
+ }
+}
+
+// Encrypts |audio_frame|'s payload into |encrypted_frame| (AES-CTR, counter
+// derived from frame id + IV mask) and copies the remaining metadata over.
+// Returns false if the counter cannot be set or encryption fails.
+bool AudioSender::EncryptAudioFrame(const EncodedAudioFrame& audio_frame,
+ EncodedAudioFrame* encrypted_frame) {
+ DCHECK(encryptor_) << "Invalid state";
+
+ if (!encryptor_->SetCounter(GetAesNonce(audio_frame.frame_id, iv_mask_))) {
+ NOTREACHED() << "Failed to set counter";
+ return false;
+ }
+ if (!encryptor_->Encrypt(audio_frame.data, &encrypted_frame->data)) {
+ NOTREACHED() << "Encrypt error";
+ return false;
+ }
+ encrypted_frame->codec = audio_frame.codec;
+ encrypted_frame->frame_id = audio_frame.frame_id;
+ encrypted_frame->samples = audio_frame.samples;
+ return true;
+}
+
+// Retransmits the packets reported missing by the receiver's cast feedback.
+void AudioSender::ResendPackets(
+ const MissingFramesAndPacketsMap& missing_frames_and_packets) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ rtp_sender_.ResendPackets(missing_frames_and_packets);
+}
+
+// Feeds an incoming RTCP packet to the RTCP session, then runs the
+// packet-release |callback| on the MAIN thread.
+void AudioSender::IncomingRtcpPacket(const uint8* packet, size_t length,
+ const base::Closure callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ rtcp_.IncomingRtcpPacket(packet, length);
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE, callback);
+}
+
+// Self-rescheduling timer: posts SendRtcpReport at the time the Rtcp object
+// says the next report is due, clamped to kMinSchedulingDelayMs.
+void AudioSender::ScheduleNextRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeDelta time_to_next =
+ rtcp_.TimeToSendNextRtcpReport() - cast_environment_->Clock()->NowTicks();
+
+ time_to_next = std::max(time_to_next,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&AudioSender::SendRtcpReport, weak_factory_.GetWeakPtr()),
+ time_to_next);
+}
+
+// Emits one RTCP sender report and re-arms the timer.
+void AudioSender::SendRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ // We don't send audio logging messages since all captured audio frames will
+ // be sent.
+ rtcp_.SendRtcpFromRtpSender(NULL);
+ ScheduleNextRtcpReport();
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/framer/cast_message_builder.cc b/media/cast/framer/cast_message_builder.cc
new file mode 100644
index 0000000..ef864ba
--- /dev/null
+++ b/media/cast/framer/cast_message_builder.cc
@@ -0,0 +1,192 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/framer/cast_message_builder.h"
+
+#include "media/cast/cast_defines.h"
+
+namespace media {
+namespace cast {
+
+// Builds RTCP cast (ACK/NACK) messages from the frame-completeness state held
+// in |frame_id_map|. Starts out waiting for a key frame and ACKing
+// kStartFrameId until one arrives. All pointers are borrowed, not owned.
+CastMessageBuilder::CastMessageBuilder(
+ base::TickClock* clock,
+ RtpPayloadFeedback* incoming_payload_feedback,
+ FrameIdMap* frame_id_map,
+ uint32 media_ssrc,
+ bool decoder_faster_than_max_frame_rate,
+ int max_unacked_frames)
+ : clock_(clock),
+ cast_feedback_(incoming_payload_feedback),
+ frame_id_map_(frame_id_map),
+ media_ssrc_(media_ssrc),
+ decoder_faster_than_max_frame_rate_(decoder_faster_than_max_frame_rate),
+ max_unacked_frames_(max_unacked_frames),
+ cast_msg_(media_ssrc),
+ waiting_for_key_frame_(true),
+ slowing_down_ack_(false),
+ acked_last_frame_(true),
+ last_acked_frame_id_(kStartFrameId) {
+ cast_msg_.ack_frame_id_ = kStartFrameId;
+}
+
+CastMessageBuilder::~CastMessageBuilder() {}
+
+// Called whenever a frame becomes complete. While waiting for a key frame,
+// non-key frames are ignored; once past that, the ACK and the NACK packet
+// list are refreshed and a cast message is sent to the feedback sink.
+void CastMessageBuilder::CompleteFrameReceived(uint32 frame_id,
+ bool is_key_frame) {
+ if (last_update_time_.is_null()) {
+ // Our first update.
+ last_update_time_ = clock_->NowTicks();
+ }
+ if (waiting_for_key_frame_) {
+ if (!is_key_frame) {
+ // Ignore that we have received this complete frame since we are
+ // waiting on a key frame.
+ return;
+ }
+ waiting_for_key_frame_ = false;
+ cast_msg_.missing_frames_and_packets_.clear();
+ cast_msg_.ack_frame_id_ = frame_id;
+ last_update_time_ = clock_->NowTicks();
+ // We might have other complete frames waiting after we receive the last
+ // packet in the key-frame.
+ UpdateAckMessage();
+ } else {
+ // If the ACK did not advance there is nothing new to report.
+ if (!UpdateAckMessage()) return;
+
+ BuildPacketList();
+ }
+ // Send cast message.
+ VLOG(2) << "Send cast message Ack:" << static_cast<int>(frame_id);
+ cast_feedback_->CastFeedback(cast_msg_);
+}
+
+// Advances the ACK frame id. When the decoder cannot outpace the sender and
+// too many complete frames back up in the framer, acknowledgments are slowed
+// to every other frame so the sender backs off. Returns true iff the ACK
+// changed (i.e. a new cast message is worth sending).
+bool CastMessageBuilder::UpdateAckMessage() {
+ if (!decoder_faster_than_max_frame_rate_) {
+ int complete_frame_count = frame_id_map_->NumberOfCompleteFrames();
+ if (complete_frame_count > max_unacked_frames_) {
+ // We have too many frames pending in our framer; slow down ACK.
+ slowing_down_ack_ = true;
+ } else if (complete_frame_count <= 1) {
+ // We are down to one or less frames in our framer; ACK normally.
+ slowing_down_ack_ = false;
+ }
+ }
+ if (slowing_down_ack_) {
+ // We are slowing down acknowledgment by acknowledging every other frame.
+ if (acked_last_frame_) {
+ acked_last_frame_ = false;
+ } else {
+ acked_last_frame_ = true;
+ last_acked_frame_id_++;
+ // Note: frame skipping and slowdown ACK is not supported at the same
+ // time; and it's not needed since we can skip frames to catch up.
+ }
+ } else {
+ uint32 frame_id = frame_id_map_->LastContinuousFrame();
+
+ // Is it a new frame?
+ if (last_acked_frame_id_ == frame_id) return false;
+
+ last_acked_frame_id_ = frame_id;
+ acked_last_frame_ = true;
+ }
+ cast_msg_.ack_frame_id_ = last_acked_frame_id_;
+ cast_msg_.missing_frames_and_packets_.clear();
+ last_update_time_ = clock_->NowTicks();
+ return true;
+}
+
+// Reports when the next periodic cast message is due. Returns false (no
+// deadline) until at least one packet has been seen.
+bool CastMessageBuilder::TimeToSendNextCastMessage(
+ base::TimeTicks* time_to_send) {
+ // We haven't received any packets.
+ if (last_update_time_.is_null() && frame_id_map_->Empty()) return false;
+
+ *time_to_send = last_update_time_ +
+ base::TimeDelta::FromMilliseconds(kCastMessageUpdateIntervalMs);
+ return true;
+}
+
+// Periodic-timer entry point: sends a cast message only if the update
+// interval has elapsed (see UpdateCastMessageInternal).
+void CastMessageBuilder::UpdateCastMessage() {
+ RtcpCastMessage message(media_ssrc_);
+ if (!UpdateCastMessageInternal(&message)) return;
+
+ // Send cast message.
+ cast_feedback_->CastFeedback(message);
+}
+
+// Returns the builder to its initial state: waiting for a key frame, ACKing
+// kStartFrameId, with all NACK bookkeeping cleared.
+void CastMessageBuilder::Reset() {
+ waiting_for_key_frame_ = true;
+ cast_msg_.ack_frame_id_ = kStartFrameId;
+ cast_msg_.missing_frames_and_packets_.clear();
+ time_last_nacked_map_.clear();
+}
+
+// Refreshes |cast_msg_| if kCastMessageUpdateIntervalMs has elapsed since the
+// last update; copies the result into |message| and returns true when a
+// message should be sent.
+bool CastMessageBuilder::UpdateCastMessageInternal(RtcpCastMessage* message) {
+ if (last_update_time_.is_null()) {
+ if (!frame_id_map_->Empty()) {
+ // We have received packets.
+ last_update_time_ = clock_->NowTicks();
+ }
+ return false;
+ }
+ // Is it time to update the cast message?
+ base::TimeTicks now = clock_->NowTicks();
+ if (now - last_update_time_ <
+ base::TimeDelta::FromMilliseconds(kCastMessageUpdateIntervalMs)) {
+ return false;
+ }
+ last_update_time_ = now;
+
+ UpdateAckMessage(); // Needed to cover when a frame is skipped.
+ BuildPacketList();
+ *message = cast_msg_;
+ return true;
+}
+
+// Rebuilds the NACK portion of |cast_msg_|: walks every frame between the
+// current ACK and the newest known frame, collecting missing packet ids. A
+// frame the map has never seen is NACKed wholesale with
+// kRtcpCastAllPacketsLost. Per-frame NACKs are rate-limited by
+// kNackRepeatIntervalMs via |time_last_nacked_map_|.
+void CastMessageBuilder::BuildPacketList() {
+ base::TimeTicks now = clock_->NowTicks();
+
+ // Clear message NACK list.
+ cast_msg_.missing_frames_and_packets_.clear();
+
+ // Are we missing packets?
+ if (frame_id_map_->Empty()) return;
+
+ uint32 newest_frame_id = frame_id_map_->NewestFrameId();
+ uint32 next_expected_frame_id = cast_msg_.ack_frame_id_ + 1;
+
+ // Iterate over all frames.
+ for (; !IsNewerFrameId(next_expected_frame_id, newest_frame_id);
+ ++next_expected_frame_id) {
+ TimeLastNackMap::iterator it =
+ time_last_nacked_map_.find(next_expected_frame_id);
+ if (it != time_last_nacked_map_.end()) {
+ // We have sent a NACK in this frame before, make sure enough time have
+ // passed.
+ if (now - it->second <
+ base::TimeDelta::FromMilliseconds(kNackRepeatIntervalMs)) {
+ continue;
+ }
+ }
+
+ PacketIdSet missing;
+ if (frame_id_map_->FrameExists(next_expected_frame_id)) {
+ bool last_frame = (newest_frame_id == next_expected_frame_id);
+ frame_id_map_->GetMissingPackets(next_expected_frame_id, last_frame,
+ &missing);
+ if (!missing.empty()) {
+ time_last_nacked_map_[next_expected_frame_id] = now;
+ cast_msg_.missing_frames_and_packets_.insert(
+ std::make_pair(next_expected_frame_id, missing));
+ }
+ } else {
+ // Frame entirely unknown: request full retransmission of the frame.
+ time_last_nacked_map_[next_expected_frame_id] = now;
+ missing.insert(kRtcpCastAllPacketsLost);
+ cast_msg_.missing_frames_and_packets_[next_expected_frame_id] = missing;
+ }
+ }
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/framer/frame_id_map.cc b/media/cast/framer/frame_id_map.cc
new file mode 100644
index 0000000..64c2284
--- /dev/null
+++ b/media/cast/framer/frame_id_map.cc
@@ -0,0 +1,256 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/framer/frame_id_map.h"
+
+#include "base/logging.h"
+#include "media/cast/rtp_receiver/rtp_receiver_defines.h"
+
+namespace media {
+namespace cast {
+
+FrameInfo::FrameInfo(uint32 frame_id,
+ uint32 referenced_frame_id,
+ uint16 max_packet_id,
+ bool key_frame)
+ : is_key_frame_(key_frame),
+ frame_id_(frame_id),
+ referenced_frame_id_(referenced_frame_id),
+ max_received_packet_id_(0) {
+ if (max_packet_id > 0) {
+ // Create the set with all packets missing.
+ for (uint16 i = 0; i <= max_packet_id; i++) {
+ missing_packets_.insert(i);
+ }
+ }
+}
+
+FrameInfo::~FrameInfo() {}
+
+bool FrameInfo::InsertPacket(uint16 packet_id) {
+ // Update the last received packet id.
+ if (IsNewerPacketId(packet_id, max_received_packet_id_)) {
+ max_received_packet_id_ = packet_id;
+ }
+ missing_packets_.erase(packet_id);
+ return missing_packets_.empty();
+}
+
+bool FrameInfo::Complete() const {
+ return missing_packets_.empty();
+}
+
+void FrameInfo::GetMissingPackets(bool newest_frame,
+ PacketIdSet* missing_packets) const {
+ if (newest_frame) {
+ // Missing packets capped by max_received_packet_id_.
+ PacketIdSet::const_iterator it_after_last_received =
+ missing_packets_.lower_bound(max_received_packet_id_);
+ missing_packets->insert(missing_packets_.begin(), it_after_last_received);
+ } else {
+ missing_packets->insert(missing_packets_.begin(), missing_packets_.end());
+ }
+}
+
+
+FrameIdMap::FrameIdMap()
+ : waiting_for_key_(true),
+ last_released_frame_(kStartFrameId),
+ newest_frame_id_(kStartFrameId) {
+}
+
+FrameIdMap::~FrameIdMap() {}
+
+bool FrameIdMap::InsertPacket(const RtpCastHeader& rtp_header, bool* complete) {
+ uint32 frame_id = rtp_header.frame_id;
+ uint32 reference_frame_id;
+ if (rtp_header.is_reference) {
+ reference_frame_id = rtp_header.reference_frame_id;
+ } else {
+ reference_frame_id = static_cast<uint32>(frame_id - 1);
+ }
+
+ if (rtp_header.is_key_frame && waiting_for_key_) {
+ last_released_frame_ = static_cast<uint32>(frame_id - 1);
+ waiting_for_key_ = false;
+ }
+
+ VLOG(3) << "InsertPacket frame:" << frame_id
+ << " packet:" << static_cast<int>(rtp_header.packet_id)
+ << " max packet:" << static_cast<int>(rtp_header.max_packet_id);
+
+ if (IsOlderFrameId(frame_id, last_released_frame_) && !waiting_for_key_) {
+ return false;
+ }
+
+ // Update the last received frame id.
+ if (IsNewerFrameId(frame_id, newest_frame_id_)) {
+ newest_frame_id_ = frame_id;
+ }
+
+ // Does this packet belong to a new frame?
+ FrameMap::iterator it = frame_map_.find(frame_id);
+ if (it == frame_map_.end()) {
+ // New frame.
+ linked_ptr<FrameInfo> frame_info(new FrameInfo(frame_id,
+ reference_frame_id,
+ rtp_header.max_packet_id,
+ rtp_header.is_key_frame));
+ std::pair<FrameMap::iterator, bool> retval =
+ frame_map_.insert(std::make_pair(frame_id, frame_info));
+
+ *complete = retval.first->second->InsertPacket(rtp_header.packet_id);
+ } else {
+ // Insert packet to existing frame.
+ *complete = it->second->InsertPacket(rtp_header.packet_id);
+ }
+ return true;
+}
+
+void FrameIdMap::RemoveOldFrames(uint32 frame_id) {
+ FrameMap::iterator it = frame_map_.begin();
+
+ while (it != frame_map_.end()) {
+ if (IsNewerFrameId(it->first, frame_id)) {
+ ++it;
+ } else {
+ // Older or equal; erase.
+ frame_map_.erase(it++);
+ }
+ }
+ last_released_frame_ = frame_id;
+}
+
+void FrameIdMap::Clear() {
+ frame_map_.clear();
+ waiting_for_key_ = true;
+ last_released_frame_ = kStartFrameId;
+ newest_frame_id_ = kStartFrameId;
+}
+
+uint32 FrameIdMap::NewestFrameId() const {
+ return newest_frame_id_;
+}
+
+bool FrameIdMap::NextContinuousFrame(uint32* frame_id) const {
+ FrameMap::const_iterator it;
+
+ for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
+ if (it->second->Complete() && ContinuousFrame(it->second.get())) {
+ *frame_id = it->first;
+ return true;
+ }
+ }
+ return false;
+}
+
+uint32 FrameIdMap::LastContinuousFrame() const {
+ uint32 last_continuous_frame_id = last_released_frame_;
+ uint32 next_expected_frame = last_released_frame_;
+
+ FrameMap::const_iterator it;
+
+ do {
+ next_expected_frame++;
+ it = frame_map_.find(next_expected_frame);
+ if (it == frame_map_.end()) break;
+ if (!it->second->Complete()) break;
+
+ // We found the next continuous frame.
+ last_continuous_frame_id = it->first;
+ } while (next_expected_frame != newest_frame_id_);
+ return last_continuous_frame_id;
+}
+
+bool FrameIdMap::NextAudioFrameAllowingMissingFrames(uint32* frame_id) const {
+ // First check if we have continuous frames.
+ if (NextContinuousFrame(frame_id)) return true;
+
+ // Find the oldest frame.
+ FrameMap::const_iterator it_best_match = frame_map_.end();
+ FrameMap::const_iterator it;
+
+ // Find first complete frame.
+ for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
+ if (it->second->Complete()) {
+ it_best_match = it;
+ break;
+ }
+ }
+ if (it_best_match == frame_map_.end()) return false; // No complete frame.
+
+ ++it;
+ for (; it != frame_map_.end(); ++it) {
+ if (it->second->Complete() &&
+ IsOlderFrameId(it->first, it_best_match->first)) {
+ it_best_match = it;
+ }
+ }
+ *frame_id = it_best_match->first;
+ return true;
+}
+
+bool FrameIdMap::NextVideoFrameAllowingSkippingFrames(uint32* frame_id) const {
+ // Find the oldest decodable frame.
+ FrameMap::const_iterator it_best_match = frame_map_.end();
+ FrameMap::const_iterator it;
+ for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
+ if (it->second->Complete() && DecodableVideoFrame(it->second.get())) {
+ it_best_match = it;
+ }
+ }
+ if (it_best_match == frame_map_.end()) return false;
+
+ *frame_id = it_best_match->first;
+ return true;
+}
+
+bool FrameIdMap::Empty() const {
+ return frame_map_.empty();
+}
+
+int FrameIdMap::NumberOfCompleteFrames() const {
+ int count = 0;
+ FrameMap::const_iterator it;
+ for (it = frame_map_.begin(); it != frame_map_.end(); ++it) {
+ if (it->second->Complete()) {
+ ++count;
+ }
+ }
+ return count;
+}
+
+bool FrameIdMap::FrameExists(uint32 frame_id) const {
+ return frame_map_.end() != frame_map_.find(frame_id);
+}
+
+void FrameIdMap::GetMissingPackets(uint32 frame_id,
+ bool last_frame,
+ PacketIdSet* missing_packets) const {
+ FrameMap::const_iterator it = frame_map_.find(frame_id);
+ if (it == frame_map_.end()) return;
+
+ it->second->GetMissingPackets(last_frame, missing_packets);
+}
+
+bool FrameIdMap::ContinuousFrame(FrameInfo* frame) const {
+ DCHECK(frame);
+ if (waiting_for_key_ && !frame->is_key_frame()) return false;
+ return static_cast<uint32>(last_released_frame_ + 1) == frame->frame_id();
+}
+
+bool FrameIdMap::DecodableVideoFrame(FrameInfo* frame) const {
+ if (frame->is_key_frame()) return true;
+ if (waiting_for_key_ && !frame->is_key_frame()) return false;
+
+ // Current frame is not necessarily referencing the last frame.
+ // Do we have the reference frame?
+ if (IsOlderFrameId(frame->referenced_frame_id(), last_released_frame_)) {
+ return true;
+ }
+ return frame->referenced_frame_id() == last_released_frame_;
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/framer/framer.cc b/media/cast/framer/framer.cc
new file mode 100644
index 0000000..e8464c1
--- /dev/null
+++ b/media/cast/framer/framer.cc
@@ -0,0 +1,139 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/framer/framer.h"
+
+#include "base/logging.h"
+
+namespace media {
+namespace cast {
+
+typedef FrameList::const_iterator ConstFrameIterator;
+
+Framer::Framer(base::TickClock* clock,
+ RtpPayloadFeedback* incoming_payload_feedback,
+ uint32 ssrc,
+ bool decoder_faster_than_max_frame_rate,
+ int max_unacked_frames)
+ : decoder_faster_than_max_frame_rate_(decoder_faster_than_max_frame_rate),
+ cast_msg_builder_(new CastMessageBuilder(clock, incoming_payload_feedback,
+ &frame_id_map_, ssrc, decoder_faster_than_max_frame_rate,
+ max_unacked_frames)) {
+ DCHECK(incoming_payload_feedback) << "Invalid argument";
+}
+
+Framer::~Framer() {}
+
+bool Framer::InsertPacket(const uint8* payload_data,
+ size_t payload_size,
+ const RtpCastHeader& rtp_header) {
+ bool complete = false;
+ if (!frame_id_map_.InsertPacket(rtp_header, &complete)) return false;
+
+ // Does this packet belong to a new frame?
+ FrameList::iterator it = frames_.find(rtp_header.frame_id);
+ if (it == frames_.end()) {
+ // New frame.
+ linked_ptr<FrameBuffer> frame_buffer(new FrameBuffer());
+ frame_buffer->InsertPacket(payload_data, payload_size, rtp_header);
+ frames_.insert(std::make_pair(rtp_header.frame_id, frame_buffer));
+ } else {
+ // Insert packet to existing frame buffer.
+ it->second->InsertPacket(payload_data, payload_size, rtp_header);
+ }
+
+ if (complete) {
+ // ACK as soon as possible.
+ VLOG(2) << "Complete frame " << static_cast<int>(rtp_header.frame_id);
+ cast_msg_builder_->CompleteFrameReceived(rtp_header.frame_id,
+ rtp_header.is_key_frame);
+ }
+ return complete;
+}
+
+// This does not release the frame.
+bool Framer::GetEncodedAudioFrame(EncodedAudioFrame* audio_frame,
+ uint32* rtp_timestamp,
+ bool* next_frame) {
+ uint32 frame_id;
+ // Find frame id.
+ if (frame_id_map_.NextContinuousFrame(&frame_id)) {
+ // We have our next frame.
+ *next_frame = true;
+ } else {
+ if (!frame_id_map_.NextAudioFrameAllowingMissingFrames(&frame_id)) {
+ return false;
+ }
+ *next_frame = false;
+ }
+
+ ConstFrameIterator it = frames_.find(frame_id);
+ DCHECK(it != frames_.end());
+ if (it == frames_.end()) return false;
+
+ return it->second->GetEncodedAudioFrame(audio_frame, rtp_timestamp);
+}
+
+// This does not release the frame.
+bool Framer::GetEncodedVideoFrame(EncodedVideoFrame* video_frame,
+ uint32* rtp_timestamp,
+ bool* next_frame) {
+ uint32 frame_id;
+ // Find frame id.
+ if (frame_id_map_.NextContinuousFrame(&frame_id)) {
+ // We have our next frame.
+ *next_frame = true;
+ } else {
+ // Check if we can skip frames when our decoder is too slow.
+ if (!decoder_faster_than_max_frame_rate_) return false;
+
+ if (!frame_id_map_.NextVideoFrameAllowingSkippingFrames(&frame_id)) {
+ return false;
+ }
+ *next_frame = false;
+ }
+
+ ConstFrameIterator it = frames_.find(frame_id);
+ DCHECK(it != frames_.end());
+ if (it == frames_.end()) return false;
+
+ return it->second->GetEncodedVideoFrame(video_frame, rtp_timestamp);
+}
+
+void Framer::Reset() {
+ frame_id_map_.Clear();
+ frames_.clear();
+ cast_msg_builder_->Reset();
+}
+
+void Framer::ReleaseFrame(uint32 frame_id) {
+ frame_id_map_.RemoveOldFrames(frame_id);
+ frames_.erase(frame_id);
+
+ // We have a frame - remove all frames with lower frame id.
+ bool skipped_old_frame = false;
+ FrameList::iterator it;
+ for (it = frames_.begin(); it != frames_.end(); ) {
+ if (IsOlderFrameId(it->first, frame_id)) {
+ frames_.erase(it++);
+ skipped_old_frame = true;
+ } else {
+ ++it;
+ }
+ }
+ if (skipped_old_frame) {
+ cast_msg_builder_->UpdateCastMessage();
+ }
+}
+
+bool Framer::TimeToSendNextCastMessage(base::TimeTicks* time_to_send) {
+ return cast_msg_builder_->TimeToSendNextCastMessage(time_to_send);
+}
+
+void Framer::SendCastMessage() {
+ cast_msg_builder_->UpdateCastMessage();
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/net/rtp_sender/packet_storage/packet_storage.cc b/media/cast/net/rtp_sender/packet_storage/packet_storage.cc
new file mode 100644
index 0000000..0111a99
--- /dev/null
+++ b/media/cast/net/rtp_sender/packet_storage/packet_storage.cc
@@ -0,0 +1,174 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/net/rtp_sender/packet_storage/packet_storage.h"
+
+#include <string>
+
+#include "base/logging.h"
+#include "media/cast/cast_defines.h"
+
+namespace media {
+namespace cast {
+
+// Limit the max time delay to avoid frame id wrap around; 256 / 60 fps.
+const int kMaxAllowedTimeStoredMs = 4000;
+
+typedef PacketMap::iterator PacketMapIterator;
+typedef TimeToPacketMap::iterator TimeToPacketIterator;
+
+class StoredPacket {
+ public:
+ StoredPacket() {
+ packet_.reserve(kIpPacketSize);
+ }
+
+ void Save(const Packet* packet) {
+ DCHECK_LT(packet->size(), kIpPacketSize) << "Invalid argument";
+ packet_.clear();
+ packet_.insert(packet_.begin(), packet->begin(), packet->end());
+ }
+
+ void GetCopy(PacketList* packets) {
+ packets->push_back(Packet(packet_.begin(), packet_.end()));
+ }
+
+ private:
+ Packet packet_;
+};
+
+PacketStorage::PacketStorage(base::TickClock* clock,
+ int max_time_stored_ms)
+ : clock_(clock) {
+ max_time_stored_ = base::TimeDelta::FromMilliseconds(max_time_stored_ms);
+ DCHECK_LE(max_time_stored_ms, kMaxAllowedTimeStoredMs) << "Invalid argument";
+}
+
+PacketStorage::~PacketStorage() {
+ time_to_packet_map_.clear();
+
+ PacketMapIterator store_it = stored_packets_.begin();
+ for (; store_it != stored_packets_.end();
+ store_it = stored_packets_.begin()) {
+ stored_packets_.erase(store_it);
+ }
+ while (!free_packets_.empty()) {
+ free_packets_.pop_front();
+ }
+}
+
+void PacketStorage::CleanupOldPackets(base::TimeTicks now) {
+ TimeToPacketIterator time_it = time_to_packet_map_.begin();
+
+ // Check max size.
+ while (time_to_packet_map_.size() >= kMaxStoredPackets) {
+ PacketMapIterator store_it = stored_packets_.find(time_it->second);
+
+ // We should always find the packet.
+ DCHECK(store_it != stored_packets_.end()) << "Invalid state";
+ time_to_packet_map_.erase(time_it);
+ // Save the pointer.
+ linked_ptr<StoredPacket> storted_packet = store_it->second;
+ stored_packets_.erase(store_it);
+ // Add this packet to the free list for later re-use.
+ free_packets_.push_back(storted_packet);
+ time_it = time_to_packet_map_.begin();
+ }
+
+ // Time out old packets.
+ while (time_it != time_to_packet_map_.end()) {
+ if (now < time_it->first + max_time_stored_) {
+ break;
+ }
+ // Packet too old.
+ PacketMapIterator store_it = stored_packets_.find(time_it->second);
+
+ // We should always find the packet.
+ DCHECK(store_it != stored_packets_.end()) << "Invalid state";
+ time_to_packet_map_.erase(time_it);
+ // Save the pointer.
+ linked_ptr<StoredPacket> storted_packet = store_it->second;
+ stored_packets_.erase(store_it);
+ // Add this packet to the free list for later re-use.
+ free_packets_.push_back(storted_packet);
+ time_it = time_to_packet_map_.begin();
+ }
+}
+
+void PacketStorage::StorePacket(uint32 frame_id, uint16 packet_id,
+ const Packet* packet) {
+ base::TimeTicks now = clock_->NowTicks();
+ CleanupOldPackets(now);
+
+ // Internally we only use the 8 LSB of the frame id.
+ uint32 index = ((0xff & frame_id) << 16) + packet_id;
+ PacketMapIterator it = stored_packets_.find(index);
+ if (it != stored_packets_.end()) {
+ // We have already saved this.
+ DCHECK(false) << "Invalid state";
+ return;
+ }
+ linked_ptr<StoredPacket> stored_packet;
+ if (free_packets_.empty()) {
+ // No previous allocated packets allocate one.
+ stored_packet.reset(new StoredPacket());
+ } else {
+ // Re-use previous allocated packet.
+ stored_packet = free_packets_.front();
+ free_packets_.pop_front();
+ }
+ stored_packet->Save(packet);
+ stored_packets_[index] = stored_packet;
+ time_to_packet_map_.insert(std::make_pair(now, index));
+}
+
+PacketList PacketStorage::GetPackets(
+ const MissingFramesAndPacketsMap& missing_frames_and_packets) {
+ PacketList packets_to_resend;
+
+ // Iterate over all frames in the list.
+ for (MissingFramesAndPacketsMap::const_iterator it =
+ missing_frames_and_packets.begin();
+ it != missing_frames_and_packets.end(); ++it) {
+ uint8 frame_id = it->first;
+ const PacketIdSet& packets_set = it->second;
+ bool success = false;
+
+ if (packets_set.empty()) {
+ VLOG(3) << "Missing all packets in frame " << static_cast<int>(frame_id);
+
+ uint16 packet_id = 0;
+ do {
+ // Get packet from storage.
+ success = GetPacket(frame_id, packet_id, &packets_to_resend);
+ ++packet_id;
+ } while (success);
+ } else {
+ // Iterate over all of the packets in the frame.
+ for (PacketIdSet::const_iterator set_it = packets_set.begin();
+ set_it != packets_set.end(); ++set_it) {
+ GetPacket(frame_id, *set_it, &packets_to_resend);
+ }
+ }
+ }
+ return packets_to_resend;
+}
+
+bool PacketStorage::GetPacket(uint8 frame_id,
+ uint16 packet_id,
+ PacketList* packets) {
+ // Internally we only use the 8 LSB of the frame id.
+ uint32 index = (static_cast<uint32>(frame_id) << 16) + packet_id;
+ PacketMapIterator it = stored_packets_.find(index);
+ if (it == stored_packets_.end()) {
+ return false;
+ }
+ it->second->GetCopy(packets);
+ VLOG(3) << "Resend " << static_cast<int>(frame_id)
+ << ":" << packet_id;
+ return true;
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/net/rtp_sender/rtp_sender.cc b/media/cast/net/rtp_sender/rtp_sender.cc
new file mode 100644
index 0000000..1f47648
--- /dev/null
+++ b/media/cast/net/rtp_sender/rtp_sender.cc
@@ -0,0 +1,145 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/net/rtp_sender/rtp_sender.h"
+
+#include "base/logging.h"
+#include "base/rand_util.h"
+#include "media/cast/cast_defines.h"
+#include "media/cast/net/pacing/paced_sender.h"
+#include "media/cast/rtcp/rtcp_defines.h"
+#include "net/base/big_endian.h"
+
+namespace media {
+namespace cast {
+
+RtpSender::RtpSender(scoped_refptr<CastEnvironment> cast_environment,
+ const AudioSenderConfig* audio_config,
+ const VideoSenderConfig* video_config,
+ PacedPacketSender* transport)
+ : cast_environment_(cast_environment),
+ config_(),
+ transport_(transport) {
+ // Store generic cast config and create packetizer config.
+ DCHECK(audio_config || video_config) << "Invalid argument";
+ if (audio_config) {
+ storage_.reset(new PacketStorage(cast_environment->Clock(),
+ audio_config->rtp_history_ms));
+ config_.audio = true;
+ config_.ssrc = audio_config->sender_ssrc;
+ config_.payload_type = audio_config->rtp_payload_type;
+ config_.frequency = audio_config->frequency;
+ config_.audio_codec = audio_config->codec;
+ } else {
+ storage_.reset(new PacketStorage(cast_environment->Clock(),
+ video_config->rtp_history_ms));
+ config_.audio = false;
+ config_.ssrc = video_config->sender_ssrc;
+ config_.payload_type = video_config->rtp_payload_type;
+ config_.frequency = kVideoFrequency;
+ config_.video_codec = video_config->codec;
+ }
+ // Randomly set start values.
+ config_.sequence_number = base::RandInt(0, 65535);
+ config_.rtp_timestamp = base::RandInt(0, 65535);
+ config_.rtp_timestamp += base::RandInt(0, 65535) << 16;
+ packetizer_.reset(new RtpPacketizer(transport, storage_.get(), config_));
+}
+
+RtpSender::~RtpSender() {}
+
+void RtpSender::IncomingEncodedVideoFrame(const EncodedVideoFrame* video_frame,
+ const base::TimeTicks& capture_time) {
+ packetizer_->IncomingEncodedVideoFrame(video_frame, capture_time);
+}
+
+void RtpSender::IncomingEncodedAudioFrame(const EncodedAudioFrame* audio_frame,
+ const base::TimeTicks& recorded_time) {
+ packetizer_->IncomingEncodedAudioFrame(audio_frame, recorded_time);
+}
+
+void RtpSender::ResendPackets(
+ const MissingFramesAndPacketsMap& missing_frames_and_packets) {
+ // Iterate over all frames in the list.
+ for (MissingFramesAndPacketsMap::const_iterator it =
+ missing_frames_and_packets.begin();
+ it != missing_frames_and_packets.end(); ++it) {
+ PacketList packets_to_resend;
+ uint8 frame_id = it->first;
+ const PacketIdSet& packets_set = it->second;
+ bool success = false;
+
+ if (packets_set.empty()) {
+ VLOG(3) << "Missing all packets in frame " << static_cast<int>(frame_id);
+
+ uint16 packet_id = 0;
+ do {
+ // Get packet from storage.
+ success = storage_->GetPacket(frame_id, packet_id, &packets_to_resend);
+
+ // Resend packet to the network.
+ if (success) {
+ VLOG(3) << "Resend " << static_cast<int>(frame_id)
+ << ":" << packet_id;
+ // Set a unique incremental sequence number for every packet.
+ Packet& packet = packets_to_resend.back();
+ UpdateSequenceNumber(&packet);
+ // Set the size as correspond to each frame.
+ ++packet_id;
+ }
+ } while (success);
+ } else {
+ // Iterate over all of the packets in the frame.
+ for (PacketIdSet::const_iterator set_it = packets_set.begin();
+ set_it != packets_set.end(); ++set_it) {
+ uint16 packet_id = *set_it;
+ success = storage_->GetPacket(frame_id, packet_id, &packets_to_resend);
+
+ // Resend packet to the network.
+ if (success) {
+ VLOG(3) << "Resend " << static_cast<int>(frame_id)
+ << ":" << packet_id;
+ Packet& packet = packets_to_resend.back();
+ UpdateSequenceNumber(&packet);
+ }
+ }
+ }
+ transport_->ResendPackets(packets_to_resend);
+ }
+}
+
+void RtpSender::UpdateSequenceNumber(Packet* packet) {
+ uint16 new_sequence_number = packetizer_->NextSequenceNumber();
+ int index = 2;
+ (*packet)[index] = (static_cast<uint8>(new_sequence_number));
+ (*packet)[index + 1] =(static_cast<uint8>(new_sequence_number >> 8));
+}
+
+void RtpSender::RtpStatistics(const base::TimeTicks& now,
+ RtcpSenderInfo* sender_info) {
+ // The timestamp of this Rtcp packet should be estimated as the timestamp of
+ // the frame being captured at this moment. We are calculating that
+ // timestamp as the last frame's timestamp + the time since the last frame
+ // was captured.
+ uint32 ntp_seconds = 0;
+ uint32 ntp_fraction = 0;
+ ConvertTimeTicksToNtp(now, &ntp_seconds, &ntp_fraction);
+ sender_info->ntp_seconds = ntp_seconds;
+ sender_info->ntp_fraction = ntp_fraction;
+
+ base::TimeTicks time_sent;
+ uint32 rtp_timestamp;
+ if (packetizer_->LastSentTimestamp(&time_sent, &rtp_timestamp)) {
+ base::TimeDelta time_since_last_send = now - time_sent;
+ sender_info->rtp_timestamp = rtp_timestamp +
+ time_since_last_send.InMilliseconds() * (config_.frequency / 1000);
+ } else {
+ sender_info->rtp_timestamp = 0;
+ }
+ sender_info->send_packet_count = packetizer_->send_packets_count();
+ sender_info->send_octet_count = packetizer_->send_octet_count();
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/rtcp/rtcp_sender_unittest.cc b/media/cast/rtcp/rtcp_sender_unittest.cc
new file mode 100644
index 0000000..7da1610
--- /dev/null
+++ b/media/cast/rtcp/rtcp_sender_unittest.cc
@@ -0,0 +1,590 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/memory/scoped_ptr.h"
+#include "base/test/simple_test_tick_clock.h"
+#include "media/cast/cast_defines.h"
+#include "media/cast/cast_environment.h"
+#include "media/cast/net/pacing/paced_sender.h"
+#include "media/cast/rtcp/rtcp_sender.h"
+#include "media/cast/rtcp/rtcp_utility.h"
+#include "media/cast/rtcp/test_rtcp_packet_builder.h"
+#include "media/cast/test/fake_task_runner.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace media {
+namespace cast {
+
+namespace {
+static const uint32 kSendingSsrc = 0x12345678;
+static const uint32 kMediaSsrc = 0x87654321;
+static const std::string kCName("test@10.1.1.1");
+} // namespace
+
+class TestRtcpTransport : public PacedPacketSender {
+ public:
+ TestRtcpTransport()
+ : expected_packet_length_(0),
+ packet_count_(0) {
+ }
+
+ virtual bool SendRtcpPacket(const Packet& packet) OVERRIDE {
+ EXPECT_EQ(expected_packet_length_, packet.size());
+ EXPECT_EQ(0, memcmp(expected_packet_, &(packet[0]), packet.size()));
+ packet_count_++;
+ return true;
+ }
+
+ virtual bool SendPackets(const PacketList& packets) OVERRIDE {
+ return false;
+ }
+
+ virtual bool ResendPackets(const PacketList& packets) OVERRIDE {
+ return false;
+ }
+
+ void SetExpectedRtcpPacket(const uint8* rtcp_buffer, size_t length) {
+ expected_packet_length_ = length;
+ memcpy(expected_packet_, rtcp_buffer, length);
+ }
+
+ int packet_count() const { return packet_count_; }
+
+ private:
+ uint8 expected_packet_[kIpPacketSize];
+ size_t expected_packet_length_;
+ int packet_count_;
+};
+
+class RtcpSenderTest : public ::testing::Test {
+ protected:
+ RtcpSenderTest()
+ : task_runner_(new test::FakeTaskRunner(&testing_clock_)),
+ cast_environment_(new CastEnvironment(&testing_clock_, task_runner_,
+ task_runner_, task_runner_, task_runner_, task_runner_,
+ GetDefaultCastLoggingConfig())),
+ rtcp_sender_(new RtcpSender(cast_environment_,
+ &test_transport_,
+ kSendingSsrc,
+ kCName)) {
+ }
+
+ base::SimpleTestTickClock testing_clock_;
+ TestRtcpTransport test_transport_;
+ scoped_refptr<test::FakeTaskRunner> task_runner_;
+ scoped_refptr<CastEnvironment> cast_environment_;
+ scoped_ptr<RtcpSender> rtcp_sender_;
+};
+
+TEST_F(RtcpSenderTest, RtcpSenderReport) {
+ RtcpSenderInfo sender_info;
+ sender_info.ntp_seconds = kNtpHigh;
+ sender_info.ntp_fraction = kNtpLow;
+ sender_info.rtp_timestamp = kRtpTimestamp;
+ sender_info.send_packet_count = kSendPacketCount;
+ sender_info.send_octet_count = kSendOctetCount;
+
+ // Sender report + c_name.
+ TestRtcpPacketBuilder p;
+ p.AddSr(kSendingSsrc, 0);
+ p.AddSdesCname(kSendingSsrc, kCName);
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ rtcp_sender_->SendRtcpFromRtpSender(RtcpSender::kRtcpSr,
+ &sender_info,
+ NULL,
+ NULL);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+}
+
+TEST_F(RtcpSenderTest, RtcpReceiverReport) {
+ // Empty receiver report + c_name.
+ TestRtcpPacketBuilder p1;
+ p1.AddRr(kSendingSsrc, 0);
+ p1.AddSdesCname(kSendingSsrc, kCName);
+ test_transport_.SetExpectedRtcpPacket(p1.Packet(), p1.Length());
+
+ rtcp_sender_->SendRtcpFromRtpReceiver(RtcpSender::kRtcpRr,
+ NULL, NULL, NULL, NULL);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+
+ // Receiver report with report block + c_name.
+ TestRtcpPacketBuilder p2;
+ p2.AddRr(kSendingSsrc, 1);
+ p2.AddRb(kMediaSsrc);
+ p2.AddSdesCname(kSendingSsrc, kCName);
+ test_transport_.SetExpectedRtcpPacket(p2.Packet(), p2.Length());
+
+ RtcpReportBlock report_block;
+ // Initialize remote_ssrc to a "clearly illegal" value.
+ report_block.remote_ssrc = 0xDEAD;
+ report_block.media_ssrc = kMediaSsrc; // SSRC of the RTP packet sender.
+ report_block.fraction_lost = kLoss >> 24;
+ report_block.cumulative_lost = kLoss; // 24 bits valid.
+ report_block.extended_high_sequence_number = kExtendedMax;
+ report_block.jitter = kTestJitter;
+ report_block.last_sr = kLastSr;
+ report_block.delay_since_last_sr = kDelayLastSr;
+
+ rtcp_sender_->SendRtcpFromRtpReceiver(RtcpSender::kRtcpRr, &report_block,
+ NULL, NULL, NULL);
+
+ EXPECT_EQ(2, test_transport_.packet_count());
+}
+
+TEST_F(RtcpSenderTest, RtcpSenderReportWithDlrr) {
+ RtcpSenderInfo sender_info;
+ sender_info.ntp_seconds = kNtpHigh;
+ sender_info.ntp_fraction = kNtpLow;
+ sender_info.rtp_timestamp = kRtpTimestamp;
+ sender_info.send_packet_count = kSendPacketCount;
+ sender_info.send_octet_count = kSendOctetCount;
+
+ // Sender report + c_name + dlrr.
+ TestRtcpPacketBuilder p1;
+ p1.AddSr(kSendingSsrc, 0);
+ p1.AddSdesCname(kSendingSsrc, kCName);
+ p1.AddXrHeader(kSendingSsrc);
+ p1.AddXrDlrrBlock(kSendingSsrc);
+ test_transport_.SetExpectedRtcpPacket(p1.Packet(), p1.Length());
+
+ RtcpDlrrReportBlock dlrr_rb;
+ dlrr_rb.last_rr = kLastRr;
+ dlrr_rb.delay_since_last_rr = kDelayLastRr;
+
+ rtcp_sender_->SendRtcpFromRtpSender(
+ RtcpSender::kRtcpSr | RtcpSender::kRtcpDlrr,
+ &sender_info,
+ &dlrr_rb,
+ NULL);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+}
+
+TEST_F(RtcpSenderTest, RtcpSenderReportWithDlrrAndLog) {
+ RtcpSenderInfo sender_info;
+ sender_info.ntp_seconds = kNtpHigh;
+ sender_info.ntp_fraction = kNtpLow;
+ sender_info.rtp_timestamp = kRtpTimestamp;
+ sender_info.send_packet_count = kSendPacketCount;
+ sender_info.send_octet_count = kSendOctetCount;
+
+ // Sender report + c_name + dlrr + sender log.
+ TestRtcpPacketBuilder p;
+ p.AddSr(kSendingSsrc, 0);
+ p.AddSdesCname(kSendingSsrc, kCName);
+ p.AddXrHeader(kSendingSsrc);
+ p.AddXrDlrrBlock(kSendingSsrc);
+ p.AddSenderLog(kSendingSsrc);
+ p.AddSenderFrameLog(kRtcpSenderFrameStatusSentToNetwork, kRtpTimestamp);
+
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpDlrrReportBlock dlrr_rb;
+ dlrr_rb.last_rr = kLastRr;
+ dlrr_rb.delay_since_last_rr = kDelayLastRr;
+
+ RtcpSenderFrameLogMessage sender_frame_log;
+ sender_frame_log.frame_status = kRtcpSenderFrameStatusSentToNetwork;
+ sender_frame_log.rtp_timestamp = kRtpTimestamp;
+
+ RtcpSenderLogMessage sender_log;
+ sender_log.push_back(sender_frame_log);
+
+ rtcp_sender_->SendRtcpFromRtpSender(
+ RtcpSender::kRtcpSr | RtcpSender::kRtcpDlrr | RtcpSender::kRtcpSenderLog,
+ &sender_info,
+ &dlrr_rb,
+ &sender_log);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+ EXPECT_TRUE(sender_log.empty());
+}
+
+TEST_F(RtcpSenderTest, RtcpSenderReporWithTooManyLogFrames) {
+ RtcpSenderInfo sender_info;
+ sender_info.ntp_seconds = kNtpHigh;
+ sender_info.ntp_fraction = kNtpLow;
+ sender_info.rtp_timestamp = kRtpTimestamp;
+ sender_info.send_packet_count = kSendPacketCount;
+ sender_info.send_octet_count = kSendOctetCount;
+
+ // Sender report + c_name + sender log.
+ TestRtcpPacketBuilder p;
+ p.AddSr(kSendingSsrc, 0);
+ p.AddSdesCname(kSendingSsrc, kCName);
+ p.AddSenderLog(kSendingSsrc);
+
+ for (int i = 0; i < 359; ++i) {
+ p.AddSenderFrameLog(kRtcpSenderFrameStatusSentToNetwork,
+ kRtpTimestamp + i * 90);
+ }
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+
+ RtcpSenderLogMessage sender_log;
+ for (int j = 0; j < 400; ++j) {
+ RtcpSenderFrameLogMessage sender_frame_log;
+ sender_frame_log.frame_status = kRtcpSenderFrameStatusSentToNetwork;
+ sender_frame_log.rtp_timestamp = kRtpTimestamp + j * 90;
+ sender_log.push_back(sender_frame_log);
+ }
+
+ rtcp_sender_->SendRtcpFromRtpSender(
+ RtcpSender::kRtcpSr | RtcpSender::kRtcpSenderLog,
+ &sender_info,
+ NULL,
+ &sender_log);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+ EXPECT_EQ(41u, sender_log.size());
+}
+
+TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtr) {
+ // Receiver report with report block + c_name.
+ TestRtcpPacketBuilder p;
+ p.AddRr(kSendingSsrc, 1);
+ p.AddRb(kMediaSsrc);
+ p.AddSdesCname(kSendingSsrc, kCName);
+ p.AddXrHeader(kSendingSsrc);
+ p.AddXrRrtrBlock();
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpReportBlock report_block;
+ // Initialize remote_ssrc to a "clearly illegal" value.
+ report_block.remote_ssrc = 0xDEAD;
+ report_block.media_ssrc = kMediaSsrc; // SSRC of the RTP packet sender.
+ report_block.fraction_lost = kLoss >> 24;
+ report_block.cumulative_lost = kLoss; // 24 bits valid.
+ report_block.extended_high_sequence_number = kExtendedMax;
+ report_block.jitter = kTestJitter;
+ report_block.last_sr = kLastSr;
+ report_block.delay_since_last_sr = kDelayLastSr;
+
+ RtcpReceiverReferenceTimeReport rrtr;
+ rrtr.ntp_seconds = kNtpHigh;
+ rrtr.ntp_fraction = kNtpLow;
+
+ rtcp_sender_->SendRtcpFromRtpReceiver(
+ RtcpSender::kRtcpRr | RtcpSender::kRtcpRrtr,
+ &report_block,
+ &rrtr,
+ NULL,
+ NULL);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+}
+
+TEST_F(RtcpSenderTest, RtcpReceiverReportWithCast) {
+ // Receiver report with report block + c_name.
+ TestRtcpPacketBuilder p;
+ p.AddRr(kSendingSsrc, 1);
+ p.AddRb(kMediaSsrc);
+ p.AddSdesCname(kSendingSsrc, kCName);
+ p.AddCast(kSendingSsrc, kMediaSsrc);
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpReportBlock report_block;
+ // Initialize remote_ssrc to a "clearly illegal" value.
+ report_block.remote_ssrc = 0xDEAD;
+ report_block.media_ssrc = kMediaSsrc; // SSRC of the RTP packet sender.
+ report_block.fraction_lost = kLoss >> 24;
+ report_block.cumulative_lost = kLoss; // 24 bits valid.
+ report_block.extended_high_sequence_number = kExtendedMax;
+ report_block.jitter = kTestJitter;
+ report_block.last_sr = kLastSr;
+ report_block.delay_since_last_sr = kDelayLastSr;
+
+ RtcpCastMessage cast_message(kMediaSsrc);
+ cast_message.ack_frame_id_ = kAckFrameId;
+ PacketIdSet missing_packets;
+ cast_message.missing_frames_and_packets_[kLostFrameId] = missing_packets;
+
+ missing_packets.insert(kLostPacketId1);
+ missing_packets.insert(kLostPacketId2);
+ missing_packets.insert(kLostPacketId3);
+ cast_message.missing_frames_and_packets_[kFrameIdWithLostPackets] =
+ missing_packets;
+
+ rtcp_sender_->SendRtcpFromRtpReceiver(
+ RtcpSender::kRtcpRr | RtcpSender::kRtcpCast,
+ &report_block,
+ NULL,
+ &cast_message,
+ NULL);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+}
+
+TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtraAndCastMessage) {
+ TestRtcpPacketBuilder p;
+ p.AddRr(kSendingSsrc, 1);
+ p.AddRb(kMediaSsrc);
+ p.AddSdesCname(kSendingSsrc, kCName);
+ p.AddXrHeader(kSendingSsrc);
+ p.AddXrRrtrBlock();
+ p.AddCast(kSendingSsrc, kMediaSsrc);
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpReportBlock report_block;
+ // Initialize remote_ssrc to a "clearly illegal" value.
+ report_block.remote_ssrc = 0xDEAD;
+ report_block.media_ssrc = kMediaSsrc; // SSRC of the RTP packet sender.
+ report_block.fraction_lost = kLoss >> 24;
+ report_block.cumulative_lost = kLoss; // 24 bits valid.
+ report_block.extended_high_sequence_number = kExtendedMax;
+ report_block.jitter = kTestJitter;
+ report_block.last_sr = kLastSr;
+ report_block.delay_since_last_sr = kDelayLastSr;
+
+ RtcpReceiverReferenceTimeReport rrtr;
+ rrtr.ntp_seconds = kNtpHigh;
+ rrtr.ntp_fraction = kNtpLow;
+
+ RtcpCastMessage cast_message(kMediaSsrc);
+ cast_message.ack_frame_id_ = kAckFrameId;
+ PacketIdSet missing_packets;
+ cast_message.missing_frames_and_packets_[kLostFrameId] = missing_packets;
+
+ missing_packets.insert(kLostPacketId1);
+ missing_packets.insert(kLostPacketId2);
+ missing_packets.insert(kLostPacketId3);
+ cast_message.missing_frames_and_packets_[kFrameIdWithLostPackets] =
+ missing_packets;
+
+ rtcp_sender_->SendRtcpFromRtpReceiver(
+ RtcpSender::kRtcpRr | RtcpSender::kRtcpRrtr | RtcpSender::kRtcpCast,
+ &report_block,
+ &rrtr,
+ &cast_message,
+ NULL);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+}
+
+TEST_F(RtcpSenderTest, RtcpReceiverReportWithRrtrCastMessageAndLog) {
+ static const uint32 kTimeBaseMs = 12345678;
+ static const uint32 kTimeDelayMs = 10;
+ static const uint32 kDelayDeltaMs = 123;
+
+ TestRtcpPacketBuilder p;
+ p.AddRr(kSendingSsrc, 1);
+ p.AddRb(kMediaSsrc);
+ p.AddSdesCname(kSendingSsrc, kCName);
+ p.AddXrHeader(kSendingSsrc);
+ p.AddXrRrtrBlock();
+ p.AddCast(kSendingSsrc, kMediaSsrc);
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpReportBlock report_block;
+ // Initialize remote_ssrc to a "clearly illegal" value.
+ report_block.remote_ssrc = 0xDEAD;
+ report_block.media_ssrc = kMediaSsrc; // SSRC of the RTP packet sender.
+ report_block.fraction_lost = kLoss >> 24;
+ report_block.cumulative_lost = kLoss; // 24 bits valid.
+ report_block.extended_high_sequence_number = kExtendedMax;
+ report_block.jitter = kTestJitter;
+ report_block.last_sr = kLastSr;
+ report_block.delay_since_last_sr = kDelayLastSr;
+
+ RtcpReceiverReferenceTimeReport rrtr;
+ rrtr.ntp_seconds = kNtpHigh;
+ rrtr.ntp_fraction = kNtpLow;
+
+ RtcpCastMessage cast_message(kMediaSsrc);
+ cast_message.ack_frame_id_ = kAckFrameId;
+ PacketIdSet missing_packets;
+ cast_message.missing_frames_and_packets_[kLostFrameId] = missing_packets;
+
+ missing_packets.insert(kLostPacketId1);
+ missing_packets.insert(kLostPacketId2);
+ missing_packets.insert(kLostPacketId3);
+ cast_message.missing_frames_and_packets_[kFrameIdWithLostPackets] =
+ missing_packets;
+
+ // Test empty Log message.
+ RtcpReceiverLogMessage receiver_log;
+
+ VLOG(1) << " Test empty Log " ;
+ rtcp_sender_->SendRtcpFromRtpReceiver(
+ RtcpSender::kRtcpRr | RtcpSender::kRtcpRrtr | RtcpSender::kRtcpCast |
+ RtcpSender::kRtcpReceiverLog,
+ &report_block,
+ &rrtr,
+ &cast_message,
+ &receiver_log);
+
+
+ base::SimpleTestTickClock testing_clock;
+ testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+
+ p.AddReceiverLog(kSendingSsrc);
+ p.AddReceiverFrameLog(kRtpTimestamp, 2, kTimeBaseMs);
+ p.AddReceiverEventLog(kDelayDeltaMs, 1, 0);
+ p.AddReceiverEventLog(kLostPacketId1, 6, kTimeDelayMs);
+
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpReceiverFrameLogMessage frame_log(kRtpTimestamp);
+ RtcpReceiverEventLogMessage event_log;
+
+ event_log.type = kAckSent;
+ event_log.event_timestamp = testing_clock.NowTicks();
+ event_log.delay_delta = base::TimeDelta::FromMilliseconds(kDelayDeltaMs);
+ frame_log.event_log_messages_.push_back(event_log);
+
+ testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ event_log.type = kPacketReceived;
+ event_log.event_timestamp = testing_clock.NowTicks();
+ event_log.packet_id = kLostPacketId1;
+ frame_log.event_log_messages_.push_back(event_log);
+
+ receiver_log.push_back(frame_log);
+
+ VLOG(1) << " Test Log " ;
+ rtcp_sender_->SendRtcpFromRtpReceiver(
+ RtcpSender::kRtcpRr | RtcpSender::kRtcpRrtr | RtcpSender::kRtcpCast |
+ RtcpSender::kRtcpReceiverLog,
+ &report_block,
+ &rrtr,
+ &cast_message,
+ &receiver_log);
+
+ EXPECT_TRUE(receiver_log.empty());
+ EXPECT_EQ(2, test_transport_.packet_count());
+}
+
+TEST_F(RtcpSenderTest, RtcpReceiverReportWithOversizedFrameLog) {
+ static const uint32 kTimeBaseMs = 12345678;
+ static const uint32 kTimeDelayMs = 10;
+ static const uint32 kDelayDeltaMs = 123;
+
+ TestRtcpPacketBuilder p;
+ p.AddRr(kSendingSsrc, 1);
+ p.AddRb(kMediaSsrc);
+ p.AddSdesCname(kSendingSsrc, kCName);
+
+ RtcpReportBlock report_block;
+ // Initialize remote_ssrc to a "clearly illegal" value.
+ report_block.remote_ssrc = 0xDEAD;
+ report_block.media_ssrc = kMediaSsrc; // SSRC of the RTP packet sender.
+ report_block.fraction_lost = kLoss >> 24;
+ report_block.cumulative_lost = kLoss; // 24 bits valid.
+ report_block.extended_high_sequence_number = kExtendedMax;
+ report_block.jitter = kTestJitter;
+ report_block.last_sr = kLastSr;
+ report_block.delay_since_last_sr = kDelayLastSr;
+
+ base::SimpleTestTickClock testing_clock;
+ testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+
+ p.AddReceiverLog(kSendingSsrc);
+
+ p.AddReceiverFrameLog(kRtpTimestamp, 1, kTimeBaseMs);
+ p.AddReceiverEventLog(kDelayDeltaMs, 1, 0);
+ p.AddReceiverFrameLog(kRtpTimestamp + 2345,
+ kRtcpMaxReceiverLogMessages, kTimeBaseMs);
+
+ for (size_t i = 0; i < kRtcpMaxReceiverLogMessages; ++i) {
+ p.AddReceiverEventLog(
+ kLostPacketId1, 6, static_cast<uint16>(kTimeDelayMs * i));
+ }
+
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpReceiverFrameLogMessage frame_1_log(kRtpTimestamp);
+ RtcpReceiverEventLogMessage event_log;
+
+ event_log.type = kAckSent;
+ event_log.event_timestamp = testing_clock.NowTicks();
+ event_log.delay_delta = base::TimeDelta::FromMilliseconds(kDelayDeltaMs);
+ frame_1_log.event_log_messages_.push_back(event_log);
+
+ RtcpReceiverLogMessage receiver_log;
+ receiver_log.push_back(frame_1_log);
+
+ RtcpReceiverFrameLogMessage frame_2_log(kRtpTimestamp + 2345);
+
+ for (int j = 0; j < 300; ++j) {
+ event_log.type = kPacketReceived;
+ event_log.event_timestamp = testing_clock.NowTicks();
+ event_log.packet_id = kLostPacketId1;
+ frame_2_log.event_log_messages_.push_back(event_log);
+ testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ }
+ receiver_log.push_back(frame_2_log);
+
+ rtcp_sender_->SendRtcpFromRtpReceiver(
+ RtcpSender::kRtcpRr | RtcpSender::kRtcpReceiverLog,
+ &report_block,
+ NULL,
+ NULL,
+ &receiver_log);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+ EXPECT_EQ(1u, receiver_log.size());
+ EXPECT_EQ(300u - kRtcpMaxReceiverLogMessages,
+ receiver_log.front().event_log_messages_.size());
+}
+
+TEST_F(RtcpSenderTest, RtcpReceiverReportWithTooManyLogFrames) {
+ static const uint32 kTimeBaseMs = 12345678;
+ static const uint32 kTimeDelayMs = 10;
+ static const uint32 kDelayDeltaMs = 123;
+
+ TestRtcpPacketBuilder p;
+ p.AddRr(kSendingSsrc, 1);
+ p.AddRb(kMediaSsrc);
+ p.AddSdesCname(kSendingSsrc, kCName);
+
+ RtcpReportBlock report_block;
+ // Initialize remote_ssrc to a "clearly illegal" value.
+ report_block.remote_ssrc = 0xDEAD;
+ report_block.media_ssrc = kMediaSsrc; // SSRC of the RTP packet sender.
+ report_block.fraction_lost = kLoss >> 24;
+ report_block.cumulative_lost = kLoss; // 24 bits valid.
+ report_block.extended_high_sequence_number = kExtendedMax;
+ report_block.jitter = kTestJitter;
+ report_block.last_sr = kLastSr;
+ report_block.delay_since_last_sr = kDelayLastSr;
+
+ base::SimpleTestTickClock testing_clock;
+ testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeBaseMs));
+
+ p.AddReceiverLog(kSendingSsrc);
+
+ for (int i = 0; i < 119; ++i) {
+ p.AddReceiverFrameLog(kRtpTimestamp, 1, kTimeBaseMs + i * kTimeDelayMs);
+ p.AddReceiverEventLog(kDelayDeltaMs, 1, 0);
+ }
+ test_transport_.SetExpectedRtcpPacket(p.Packet(), p.Length());
+
+ RtcpReceiverLogMessage receiver_log;
+
+ for (int j = 0; j < 200; ++j) {
+ RtcpReceiverFrameLogMessage frame_log(kRtpTimestamp);
+ RtcpReceiverEventLogMessage event_log;
+
+ event_log.type = kAckSent;
+ event_log.event_timestamp = testing_clock.NowTicks();
+ event_log.delay_delta = base::TimeDelta::FromMilliseconds(kDelayDeltaMs);
+ frame_log.event_log_messages_.push_back(event_log);
+ receiver_log.push_back(frame_log);
+ testing_clock.Advance(base::TimeDelta::FromMilliseconds(kTimeDelayMs));
+ }
+ rtcp_sender_->SendRtcpFromRtpReceiver(
+ RtcpSender::kRtcpRr | RtcpSender::kRtcpReceiverLog,
+ &report_block,
+ NULL,
+ NULL,
+ &receiver_log);
+
+ EXPECT_EQ(1, test_transport_.packet_count());
+ EXPECT_EQ(81u, receiver_log.size());
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/test/receiver.cc b/media/cast/test/receiver.cc
new file mode 100644
index 0000000..d7b4194e
--- /dev/null
+++ b/media/cast/test/receiver.cc
@@ -0,0 +1,264 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <climits>
+#include <cstdarg>
+#include <cstdio>
+#include <string>
+
+#include "base/at_exit.h"
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "base/threading/thread.h"
+#include "base/time/default_tick_clock.h"
+#include "media/base/video_frame.h"
+#include "media/cast/cast_config.h"
+#include "media/cast/cast_environment.h"
+#include "media/cast/cast_receiver.h"
+#include "media/cast/logging/logging_defines.h"
+#include "media/cast/test/transport/transport.h"
+#include "media/cast/test/utility/input_helper.h"
+
+#if defined(OS_LINUX)
+#include "media/cast/test/linux_output_window.h"
+#endif // OS_LINUX
+
+namespace media {
+namespace cast {
+// Settings chosen to match default sender settings.
+#define DEFAULT_SEND_PORT "2346"
+#define DEFAULT_RECEIVE_PORT "2344"
+#define DEFAULT_SEND_IP "127.0.0.1"
+#define DEFAULT_RESTART "0"
+#define DEFAULT_AUDIO_FEEDBACK_SSRC "1"
+#define DEFAULT_AUDIO_INCOMING_SSRC "2"
+#define DEFAULT_AUDIO_PAYLOAD_TYPE "127"
+#define DEFAULT_VIDEO_FEEDBACK_SSRC "12"
+#define DEFAULT_VIDEO_INCOMING_SSRC "11"
+#define DEFAULT_VIDEO_PAYLOAD_TYPE "96"
+#define DEFAULT_VIDEO_CODEC_WIDTH "640"
+#define DEFAULT_VIDEO_CODEC_HEIGHT "480"
+#define DEFAULT_VIDEO_CODEC_BITRATE "2000"
+
+static const int kAudioSamplingFrequency = 48000;
+#if defined(OS_LINUX)
+const int kVideoWindowWidth = 1280;
+const int kVideoWindowHeight = 720;
+#endif // OS_LINUX
+static const int kFrameTimerMs = 33;
+
+
+void GetPorts(int* tx_port, int* rx_port) {
+ test::InputBuilder tx_input("Enter send port.",
+ DEFAULT_SEND_PORT, 1, INT_MAX);
+ *tx_port = tx_input.GetIntInput();
+
+ test::InputBuilder rx_input("Enter receive port.",
+ DEFAULT_RECEIVE_PORT, 1, INT_MAX);
+ *rx_port = rx_input.GetIntInput();
+}
+
+std::string GetIpAddress(const std::string display_text) {
+ test::InputBuilder input(display_text, DEFAULT_SEND_IP,
+ INT_MIN, INT_MAX);
+ std::string ip_address = input.GetStringInput();
+ // Ensure correct form:
+ while (std::count(ip_address.begin(), ip_address.end(), '.') != 3) {
+ ip_address = input.GetStringInput();
+ }
+ return ip_address;
+}
+
+void GetSsrcs(AudioReceiverConfig* audio_config) {
+ test::InputBuilder input_tx("Choose audio sender SSRC.",
+ DEFAULT_AUDIO_FEEDBACK_SSRC, 1, INT_MAX);
+ audio_config->feedback_ssrc = input_tx.GetIntInput();
+
+ test::InputBuilder input_rx("Choose audio receiver SSRC.",
+ DEFAULT_AUDIO_INCOMING_SSRC, 1, INT_MAX);
+ audio_config->incoming_ssrc = input_tx.GetIntInput();
+}
+
+void GetSsrcs(VideoReceiverConfig* video_config) {
+ test::InputBuilder input_tx("Choose video sender SSRC.",
+ DEFAULT_VIDEO_FEEDBACK_SSRC, 1, INT_MAX);
+ video_config->feedback_ssrc = input_tx.GetIntInput();
+
+ test::InputBuilder input_rx("Choose video receiver SSRC.",
+ DEFAULT_VIDEO_INCOMING_SSRC, 1, INT_MAX);
+ video_config->incoming_ssrc = input_rx.GetIntInput();
+}
+
+void GetPayloadtype(AudioReceiverConfig* audio_config) {
+ test::InputBuilder input("Choose audio receiver payload type.",
+ DEFAULT_AUDIO_PAYLOAD_TYPE, 96, 127);
+ audio_config->rtp_payload_type = input.GetIntInput();
+}
+
+AudioReceiverConfig GetAudioReceiverConfig() {
+ AudioReceiverConfig audio_config;
+
+ GetSsrcs(&audio_config);
+ GetPayloadtype(&audio_config);
+
+ audio_config.rtcp_c_name = "audio_receiver@a.b.c.d";
+
+ VLOG(1) << "Using OPUS 48Khz stereo";
+ audio_config.use_external_decoder = false;
+ audio_config.frequency = 48000;
+ audio_config.channels = 2;
+ audio_config.codec = kOpus;
+ return audio_config;
+}
+
+void GetPayloadtype(VideoReceiverConfig* video_config) {
+ test::InputBuilder input("Choose video receiver payload type.",
+ DEFAULT_VIDEO_PAYLOAD_TYPE, 96, 127);
+ video_config->rtp_payload_type = input.GetIntInput();
+}
+
+VideoReceiverConfig GetVideoReceiverConfig() {
+ VideoReceiverConfig video_config;
+
+ GetSsrcs(&video_config);
+ GetPayloadtype(&video_config);
+
+ video_config.rtcp_c_name = "video_receiver@a.b.c.d";
+
+ video_config.use_external_decoder = false;
+
+ VLOG(1) << "Using VP8";
+ video_config.codec = kVp8;
+ return video_config;
+}
+
+
+class ReceiveProcess : public base::RefCountedThreadSafe<ReceiveProcess> {
+ public:
+ explicit ReceiveProcess(scoped_refptr<FrameReceiver> frame_receiver)
+ : frame_receiver_(frame_receiver),
+#if defined(OS_LINUX)
+ render_(0, 0, kVideoWindowWidth, kVideoWindowHeight, "Cast_receiver"),
+#endif // OS_LINUX
+ last_playout_time_(),
+ last_render_time_() {}
+
+ void Start() {
+ GetAudioFrame(base::TimeDelta::FromMilliseconds(kFrameTimerMs));
+ GetVideoFrame();
+ }
+
+ protected:
+ virtual ~ReceiveProcess() {}
+
+ private:
+ friend class base::RefCountedThreadSafe<ReceiveProcess>;
+
+ void DisplayFrame(const scoped_refptr<media::VideoFrame>& video_frame,
+ const base::TimeTicks& render_time) {
+#ifdef OS_LINUX
+ render_.RenderFrame(video_frame);
+#endif // OS_LINUX
+ // Print out the delta between frames.
+ if (!last_render_time_.is_null()){
+ base::TimeDelta time_diff = render_time - last_render_time_;
+ VLOG(1) << " RenderDelay[mS] = " << time_diff.InMilliseconds();
+ }
+ last_render_time_ = render_time;
+ GetVideoFrame();
+ }
+
+ void ReceiveAudioFrame(scoped_ptr<PcmAudioFrame> audio_frame,
+ const base::TimeTicks& playout_time) {
+ // For audio just print the playout delta between audio frames.
+ // Default diff time is kFrameTimerMs.
+ base::TimeDelta time_diff =
+ base::TimeDelta::FromMilliseconds(kFrameTimerMs);
+ if (!last_playout_time_.is_null()){
+ time_diff = playout_time - last_playout_time_;
+ VLOG(1) << " ***PlayoutDelay[mS] = " << time_diff.InMilliseconds();
+ }
+ last_playout_time_ = playout_time;
+ GetAudioFrame(time_diff);
+ }
+
+ void GetAudioFrame(base::TimeDelta playout_diff) {
+ int num_10ms_blocks = playout_diff.InMilliseconds() / 10;
+ frame_receiver_->GetRawAudioFrame(num_10ms_blocks, kAudioSamplingFrequency,
+ base::Bind(&ReceiveProcess::ReceiveAudioFrame, this));
+ }
+
+ void GetVideoFrame() {
+ frame_receiver_->GetRawVideoFrame(
+ base::Bind(&ReceiveProcess::DisplayFrame, this));
+ }
+
+ scoped_refptr<FrameReceiver> frame_receiver_;
+#ifdef OS_LINUX
+ test::LinuxOutputWindow render_;
+#endif // OS_LINUX
+ base::TimeTicks last_playout_time_;
+ base::TimeTicks last_render_time_;
+};
+
+} // namespace cast
+} // namespace media
+
+int main(int argc, char** argv) {
+ base::AtExitManager at_exit;
+ base::MessageLoopForIO main_message_loop;
+ VLOG(1) << "Cast Receiver";
+ base::Thread main_thread("Cast main send thread");
+ base::Thread audio_thread("Cast audio decoder thread");
+ base::Thread video_thread("Cast video decoder thread");
+ main_thread.Start();
+ audio_thread.Start();
+ video_thread.Start();
+
+ base::DefaultTickClock clock;
+ // Enable receiver side threads, and disable logging.
+ scoped_refptr<media::cast::CastEnvironment> cast_environment(new
+ media::cast::CastEnvironment(&clock,
+ main_thread.message_loop_proxy(),
+ NULL,
+ audio_thread.message_loop_proxy(),
+ NULL,
+ video_thread.message_loop_proxy(),
+ media::cast::GetDefaultCastLoggingConfig()));
+
+ media::cast::AudioReceiverConfig audio_config =
+ media::cast::GetAudioReceiverConfig();
+ media::cast::VideoReceiverConfig video_config =
+ media::cast::GetVideoReceiverConfig();
+
+ scoped_ptr<media::cast::test::Transport> transport(
+ new media::cast::test::Transport(main_message_loop.message_loop_proxy()));
+ scoped_ptr<media::cast::CastReceiver> cast_receiver(
+ media::cast::CastReceiver::CreateCastReceiver(
+ cast_environment,
+ audio_config,
+ video_config,
+ transport->packet_sender()));
+
+ media::cast::PacketReceiver* packet_receiver =
+ cast_receiver->packet_receiver();
+
+ int send_to_port, receive_port;
+ media::cast::GetPorts(&send_to_port, &receive_port);
+ std::string ip_address = media::cast::GetIpAddress("Enter destination IP.");
+ std::string local_ip_address = media::cast::GetIpAddress("Enter local IP.");
+ transport->SetLocalReceiver(packet_receiver, ip_address, local_ip_address,
+ receive_port);
+ transport->SetSendDestination(ip_address, send_to_port);
+
+ scoped_refptr<media::cast::ReceiveProcess> receive_process(
+ new media::cast::ReceiveProcess(cast_receiver->frame_receiver()));
+ receive_process->Start();
+ main_message_loop.Run();
+ transport->StopReceiving();
+ return 0;
+}
diff --git a/media/cast/test/sender.cc b/media/cast/test/sender.cc
new file mode 100644
index 0000000..d99fcc1
--- /dev/null
+++ b/media/cast/test/sender.cc
@@ -0,0 +1,365 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Test application that simulates a cast sender - Data can be either generated
+// or read from a file.
+
+#include "base/at_exit.h"
+#include "base/logging.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/threading/thread.h"
+#include "base/time/default_tick_clock.h"
+#include "media/base/video_frame.h"
+#include "media/cast/cast_config.h"
+#include "media/cast/cast_environment.h"
+#include "media/cast/cast_sender.h"
+#include "media/cast/logging/logging_defines.h"
+#include "media/cast/test/audio_utility.h"
+#include "media/cast/test/transport/transport.h"
+#include "media/cast/test/utility/input_helper.h"
+#include "media/cast/test/video_utility.h"
+#include "ui/gfx/size.h"
+
+namespace media {
+namespace cast {
+// Settings chosen to match default receiver settings.
+#define DEFAULT_SEND_PORT "2344"
+#define DEFAULT_RECEIVE_PORT "2346"
+#define DEFAULT_SEND_IP "127.0.0.1"
+#define DEFAULT_READ_FROM_FILE "0"
+#define DEFAULT_PACKET_LOSS "0"
+#define DEFAULT_AUDIO_SENDER_SSRC "1"
+#define DEFAULT_AUDIO_RECEIVER_SSRC "2"
+#define DEFAULT_AUDIO_PAYLOAD_TYPE "127"
+#define DEFAULT_VIDEO_SENDER_SSRC "11"
+#define DEFAULT_VIDEO_RECEIVER_SSRC "12"
+#define DEFAULT_VIDEO_PAYLOAD_TYPE "96"
+#define DEFAULT_VIDEO_CODEC_WIDTH "1280"
+#define DEFAULT_VIDEO_CODEC_HEIGHT "720"
+#define DEFAULT_VIDEO_CODEC_BITRATE "2000"
+#define DEFAULT_VIDEO_CODEC_MAX_BITRATE "4000"
+#define DEFAULT_VIDEO_CODEC_MIN_BITRATE "1000"
+
+namespace {
+static const int kAudioChannels = 2;
+static const int kAudioSamplingFrequency = 48000;
+static const int kSoundFrequency = 1234; // Frequency of sinusoid wave.
+// The tests are commonly implemented with |kFrameTimerMs| RunTask function;
+// a normal video is 30 fps hence the 33 ms between frames.
+static const float kSoundVolume = 0.5f;
+static const int kFrameTimerMs = 33;
+
+// Dummy callback function that does nothing except to accept ownership of
+// |audio_bus| for destruction. This guarantees that the audio_bus is valid for
+// the entire duration of the encode/send process (not equivalent to DoNothing).
+void OwnThatAudioBus(scoped_ptr<AudioBus> audio_bus) {
+}
+} // namespace
+
+void GetPorts(int* tx_port, int* rx_port) {
+ test::InputBuilder tx_input("Enter send port.",
+ DEFAULT_SEND_PORT, 1, INT_MAX);
+ *tx_port = tx_input.GetIntInput();
+
+ test::InputBuilder rx_input("Enter receive port.",
+ DEFAULT_RECEIVE_PORT, 1, INT_MAX);
+ *rx_port = rx_input.GetIntInput();
+}
+
+int GetPacketLoss() {
+ test::InputBuilder input("Enter send side packet loss %.",
+ DEFAULT_PACKET_LOSS, 0, 99);
+ return input.GetIntInput();
+}
+
+std::string GetIpAddress(const std::string display_text) {
+ test::InputBuilder input(display_text, DEFAULT_SEND_IP,
+ INT_MIN, INT_MAX);
+ std::string ip_address = input.GetStringInput();
+ // Verify correct form:
+ while (std::count(ip_address.begin(), ip_address.end(), '.') != 3) {
+ ip_address = input.GetStringInput();
+ }
+ return ip_address;
+}
+
+bool ReadFromFile() {
+ test::InputBuilder input("Enter 1 to read from file.", DEFAULT_READ_FROM_FILE,
+ 0, 1);
+ return (1 == input.GetIntInput());
+}
+
+std::string GetVideoFile() {
+ test::InputBuilder input("Enter file and path to raw video file.","",
+ INT_MIN, INT_MAX);
+ return input.GetStringInput();
+}
+
+void GetSsrcs(AudioSenderConfig* audio_config) {
+ test::InputBuilder input_tx("Choose audio sender SSRC.",
+ DEFAULT_AUDIO_SENDER_SSRC, 1, INT_MAX);
+ audio_config->sender_ssrc = input_tx.GetIntInput();
+
+ test::InputBuilder input_rx("Choose audio receiver SSRC.",
+ DEFAULT_AUDIO_RECEIVER_SSRC, 1, INT_MAX);
+ audio_config->incoming_feedback_ssrc = input_rx.GetIntInput();
+}
+
+void GetSsrcs(VideoSenderConfig* video_config) {
+ test::InputBuilder input_tx("Choose video sender SSRC.",
+ DEFAULT_VIDEO_SENDER_SSRC, 1, INT_MAX);
+ video_config->sender_ssrc = input_tx.GetIntInput();
+
+ test::InputBuilder input_rx("Choose video receiver SSRC.",
+ DEFAULT_VIDEO_RECEIVER_SSRC, 1, INT_MAX);
+ video_config->incoming_feedback_ssrc = input_rx.GetIntInput();
+}
+
+void GetPayloadtype(AudioSenderConfig* audio_config) {
+ test::InputBuilder input("Choose audio sender payload type.",
+ DEFAULT_AUDIO_PAYLOAD_TYPE, 96, 127);
+ audio_config->rtp_payload_type = input.GetIntInput();
+}
+
+AudioSenderConfig GetAudioSenderConfig() {
+ AudioSenderConfig audio_config;
+
+ GetSsrcs(&audio_config);
+ GetPayloadtype(&audio_config);
+
+ audio_config.rtcp_c_name = "audio_sender@a.b.c.d";
+
+ VLOG(1) << "Using OPUS 48Khz stereo at 64kbit/s";
+ audio_config.use_external_encoder = false;
+ audio_config.frequency = kAudioSamplingFrequency;
+ audio_config.channels = kAudioChannels;
+ audio_config.bitrate = 64000;
+ audio_config.codec = kOpus;
+ return audio_config;
+}
+
+void GetPayloadtype(VideoSenderConfig* video_config) {
+ test::InputBuilder input("Choose video sender payload type.",
+ DEFAULT_VIDEO_PAYLOAD_TYPE, 96, 127);
+ video_config->rtp_payload_type = input.GetIntInput();
+}
+
+void GetVideoCodecSize(VideoSenderConfig* video_config) {
+ test::InputBuilder input_width("Choose video width.",
+ DEFAULT_VIDEO_CODEC_WIDTH, 144, 1920);
+ video_config->width = input_width.GetIntInput();
+
+ test::InputBuilder input_height("Choose video height.",
+ DEFAULT_VIDEO_CODEC_HEIGHT, 176, 1080);
+ video_config->height = input_height.GetIntInput();
+}
+
+void GetVideoBitrates(VideoSenderConfig* video_config) {
+ test::InputBuilder input_start_br("Choose start bitrate[kbps].",
+ DEFAULT_VIDEO_CODEC_BITRATE, 0, INT_MAX);
+ video_config->start_bitrate = input_start_br.GetIntInput() * 1000;
+
+ test::InputBuilder input_max_br("Choose max bitrate[kbps].",
+ DEFAULT_VIDEO_CODEC_MAX_BITRATE, 0, INT_MAX);
+ video_config->max_bitrate = input_max_br.GetIntInput() * 1000;
+
+ test::InputBuilder input_min_br("Choose min bitrate[kbps].",
+ DEFAULT_VIDEO_CODEC_MIN_BITRATE, 0, INT_MAX);
+ video_config->min_bitrate = input_min_br.GetIntInput() * 1000;
+}
+
+VideoSenderConfig GetVideoSenderConfig() {
+ VideoSenderConfig video_config;
+
+ GetSsrcs(&video_config);
+ GetPayloadtype(&video_config);
+ GetVideoCodecSize(&video_config);
+ GetVideoBitrates(&video_config);
+
+ video_config.rtcp_c_name = "video_sender@a.b.c.d";
+
+ video_config.use_external_encoder = false;
+
+ VLOG(1) << "Using VP8 at 30 fps";
+ video_config.min_qp = 4;
+ video_config.max_qp = 40;
+ video_config.max_frame_rate = 30;
+ video_config.codec = kVp8;
+ video_config.max_number_of_video_buffers_used = 1;
+ video_config.number_of_cores = 1;
+ return video_config;
+}
+
+class SendProcess {
+ public:
+ SendProcess(scoped_refptr<base::TaskRunner> thread_proxy,
+ base::TickClock* clock,
+ const VideoSenderConfig& video_config,
+ FrameInput* frame_input)
+ : test_app_thread_proxy_(thread_proxy),
+ video_config_(video_config),
+ audio_diff_(kFrameTimerMs),
+ frame_input_(frame_input),
+ synthetic_count_(0),
+ clock_(clock),
+ start_time_(),
+ send_time_(),
+ weak_factory_(this) {
+ audio_bus_factory_.reset(new TestAudioBusFactory(kAudioChannels,
+ kAudioSamplingFrequency, kSoundFrequency, kSoundVolume));
+ if (ReadFromFile()) {
+ std::string video_file_name = GetVideoFile();
+ video_file_ = fopen(video_file_name.c_str(), "r");
+ if (video_file_ == NULL) {
+ LOG(ERROR) << "Failed to open file";
+ exit(-1);
+ }
+ } else {
+ video_file_ = NULL;
+ }
+ }
+
+ ~SendProcess() {
+ if (video_file_)
+ fclose(video_file_);
+ }
+
+ void SendFrame() {
+ // Make sure that we don't drift.
+ int num_10ms_blocks = audio_diff_ / 10;
+ // Avoid drift.
+ audio_diff_ += kFrameTimerMs - num_10ms_blocks * 10;
+
+ scoped_ptr<AudioBus> audio_bus(audio_bus_factory_->NextAudioBus(
+ base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks));
+ AudioBus* const audio_bus_ptr = audio_bus.get();
+ frame_input_->InsertAudio(audio_bus_ptr, clock_->NowTicks(),
+ base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
+
+ gfx::Size size(video_config_.width, video_config_.height);
+ // TODO(mikhal): Use the provided timestamp.
+ if (start_time_.is_null())
+ start_time_ = clock_->NowTicks();
+ base::TimeDelta time_diff = clock_->NowTicks() - start_time_;
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::CreateFrame(
+ VideoFrame::I420, size, gfx::Rect(size), size, time_diff);
+ if (video_file_) {
+ if (!PopulateVideoFrameFromFile(video_frame, video_file_))
+ return;
+ } else {
+ PopulateVideoFrame(video_frame, synthetic_count_);
+ ++synthetic_count_;
+ }
+
+ // Time the sending of the frame to match the set frame rate.
+ // Sleep if that time has yet to elapse.
+ base::TimeTicks now = clock_->NowTicks();
+ base::TimeDelta video_frame_time =
+ base::TimeDelta::FromMilliseconds(kFrameTimerMs);
+ base::TimeDelta elapsed_time = now - send_time_;
+ if (elapsed_time < video_frame_time) {
+ VLOG(2) << "Wait" <<
+ (video_frame_time - elapsed_time).InMilliseconds();
+ test_app_thread_proxy_->PostDelayedTask(FROM_HERE,
+ base::Bind(&SendProcess::SendVideoFrameOnTime, base::Unretained(this),
+ video_frame),
+ video_frame_time - elapsed_time);
+ } else {
+ test_app_thread_proxy_->PostTask(FROM_HERE,
+ base::Bind(&SendProcess::SendVideoFrameOnTime, base::Unretained(this),
+ video_frame));
+ }
+ }
+
+ void SendVideoFrameOnTime(scoped_refptr<media::VideoFrame> video_frame) {
+ send_time_ = clock_->NowTicks();
+ frame_input_->InsertRawVideoFrame(video_frame, send_time_);
+ test_app_thread_proxy_->PostTask(FROM_HERE,
+ base::Bind(&SendProcess::SendFrame, base::Unretained(this)));
+ }
+
+ private:
+ scoped_refptr<base::TaskRunner> test_app_thread_proxy_;
+ const VideoSenderConfig video_config_;
+ int audio_diff_;
+ const scoped_refptr<FrameInput> frame_input_;
+ FILE* video_file_;
+ uint8 synthetic_count_;
+ base::TickClock* const clock_; // Not owned by this class.
+ base::TimeTicks start_time_;
+ base::TimeTicks send_time_;
+ scoped_ptr<TestAudioBusFactory> audio_bus_factory_;
+ base::WeakPtrFactory<SendProcess> weak_factory_;
+};
+
+} // namespace cast
+} // namespace media
+
+
+int main(int argc, char** argv) {
+ base::AtExitManager at_exit;
+ VLOG(1) << "Cast Sender";
+ base::Thread test_thread("Cast sender test app thread");
+ base::Thread main_thread("Cast main send thread");
+ base::Thread audio_thread("Cast audio encoder thread");
+ base::Thread video_thread("Cast video encoder thread");
+ test_thread.Start();
+ main_thread.Start();
+ audio_thread.Start();
+ video_thread.Start();
+
+ base::DefaultTickClock clock;
+ base::MessageLoopForIO io_message_loop;
+
+ // Enable main and send side threads only. Disable logging.
+ scoped_refptr<media::cast::CastEnvironment> cast_environment(new
+ media::cast::CastEnvironment(
+ &clock,
+ main_thread.message_loop_proxy(),
+ audio_thread.message_loop_proxy(),
+ NULL,
+ video_thread.message_loop_proxy(),
+ NULL,
+ media::cast::GetDefaultCastLoggingConfig()));
+
+ media::cast::AudioSenderConfig audio_config =
+ media::cast::GetAudioSenderConfig();
+ media::cast::VideoSenderConfig video_config =
+ media::cast::GetVideoSenderConfig();
+
+ scoped_ptr<media::cast::test::Transport> transport(
+ new media::cast::test::Transport(io_message_loop.message_loop_proxy()));
+ scoped_ptr<media::cast::CastSender> cast_sender(
+ media::cast::CastSender::CreateCastSender(cast_environment,
+ audio_config,
+ video_config,
+ NULL, // VideoEncoderController.
+ transport->packet_sender()));
+
+ media::cast::PacketReceiver* packet_receiver = cast_sender->packet_receiver();
+
+ int send_to_port, receive_port;
+ media::cast::GetPorts(&send_to_port, &receive_port);
+ std::string ip_address = media::cast::GetIpAddress("Enter destination IP.");
+ std::string local_ip_address = media::cast::GetIpAddress("Enter local IP.");
+ int packet_loss_percentage = media::cast::GetPacketLoss();
+
+ transport->SetLocalReceiver(packet_receiver, ip_address, local_ip_address,
+ receive_port);
+ transport->SetSendDestination(ip_address, send_to_port);
+ transport->SetSendSidePacketLoss(packet_loss_percentage);
+
+ media::cast::FrameInput* frame_input = cast_sender->frame_input();
+ scoped_ptr<media::cast::SendProcess> send_process(new
+ media::cast::SendProcess(test_thread.message_loop_proxy(),
+ cast_environment->Clock(),
+ video_config,
+ frame_input));
+
+ send_process->SendFrame();
+ io_message_loop.Run();
+ transport->StopReceiving();
+ return 0;
+}
diff --git a/media/cast/test/transport/transport.cc b/media/cast/test/transport/transport.cc
new file mode 100644
index 0000000..3cb85ed
--- /dev/null
+++ b/media/cast/test/transport/transport.cc
@@ -0,0 +1,218 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/test/transport/transport.h"
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "base/rand_util.h"
+#include "net/base/io_buffer.h"
+#include "net/base/rand_callback.h"
+#include "net/base/test_completion_callback.h"
+
+namespace media {
+namespace cast {
+namespace test {
+
+const int kMaxPacketSize = 1500;
+
+class LocalUdpTransportData;
+
+void CreateUDPAddress(std::string ip_str, int port, net::IPEndPoint* address) {
+ net::IPAddressNumber ip_number;
+ bool rv = net::ParseIPLiteralToNumber(ip_str, &ip_number);
+ if (!rv)
+ return;
+ *address = net::IPEndPoint(ip_number, port);
+}
+
+class LocalUdpTransportData
+ : public base::RefCountedThreadSafe<LocalUdpTransportData> {
+ public:
+ LocalUdpTransportData(net::UDPServerSocket* udp_socket,
+ scoped_refptr<base::TaskRunner> io_thread_proxy)
+ : udp_socket_(udp_socket),
+ buffer_(new net::IOBufferWithSize(kMaxPacketSize)),
+ io_thread_proxy_(io_thread_proxy) {
+ }
+
+ void ListenTo(net::IPEndPoint bind_address) {
+ DCHECK(io_thread_proxy_->RunsTasksOnCurrentThread());
+
+ bind_address_ = bind_address;
+ io_thread_proxy_->PostTask(FROM_HERE,
+ base::Bind(&LocalUdpTransportData::RecvFromSocketLoop, this));
+ }
+
+ void DeletePacket(uint8* data) {
+ // Should be called from the receiver (not on the transport thread).
+ DCHECK(!(io_thread_proxy_->RunsTasksOnCurrentThread()));
+ delete [] data;
+ }
+
+ void PacketReceived(int size) {
+ DCHECK(io_thread_proxy_->RunsTasksOnCurrentThread());
+ // Got a packet with length result.
+ uint8* data = new uint8[size];
+ memcpy(data, buffer_->data(), size);
+ packet_receiver_->ReceivedPacket(data, size,
+ base::Bind(&LocalUdpTransportData::DeletePacket, this, data));
+ RecvFromSocketLoop();
+
+ }
+
+ void RecvFromSocketLoop() {
+ DCHECK(io_thread_proxy_->RunsTasksOnCurrentThread());
+ // Callback should always trigger with a packet.
+ int res = udp_socket_->RecvFrom(buffer_.get(), kMaxPacketSize,
+ &bind_address_, base::Bind(&LocalUdpTransportData::PacketReceived,
+ this));
+ DCHECK(res >= net::ERR_IO_PENDING);
+ if (res > 0) {
+ PacketReceived(res);
+ }
+ }
+
+ void set_packet_receiver(PacketReceiver* packet_receiver) {
+ packet_receiver_ = packet_receiver;
+ }
+
+ void Close() {
+ udp_socket_->Close();
+ }
+
+ protected:
+ virtual ~LocalUdpTransportData() {}
+
+ private:
+ friend class base::RefCountedThreadSafe<LocalUdpTransportData>;
+
+ net::UDPServerSocket* udp_socket_;
+ net::IPEndPoint bind_address_;
+ PacketReceiver* packet_receiver_;
+ scoped_refptr<net::IOBufferWithSize> buffer_;
+ scoped_refptr<base::TaskRunner> io_thread_proxy_;
+
+ DISALLOW_COPY_AND_ASSIGN(LocalUdpTransportData);
+};
+
+class LocalPacketSender : public PacketSender,
+ public base::RefCountedThreadSafe<LocalPacketSender> {
+ public:
+ LocalPacketSender(net::UDPServerSocket* udp_socket,
+ scoped_refptr<base::TaskRunner> io_thread_proxy)
+ : udp_socket_(udp_socket),
+ send_address_(),
+ loss_limit_(0),
+ io_thread_proxy_(io_thread_proxy) {}
+
+ virtual bool SendPacket(const Packet& packet) OVERRIDE {
+ io_thread_proxy_->PostTask(FROM_HERE,
+ base::Bind(&LocalPacketSender::SendPacketToNetwork, this, packet));
+ return true;
+ }
+
+ virtual void SendPacketToNetwork(const Packet& packet) {
+ DCHECK(io_thread_proxy_->RunsTasksOnCurrentThread());
+ const uint8* data = packet.data();
+ if (loss_limit_ > 0) {
+ int r = base::RandInt(0, 100);
+ if (r < loss_limit_) {
+ VLOG(3) << "Drop packet f:" << static_cast<int>(data[12 + 1])
+ << " p:" << static_cast<int>(data[12 + 3])
+ << " m:" << static_cast<int>(data[12 + 5]);
+ return;
+ }
+ }
+ net::TestCompletionCallback callback;
+ scoped_refptr<net::WrappedIOBuffer> buffer(
+ new net::WrappedIOBuffer(reinterpret_cast<const char*>(data)));
+ udp_socket_->SendTo(buffer.get(), static_cast<int>(packet.size()),
+ send_address_, callback.callback());
+ }
+
+ virtual bool SendPackets(const PacketList& packets) OVERRIDE {
+ bool out_val = true;
+ for (size_t i = 0; i < packets.size(); ++i) {
+ const Packet& packet = packets[i];
+ out_val |= SendPacket(packet);
+ }
+ return out_val;
+ }
+
+ void SetPacketLoss(int percentage) {
+ DCHECK_GE(percentage, 0);
+ DCHECK_LT(percentage, 100);
+ loss_limit_ = percentage;
+ }
+
+ void SetSendAddress(const net::IPEndPoint& send_address) {
+ send_address_ = send_address;
+ }
+
+ protected:
+ virtual ~LocalPacketSender() {}
+
+ private:
+ friend class base::RefCountedThreadSafe<LocalPacketSender>;
+
+ net::UDPServerSocket* udp_socket_; // Not owned by this class.
+ net::IPEndPoint send_address_;
+ int loss_limit_;
+ scoped_refptr<base::TaskRunner> io_thread_proxy_;
+};
+
+Transport::Transport(
+ scoped_refptr<base::TaskRunner> io_thread_proxy)
+ : udp_socket_(new net::UDPServerSocket(NULL, net::NetLog::Source())),
+ local_udp_transport_data_(new LocalUdpTransportData(udp_socket_.get(),
+ io_thread_proxy)),
+ packet_sender_(new LocalPacketSender(udp_socket_.get(), io_thread_proxy)),
+ io_thread_proxy_(io_thread_proxy) {}
+
+Transport::~Transport() {}
+
+PacketSender* Transport::packet_sender() {
+ return static_cast<PacketSender*>(packet_sender_.get());
+}
+
+void Transport::SetSendSidePacketLoss(int percentage) {
+ packet_sender_->SetPacketLoss(percentage);
+}
+
+void Transport::StopReceiving() {
+ local_udp_transport_data_->Close();
+}
+
+void Transport::SetLocalReceiver(PacketReceiver* packet_receiver,
+ std::string ip_address,
+ std::string local_ip_address,
+ int port) {
+ DCHECK(io_thread_proxy_->RunsTasksOnCurrentThread());
+ net::IPEndPoint bind_address, local_bind_address;
+ CreateUDPAddress(ip_address, port, &bind_address);
+ CreateUDPAddress(local_ip_address, port, &local_bind_address);
+ local_udp_transport_data_->set_packet_receiver(packet_receiver);
+ udp_socket_->AllowAddressReuse();
+ udp_socket_->SetMulticastLoopbackMode(true);
+ udp_socket_->Listen(local_bind_address);
+
+ // Start listening once receiver has been set.
+ local_udp_transport_data_->ListenTo(bind_address);
+}
+
+void Transport::SetSendDestination(std::string ip_address, int port) {
+ net::IPEndPoint send_address;
+ CreateUDPAddress(ip_address, port, &send_address);
+ packet_sender_->SetSendAddress(send_address);
+}
+
+} // namespace test
+} // namespace cast
+} // namespace media
diff --git a/media/cast/video_receiver/codecs/vp8/vp8_decoder.cc b/media/cast/video_receiver/codecs/vp8/vp8_decoder.cc
new file mode 100644
index 0000000..1d173dd
--- /dev/null
+++ b/media/cast/video_receiver/codecs/vp8/vp8_decoder.cc
@@ -0,0 +1,106 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/video_receiver/codecs/vp8/vp8_decoder.h"
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
+#include "third_party/libvpx/source/libvpx/vpx/vp8dx.h"
+#include "ui/gfx/size.h"
+
+namespace media {
+namespace cast {
+
+void LogFrameDecodedEvent(CastEnvironment* const cast_environment,
+ uint32 frame_id) {
+// TODO(mikhal): Sort out passing of rtp_timestamp.
+// cast_environment->Logging()->InsertFrameEvent(kVideoFrameDecoded,
+// 0, frame_id);
+}
+
+Vp8Decoder::Vp8Decoder(scoped_refptr<CastEnvironment> cast_environment)
+ : decoder_(new vpx_dec_ctx_t()),
+ cast_environment_(cast_environment) {
+ // Make sure that we initialize the decoder from the correct thread.
+ cast_environment_->PostTask(CastEnvironment::VIDEO_DECODER, FROM_HERE,
+ base::Bind(&Vp8Decoder::InitDecoder, base::Unretained(this)));
+}
+
+Vp8Decoder::~Vp8Decoder() {}
+
+void Vp8Decoder::InitDecoder() {
+ vpx_codec_dec_cfg_t cfg;
+ // Initializing to use one core.
+ cfg.threads = 1;
+ vpx_codec_flags_t flags = VPX_CODEC_USE_POSTPROC;
+
+ if (vpx_codec_dec_init(decoder_.get(), vpx_codec_vp8_dx(), &cfg, flags)) {
+ DCHECK(false) << "VP8 decode error";
+ }
+}
+
+bool Vp8Decoder::Decode(const EncodedVideoFrame* encoded_frame,
+ const base::TimeTicks render_time,
+ const VideoFrameDecodedCallback& frame_decoded_cb) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::VIDEO_DECODER));
+ const int frame_id_int = static_cast<int>(encoded_frame->frame_id);
+ VLOG(2) << "VP8 decode frame:" << frame_id_int
+ << " sized:" << encoded_frame->data.size();
+
+ if (encoded_frame->data.empty()) return false;
+
+ vpx_codec_iter_t iter = NULL;
+ vpx_image_t* img;
+ if (vpx_codec_decode(
+ decoder_.get(),
+ reinterpret_cast<const uint8*>(encoded_frame->data.data()),
+ static_cast<unsigned int>(encoded_frame->data.size()),
+ 0,
+ 1 /* real time*/)) {
+ VLOG(1) << "Failed to decode VP8 frame.";
+ return false;
+ }
+
+ img = vpx_codec_get_frame(decoder_.get(), &iter);
+ if (img == NULL) {
+ VLOG(1) << "Skip rendering VP8 frame:" << frame_id_int;
+ return false;
+ }
+
+ gfx::Size visible_size(img->d_w, img->d_h);
+ gfx::Size full_size(img->stride[VPX_PLANE_Y], img->d_h);
+ DCHECK(VideoFrame::IsValidConfig(VideoFrame::I420, visible_size,
+ gfx::Rect(visible_size), full_size));
+ // Temp timing setting - will sort out timing in a follow up cl.
+ scoped_refptr<VideoFrame> decoded_frame =
+ VideoFrame::CreateFrame(VideoFrame::I420, visible_size,
+ gfx::Rect(visible_size), full_size, base::TimeDelta());
+
+ // Copy each plane individually (need to account for stride).
+ // TODO(mikhal): Eliminate copy once http://crbug.com/321856 is resolved.
+ CopyPlane(VideoFrame::kYPlane, img->planes[VPX_PLANE_Y],
+ img->stride[VPX_PLANE_Y], img->d_h, decoded_frame.get());
+ CopyPlane(VideoFrame::kUPlane, img->planes[VPX_PLANE_U],
+ img->stride[VPX_PLANE_U], (img->d_h + 1) / 2, decoded_frame.get());
+ CopyPlane(VideoFrame::kVPlane, img->planes[VPX_PLANE_V],
+ img->stride[VPX_PLANE_V], (img->d_h + 1) / 2, decoded_frame.get());
+
+ // Log:: Decoding complete (should be called from the main thread).
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE, base::Bind(
+ LogFrameDecodedEvent, cast_environment_,encoded_frame->frame_id));
+
+ VLOG(2) << "Decoded frame " << frame_id_int;
+ // Frame decoded - return frame to the user via callback.
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(frame_decoded_cb, decoded_frame, render_time));
+
+ return true;
+}
+
+} // namespace cast
+} // namespace media
+
diff --git a/media/cast/video_receiver/video_receiver.cc b/media/cast/video_receiver/video_receiver.cc
new file mode 100644
index 0000000..d422414
--- /dev/null
+++ b/media/cast/video_receiver/video_receiver.cc
@@ -0,0 +1,471 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/video_receiver/video_receiver.h"
+
+#include <algorithm>
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/cast/cast_defines.h"
+#include "media/cast/framer/framer.h"
+#include "media/cast/video_receiver/video_decoder.h"
+
+namespace media {
+namespace cast {
+
+const int64 kMinSchedulingDelayMs = 1;
+
+static const int64 kMinTimeBetweenOffsetUpdatesMs = 2000;
+static const int kTimeOffsetFilter = 8;
+static const int64_t kMinProcessIntervalMs = 5;
+
+// Local implementation of RtpData (defined in rtp_rtcp_defines.h).
+// Used to pass payload data into the video receiver.
+class LocalRtpVideoData : public RtpData {
+ public:
+ explicit LocalRtpVideoData(VideoReceiver* video_receiver)
+ : video_receiver_(video_receiver) {}
+
+ virtual ~LocalRtpVideoData() {}
+
+ virtual void OnReceivedPayloadData(const uint8* payload_data,
+ size_t payload_size,
+ const RtpCastHeader* rtp_header) OVERRIDE {
+ video_receiver_->IncomingParsedRtpPacket(payload_data, payload_size,
+ *rtp_header);
+ }
+
+ private:
+ VideoReceiver* video_receiver_;
+};
+
+// Local implementation of RtpPayloadFeedback (defined in rtp_defines.h)
+// Used to convey cast-specific feedback from receiver to sender.
+// Callback triggered by the Framer (cast message builder).
+class LocalRtpVideoFeedback : public RtpPayloadFeedback {
+ public:
+ explicit LocalRtpVideoFeedback(VideoReceiver* video_receiver)
+ : video_receiver_(video_receiver) {
+ }
+
+ virtual void CastFeedback(const RtcpCastMessage& cast_message) OVERRIDE {
+ video_receiver_->CastFeedback(cast_message);
+ }
+
+ private:
+ VideoReceiver* video_receiver_;
+};
+
+// Local implementation of RtpReceiverStatistics (defined by rtcp.h).
+// Used to pass statistics data from the RTP module to the RTCP module.
+class LocalRtpReceiverStatistics : public RtpReceiverStatistics {
+ public:
+ explicit LocalRtpReceiverStatistics(RtpReceiver* rtp_receiver)
+ : rtp_receiver_(rtp_receiver) {
+ }
+
+ virtual void GetStatistics(uint8* fraction_lost,
+ uint32* cumulative_lost, // 24 bits valid.
+ uint32* extended_high_sequence_number,
+ uint32* jitter) OVERRIDE {
+ rtp_receiver_->GetStatistics(fraction_lost,
+ cumulative_lost,
+ extended_high_sequence_number,
+ jitter);
+ }
+
+ private:
+ RtpReceiver* rtp_receiver_;
+};
+
+VideoReceiver::VideoReceiver(scoped_refptr<CastEnvironment> cast_environment,
+ const VideoReceiverConfig& video_config,
+ PacedPacketSender* const packet_sender)
+ : cast_environment_(cast_environment),
+ codec_(video_config.codec),
+ target_delay_delta_(
+ base::TimeDelta::FromMilliseconds(video_config.rtp_max_delay_ms)),
+ frame_delay_(base::TimeDelta::FromMilliseconds(
+ 1000 / video_config.max_frame_rate)),
+ incoming_payload_callback_(new LocalRtpVideoData(this)),
+ incoming_payload_feedback_(new LocalRtpVideoFeedback(this)),
+ rtp_receiver_(cast_environment_->Clock(), NULL, &video_config,
+ incoming_payload_callback_.get()),
+ rtp_video_receiver_statistics_(
+ new LocalRtpReceiverStatistics(&rtp_receiver_)),
+ time_incoming_packet_updated_(false),
+ incoming_rtp_timestamp_(0),
+ weak_factory_(this) {
+ int max_unacked_frames = video_config.rtp_max_delay_ms *
+ video_config.max_frame_rate / 1000;
+ DCHECK(max_unacked_frames) << "Invalid argument";
+
+ if (video_config.aes_iv_mask.size() == kAesKeySize &&
+ video_config.aes_key.size() == kAesKeySize) {
+ iv_mask_ = video_config.aes_iv_mask;
+ crypto::SymmetricKey* key = crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES, video_config.aes_key);
+ decryptor_.reset(new crypto::Encryptor());
+ decryptor_->Init(key, crypto::Encryptor::CTR, std::string());
+ } else if (video_config.aes_iv_mask.size() != 0 ||
+ video_config.aes_key.size() != 0) {
+ DCHECK(false) << "Invalid crypto configuration";
+ }
+
+ framer_.reset(new Framer(cast_environment->Clock(),
+ incoming_payload_feedback_.get(),
+ video_config.incoming_ssrc,
+ video_config.decoder_faster_than_max_frame_rate,
+ max_unacked_frames));
+ if (!video_config.use_external_decoder) {
+ video_decoder_.reset(new VideoDecoder(video_config, cast_environment));
+ }
+
+ rtcp_.reset(
+ new Rtcp(cast_environment_,
+ NULL,
+ packet_sender,
+ NULL,
+ rtp_video_receiver_statistics_.get(),
+ video_config.rtcp_mode,
+ base::TimeDelta::FromMilliseconds(video_config.rtcp_interval),
+ video_config.feedback_ssrc,
+ video_config.incoming_ssrc,
+ video_config.rtcp_c_name));
+}
+
+VideoReceiver::~VideoReceiver() {}
+
+void VideoReceiver::InitializeTimers() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ ScheduleNextRtcpReport();
+ ScheduleNextCastMessage();
+}
+
+void VideoReceiver::GetRawVideoFrame(
+ const VideoFrameDecodedCallback& callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ GetEncodedVideoFrame(base::Bind(&VideoReceiver::DecodeVideoFrame,
+ base::Unretained(this), callback));
+}
+
+// Called when we have a frame to decode.
+void VideoReceiver::DecodeVideoFrame(
+ const VideoFrameDecodedCallback& callback,
+ scoped_ptr<EncodedVideoFrame> encoded_frame,
+ const base::TimeTicks& render_time) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ // Hand the ownership of the encoded frame to the decode thread.
+ cast_environment_->PostTask(CastEnvironment::VIDEO_DECODER, FROM_HERE,
+ base::Bind(&VideoReceiver::DecodeVideoFrameThread, base::Unretained(this),
+ base::Passed(&encoded_frame), render_time, callback));
+}
+
+// Utility function to run the decoder on a designated decoding thread.
+void VideoReceiver::DecodeVideoFrameThread(
+ scoped_ptr<EncodedVideoFrame> encoded_frame,
+ const base::TimeTicks render_time,
+ const VideoFrameDecodedCallback& frame_decoded_callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::VIDEO_DECODER));
+ DCHECK(video_decoder_);
+
+ if (!(video_decoder_->DecodeVideoFrame(encoded_frame.get(), render_time,
+ frame_decoded_callback))) {
+ // This will happen if we decide to decode but not show a frame.
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoReceiver::GetRawVideoFrame, base::Unretained(this),
+ frame_decoded_callback));
+ }
+}
+
+bool VideoReceiver::DecryptVideoFrame(
+ scoped_ptr<EncodedVideoFrame>* video_frame) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(decryptor_) << "Invalid state";
+
+ if (!decryptor_->SetCounter(GetAesNonce((*video_frame)->frame_id,
+ iv_mask_))) {
+ NOTREACHED() << "Failed to set counter";
+ return false;
+ }
+ std::string decrypted_video_data;
+ if (!decryptor_->Decrypt((*video_frame)->data, &decrypted_video_data)) {
+ LOG(ERROR) << "Decryption error";
+ // Give up on this frame, release it from jitter buffer.
+ framer_->ReleaseFrame((*video_frame)->frame_id);
+ return false;
+ }
+ (*video_frame)->data.swap(decrypted_video_data);
+ return true;
+}
+
+// Called from the main cast thread.
+void VideoReceiver::GetEncodedVideoFrame(
+ const VideoFrameEncodedCallback& callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ scoped_ptr<EncodedVideoFrame> encoded_frame(new EncodedVideoFrame());
+ uint32 rtp_timestamp = 0;
+ bool next_frame = false;
+
+ if (!framer_->GetEncodedVideoFrame(encoded_frame.get(), &rtp_timestamp,
+ &next_frame)) {
+ // We have no video frames. Wait for new packet(s).
+ queued_encoded_callbacks_.push_back(callback);
+ return;
+ }
+
+ if (decryptor_ && !DecryptVideoFrame(&encoded_frame)) {
+ // Logging already done.
+ queued_encoded_callbacks_.push_back(callback);
+ return;
+ }
+
+ base::TimeTicks render_time;
+ if (PullEncodedVideoFrame(rtp_timestamp, next_frame, &encoded_frame,
+ &render_time)) {
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(callback, base::Passed(&encoded_frame), render_time));
+ } else {
+ // We have a video frame; however we are missing packets and we have time
+ // to wait for new packet(s).
+ queued_encoded_callbacks_.push_back(callback);
+ }
+}
+
+// Should we pull the encoded video frame from the framer? decided by if this is
+// the next frame or we are running out of time and have to pull the following
+// frame.
+// If the frame is too old to be rendered we set the don't show flag in the
+// video bitstream where possible.
+bool VideoReceiver::PullEncodedVideoFrame(uint32 rtp_timestamp,
+ bool next_frame, scoped_ptr<EncodedVideoFrame>* encoded_frame,
+ base::TimeTicks* render_time) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+ *render_time = GetRenderTime(now, rtp_timestamp);
+
+ // TODO(mikhal): Store actual render time and not diff.
+ cast_environment_->Logging()->InsertFrameEventWithDelay(kVideoRenderDelay,
+ rtp_timestamp, (*encoded_frame)->frame_id, now - *render_time);
+
+ // Minimum time before a frame is due to be rendered before we pull it for
+ // decode.
+ base::TimeDelta min_wait_delta = frame_delay_;
+ base::TimeDelta time_until_render = *render_time - now;
+ if (!next_frame && (time_until_render > min_wait_delta)) {
+ // Example:
+ // We have decoded frame 1 and we have received the complete frame 3, but
+ // not frame 2. If we still have time before frame 3 should be rendered we
+ // will wait for 2 to arrive, however if 2 never show up this timer will hit
+ // and we will pull out frame 3 for decoding and rendering.
+ base::TimeDelta time_until_release = time_until_render - min_wait_delta;
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoReceiver::PlayoutTimeout, weak_factory_.GetWeakPtr()),
+ time_until_release);
+ VLOG(2) << "Wait before releasing frame "
+ << static_cast<int>((*encoded_frame)->frame_id)
+ << " time " << time_until_release.InMilliseconds();
+ return false;
+ }
+
+ base::TimeDelta dont_show_timeout_delta =
+ base::TimeDelta::FromMilliseconds(-kDontShowTimeoutMs);
+ if (codec_ == kVp8 && time_until_render < dont_show_timeout_delta) {
+ (*encoded_frame)->data[0] &= 0xef;
+ VLOG(1) << "Don't show frame "
+ << static_cast<int>((*encoded_frame)->frame_id)
+ << " time_until_render:" << time_until_render.InMilliseconds();
+ } else {
+ VLOG(2) << "Show frame "
+ << static_cast<int>((*encoded_frame)->frame_id)
+ << " time_until_render:" << time_until_render.InMilliseconds();
+ }
+ // We have a copy of the frame, release this one.
+ framer_->ReleaseFrame((*encoded_frame)->frame_id);
+ (*encoded_frame)->codec = codec_;
+ return true;
+}
+
+void VideoReceiver::PlayoutTimeout() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (queued_encoded_callbacks_.empty()) return;
+
+ uint32 rtp_timestamp = 0;
+ bool next_frame = false;
+ scoped_ptr<EncodedVideoFrame> encoded_frame(new EncodedVideoFrame());
+
+ if (!framer_->GetEncodedVideoFrame(encoded_frame.get(), &rtp_timestamp,
+ &next_frame)) {
+ // We have no video frames. Wait for new packet(s).
+ // Since the application can post multiple VideoFrameEncodedCallback and
+ // we only check the next frame to play out we might have multiple timeout
+ // events firing after each other; however this should be a rare event.
+ VLOG(2) << "Failed to retrieved a complete frame at this point in time";
+ return;
+ }
+ VLOG(2) << "PlayoutTimeout retrieved frame "
+ << static_cast<int>(encoded_frame->frame_id);
+
+ if (decryptor_ && !DecryptVideoFrame(&encoded_frame)) {
+ // Logging already done.
+ return;
+ }
+
+ base::TimeTicks render_time;
+ if (PullEncodedVideoFrame(rtp_timestamp, next_frame, &encoded_frame,
+ &render_time)) {
+ if (!queued_encoded_callbacks_.empty()) {
+ VideoFrameEncodedCallback callback = queued_encoded_callbacks_.front();
+ queued_encoded_callbacks_.pop_front();
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(callback, base::Passed(&encoded_frame), render_time));
+ }
+ } else {
+ // We have a video frame; however we are missing packets and we have time
+ // to wait for new packet(s).
+ }
+}
+
+base::TimeTicks VideoReceiver::GetRenderTime(base::TimeTicks now,
+ uint32 rtp_timestamp) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ // Senders time in ms when this frame was captured.
+ // Note: the senders clock and our local clock might not be synced.
+ base::TimeTicks rtp_timestamp_in_ticks;
+
+ if (time_offset_.InMilliseconds() == 0) {
+ if (!rtcp_->RtpTimestampInSenderTime(kVideoFrequency,
+ incoming_rtp_timestamp_,
+ &rtp_timestamp_in_ticks)) {
+ // We have not received any RTCP to sync the stream play it out as soon as
+ // possible.
+ return now;
+ }
+ time_offset_ = time_incoming_packet_ - rtp_timestamp_in_ticks;
+ } else if (time_incoming_packet_updated_) {
+ if (rtcp_->RtpTimestampInSenderTime(kVideoFrequency,
+ incoming_rtp_timestamp_,
+ &rtp_timestamp_in_ticks)) {
+ // Time to update the time_offset.
+ base::TimeDelta time_offset =
+ time_incoming_packet_ - rtp_timestamp_in_ticks;
+ time_offset_ = ((kTimeOffsetFilter - 1) * time_offset_ + time_offset)
+ / kTimeOffsetFilter;
+ }
+ }
+ // Reset |time_incoming_packet_updated_| to enable a future measurement.
+ time_incoming_packet_updated_ = false;
+ if (!rtcp_->RtpTimestampInSenderTime(kVideoFrequency,
+ rtp_timestamp,
+ &rtp_timestamp_in_ticks)) {
+ // This can fail if we have not received any RTCP packets in a long time.
+ return now;
+ }
+ base::TimeTicks render_time =
+ rtp_timestamp_in_ticks + time_offset_ + target_delay_delta_;
+ // // Don't allow the render time to go backwards.
+ if (render_time < last_render_time_)
+ render_time = last_render_time_;
+ last_render_time_ = render_time;
+ return render_time;
+}
+
+void VideoReceiver::IncomingPacket(const uint8* packet, size_t length,
+ const base::Closure callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (Rtcp::IsRtcpPacket(packet, length)) {
+ rtcp_->IncomingRtcpPacket(packet, length);
+ } else {
+ rtp_receiver_.ReceivedPacket(packet, length);
+ }
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE, callback);
+}
+
+void VideoReceiver::IncomingParsedRtpPacket(const uint8* payload_data,
+ size_t payload_size,
+ const RtpCastHeader& rtp_header) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+ base::TimeTicks now = cast_environment_->Clock()->NowTicks();
+ if (time_incoming_packet_.is_null() || now - time_incoming_packet_ >
+ base::TimeDelta::FromMilliseconds(kMinTimeBetweenOffsetUpdatesMs)) {
+ if (time_incoming_packet_.is_null()) InitializeTimers();
+ incoming_rtp_timestamp_ = rtp_header.webrtc.header.timestamp;
+ time_incoming_packet_ = now;
+ time_incoming_packet_updated_ = true;
+ }
+
+ cast_environment_->Logging()->InsertPacketEvent(kPacketReceived,
+ rtp_header.webrtc.header.timestamp, rtp_header.frame_id,
+ rtp_header.packet_id, rtp_header.max_packet_id, payload_size);
+
+ bool complete = framer_->InsertPacket(payload_data, payload_size, rtp_header);
+
+ if (!complete) return; // Video frame not complete; wait for more packets.
+ if (queued_encoded_callbacks_.empty()) return; // No pending callback.
+
+ VideoFrameEncodedCallback callback = queued_encoded_callbacks_.front();
+ queued_encoded_callbacks_.pop_front();
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoReceiver::GetEncodedVideoFrame,
+ weak_factory_.GetWeakPtr(), callback));
+}
+
+// Send a cast feedback message. Actual message created in the framer (cast
+// message builder).
+void VideoReceiver::CastFeedback(const RtcpCastMessage& cast_message) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ // TODO(pwestin): wire up log messages.
+ rtcp_->SendRtcpFromRtpReceiver(&cast_message, NULL);
+ time_last_sent_cast_message_= cast_environment_->Clock()->NowTicks();
+}
+
+// Cast messages should be sent within a maximum interval. Schedule a call
+// if not triggered elsewhere, e.g. by the cast message_builder.
+void VideoReceiver::ScheduleNextCastMessage() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeTicks send_time;
+ framer_->TimeToSendNextCastMessage(&send_time);
+
+ base::TimeDelta time_to_send = send_time -
+ cast_environment_->Clock()->NowTicks();
+ time_to_send = std::max(time_to_send,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoReceiver::SendNextCastMessage,
+ weak_factory_.GetWeakPtr()), time_to_send);
+}
+
+void VideoReceiver::SendNextCastMessage() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ framer_->SendCastMessage(); // Will only send a message if it is time.
+ ScheduleNextCastMessage();
+}
+
+// Schedule the next RTCP report to be sent back to the sender.
+void VideoReceiver::ScheduleNextRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeDelta time_to_next = rtcp_->TimeToSendNextRtcpReport() -
+ cast_environment_->Clock()->NowTicks();
+
+ time_to_next = std::max(time_to_next,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoReceiver::SendNextRtcpReport,
+ weak_factory_.GetWeakPtr()), time_to_next);
+}
+
+void VideoReceiver::SendNextRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ rtcp_->SendRtcpFromRtpReceiver(NULL, NULL);
+ ScheduleNextRtcpReport();
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/video_sender/codecs/vp8/vp8_encoder.cc b/media/cast/video_sender/codecs/vp8/vp8_encoder.cc
new file mode 100644
index 0000000..b75ede1
--- /dev/null
+++ b/media/cast/video_sender/codecs/vp8/vp8_encoder.cc
@@ -0,0 +1,372 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// TODO (pwestin): add a link to the design document describing the generic
+// protocol and the VP8 specific details.
+#include "media/cast/video_sender/codecs/vp8/vp8_encoder.h"
+
+#include <vector>
+
+#include "base/logging.h"
+#include "media/base/video_frame.h"
+#include "media/cast/cast_defines.h"
+#include "third_party/libvpx/source/libvpx/vpx/vp8cx.h"
+
+namespace media {
+namespace cast {
+
+static const uint32 kMinIntra = 300;
+
+Vp8Encoder::Vp8Encoder(const VideoSenderConfig& video_config,
+ uint8 max_unacked_frames)
+ : cast_config_(video_config),
+ use_multiple_video_buffers_(
+ cast_config_.max_number_of_video_buffers_used ==
+ kNumberOfVp8VideoBuffers),
+ max_number_of_repeated_buffers_in_a_row_(
+ (max_unacked_frames > kNumberOfVp8VideoBuffers) ?
+ ((max_unacked_frames - 1) / kNumberOfVp8VideoBuffers) : 0),
+ config_(new vpx_codec_enc_cfg_t()),
+ encoder_(new vpx_codec_ctx_t()),
+ // Creating a wrapper to the image - setting image data to NULL. Actual
+ // pointer will be set during encode. Setting align to 1, as it is
+ // meaningless (actual memory is not allocated).
+ raw_image_(vpx_img_wrap(NULL, IMG_FMT_I420, video_config.width,
+ video_config.height, 1, NULL)),
+ key_frame_requested_(true),
+ timestamp_(0),
+ last_encoded_frame_id_(kStartFrameId),
+ number_of_repeated_buffers_(0) {
+ // TODO(pwestin): we need to figure out how to synchronize the acking with the
+ // internal state of the encoder, ideally the encoder will tell if we can
+ // send another frame.
+ DCHECK(!use_multiple_video_buffers_ ||
+ max_number_of_repeated_buffers_in_a_row_ == 0) << "Invalid config";
+
+ // VP8 have 3 buffers available for prediction, with
+ // max_number_of_video_buffers_used set to 1 we maximize the coding efficiency
+ // however in this mode we can not skip frames in the receiver to catch up
+ // after a temporary network outage; with max_number_of_video_buffers_used
+ // set to 3 we allow 2 frames to be skipped by the receiver without error
+ // propagation.
+ DCHECK(cast_config_.max_number_of_video_buffers_used == 1 ||
+ cast_config_.max_number_of_video_buffers_used ==
+ kNumberOfVp8VideoBuffers) << "Invalid argument";
+
+ for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
+ acked_frame_buffers_[i] = true;
+ used_buffers_frame_id_[i] = kStartFrameId;
+ }
+ InitEncode(video_config.number_of_cores);
+}
+
+Vp8Encoder::~Vp8Encoder() {
+ vpx_codec_destroy(encoder_);
+ vpx_img_free(raw_image_);
+}
+
+void Vp8Encoder::InitEncode(int number_of_cores) {
+ // Populate encoder configuration with default values.
+ if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), config_.get(), 0)) {
+ DCHECK(false) << "Invalid return value";
+ }
+ config_->g_w = cast_config_.width;
+ config_->g_h = cast_config_.height;
+ config_->rc_target_bitrate = cast_config_.start_bitrate / 1000; // In kbit/s.
+
+ // Setting the codec time base.
+ config_->g_timebase.num = 1;
+ config_->g_timebase.den = kVideoFrequency;
+ config_->g_lag_in_frames = 0;
+ config_->kf_mode = VPX_KF_DISABLED;
+ if (use_multiple_video_buffers_) {
+ // We must enable error resilience when we use multiple buffers, due to
+ // codec requirements.
+ config_->g_error_resilient = 1;
+ }
+
+ if (cast_config_.width * cast_config_.height > 640 * 480
+ && number_of_cores >= 2) {
+ config_->g_threads = 2; // 2 threads for qHD/HD.
+ } else {
+ config_->g_threads = 1; // 1 thread for VGA or less.
+ }
+
+ // Rate control settings.
+ // TODO(pwestin): revisit these constants. Currently identical to webrtc.
+ config_->rc_dropframe_thresh = 30;
+ config_->rc_end_usage = VPX_CBR;
+ config_->g_pass = VPX_RC_ONE_PASS;
+ config_->rc_resize_allowed = 0;
+ config_->rc_min_quantizer = cast_config_.min_qp;
+ config_->rc_max_quantizer = cast_config_.max_qp;
+ config_->rc_undershoot_pct = 100;
+ config_->rc_overshoot_pct = 15;
+ config_->rc_buf_initial_sz = 500;
+ config_->rc_buf_optimal_sz = 600;
+ config_->rc_buf_sz = 1000;
+
+ // set the maximum target size of any key-frame.
+ uint32 rc_max_intra_target = MaxIntraTarget(config_->rc_buf_optimal_sz);
+ vpx_codec_flags_t flags = 0;
+ // TODO(mikhal): Tune settings.
+ if (vpx_codec_enc_init(encoder_, vpx_codec_vp8_cx(), config_.get(), flags)) {
+ DCHECK(false) << "Invalid return value";
+ }
+ vpx_codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 1);
+ vpx_codec_control(encoder_, VP8E_SET_NOISE_SENSITIVITY, 0);
+ vpx_codec_control(encoder_, VP8E_SET_CPUUSED, -6);
+ vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
+ rc_max_intra_target);
+}
+
+bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame,
+ EncodedVideoFrame* encoded_image) {
+ // Image in vpx_image_t format.
+ // Input image is const. VP8's raw image is not defined as const.
+ raw_image_->planes[PLANE_Y] =
+ const_cast<uint8*>(video_frame->data(VideoFrame::kYPlane));
+ raw_image_->planes[PLANE_U] =
+ const_cast<uint8*>(video_frame->data(VideoFrame::kUPlane));
+ raw_image_->planes[PLANE_V] =
+ const_cast<uint8*>(video_frame->data(VideoFrame::kVPlane));
+
+ raw_image_->stride[VPX_PLANE_Y] = video_frame->stride(VideoFrame::kYPlane);
+ raw_image_->stride[VPX_PLANE_U] = video_frame->stride(VideoFrame::kUPlane);
+ raw_image_->stride[VPX_PLANE_V] = video_frame->stride(VideoFrame::kVPlane);
+
+ uint8 latest_frame_id_to_reference;
+ Vp8Buffers buffer_to_update;
+ vpx_codec_flags_t flags = 0;
+ if (key_frame_requested_) {
+ flags = VPX_EFLAG_FORCE_KF;
+ // Self reference.
+ latest_frame_id_to_reference =
+ static_cast<uint8>(last_encoded_frame_id_ + 1);
+ // We can pick any buffer as buffer_to_update since we update
+ // them all.
+ buffer_to_update = kLastBuffer;
+ } else {
+ // Reference all acked frames (buffers).
+ latest_frame_id_to_reference = GetLatestFrameIdToReference();
+ GetCodecReferenceFlags(&flags);
+ buffer_to_update = GetNextBufferToUpdate();
+ GetCodecUpdateFlags(buffer_to_update, &flags);
+ }
+
+ // Note: The duration does not reflect the real time between frames. This is
+ // done to keep the encoder happy.
+ uint32 duration = kVideoFrequency / cast_config_.max_frame_rate;
+ if (vpx_codec_encode(encoder_, raw_image_, timestamp_, duration, flags,
+ VPX_DL_REALTIME)) {
+ return false;
+ }
+ timestamp_ += duration;
+
+ // Get encoded frame.
+ const vpx_codec_cx_pkt_t *pkt = NULL;
+ vpx_codec_iter_t iter = NULL;
+ size_t total_size = 0;
+ while ((pkt = vpx_codec_get_cx_data(encoder_, &iter)) != NULL) {
+ if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
+ total_size += pkt->data.frame.sz;
+ encoded_image->data.reserve(total_size);
+ encoded_image->data.insert(
+ encoded_image->data.end(),
+ static_cast<const uint8*>(pkt->data.frame.buf),
+ static_cast<const uint8*>(pkt->data.frame.buf) +
+ pkt->data.frame.sz);
+ if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
+ encoded_image->key_frame = true;
+ } else {
+ encoded_image->key_frame = false;
+ }
+ }
+ }
+ // Don't update frame_id for zero size frames.
+ if (total_size == 0) return true;
+
+ // Populate the encoded frame.
+ encoded_image->codec = kVp8;
+ encoded_image->last_referenced_frame_id = latest_frame_id_to_reference;
+ encoded_image->frame_id = ++last_encoded_frame_id_;
+
+ VLOG(2) << "VP8 encoded frame:" << static_cast<int>(encoded_image->frame_id)
+ << " sized:" << total_size;
+
+ if (encoded_image->key_frame) {
+ key_frame_requested_ = false;
+
+ for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
+ used_buffers_frame_id_[i] = encoded_image->frame_id;
+ }
+ // We can pick any buffer as last_used_vp8_buffer_ since we update
+ // them all.
+ last_used_vp8_buffer_ = buffer_to_update;
+ } else {
+ if (buffer_to_update != kNoBuffer) {
+ acked_frame_buffers_[buffer_to_update] = false;
+ used_buffers_frame_id_[buffer_to_update] = encoded_image->frame_id;
+ last_used_vp8_buffer_ = buffer_to_update;
+ }
+ }
+ return true;
+}
+
+void Vp8Encoder::GetCodecReferenceFlags(vpx_codec_flags_t* flags) {
+ if (!use_multiple_video_buffers_) return;
+
+ // We need to reference something.
+ DCHECK(acked_frame_buffers_[kAltRefBuffer] ||
+ acked_frame_buffers_[kGoldenBuffer] ||
+ acked_frame_buffers_[kLastBuffer]) << "Invalid state";
+
+ if (!acked_frame_buffers_[kAltRefBuffer]) {
+ *flags |= VP8_EFLAG_NO_REF_ARF;
+ }
+ if (!acked_frame_buffers_[kGoldenBuffer]) {
+ *flags |= VP8_EFLAG_NO_REF_GF;
+ }
+ if (!acked_frame_buffers_[kLastBuffer]) {
+ *flags |= VP8_EFLAG_NO_REF_LAST;
+ }
+}
+
+uint32 Vp8Encoder::GetLatestFrameIdToReference() {
+ if (!use_multiple_video_buffers_) return last_encoded_frame_id_;
+
+ int latest_frame_id_to_reference = -1;
+ if (acked_frame_buffers_[kAltRefBuffer]) {
+ latest_frame_id_to_reference = used_buffers_frame_id_[kAltRefBuffer];
+ }
+ if (acked_frame_buffers_[kGoldenBuffer]) {
+ if (latest_frame_id_to_reference == -1) {
+ latest_frame_id_to_reference = used_buffers_frame_id_[kGoldenBuffer];
+ } else {
+ if (IsNewerFrameId(used_buffers_frame_id_[kGoldenBuffer],
+ latest_frame_id_to_reference)) {
+ latest_frame_id_to_reference = used_buffers_frame_id_[kGoldenBuffer];
+ }
+ }
+ }
+ if (acked_frame_buffers_[kLastBuffer]) {
+ if (latest_frame_id_to_reference == -1) {
+ latest_frame_id_to_reference = used_buffers_frame_id_[kLastBuffer];
+ } else {
+ if (IsNewerFrameId(used_buffers_frame_id_[kLastBuffer],
+ latest_frame_id_to_reference)) {
+ latest_frame_id_to_reference = used_buffers_frame_id_[kLastBuffer];
+ }
+ }
+ }
+ DCHECK(latest_frame_id_to_reference != -1) << "Invalid state";
+ return static_cast<uint32>(latest_frame_id_to_reference);
+}
+
+Vp8Encoder::Vp8Buffers Vp8Encoder::GetNextBufferToUpdate() {
+ // Update at most one buffer, except for key-frames.
+
+ Vp8Buffers buffer_to_update;
+ if (number_of_repeated_buffers_ < max_number_of_repeated_buffers_in_a_row_) {
+ // TODO(pwestin): experiment with this. The issue with only this change is
+ // that we can end up with only 4 frames in flight when we expect 6.
+ // buffer_to_update = last_used_vp8_buffer_;
+ buffer_to_update = kNoBuffer;
+ ++number_of_repeated_buffers_;
+ } else {
+ number_of_repeated_buffers_ = 0;
+ switch (last_used_vp8_buffer_) {
+ case kAltRefBuffer:
+ buffer_to_update = kLastBuffer;
+ VLOG(2) << "VP8 update last buffer";
+ break;
+ case kLastBuffer:
+ buffer_to_update = kGoldenBuffer;
+ VLOG(2) << "VP8 update golden buffer";
+ break;
+ case kGoldenBuffer:
+ buffer_to_update = kAltRefBuffer;
+ VLOG(2) << "VP8 update alt-ref buffer";
+ break;
+ case kNoBuffer:
+ DCHECK(false) << "Invalid state";
+ break;
+ }
+ }
+ return buffer_to_update;
+}
+
+void Vp8Encoder::GetCodecUpdateFlags(Vp8Buffers buffer_to_update,
+ vpx_codec_flags_t* flags) {
+ if (!use_multiple_video_buffers_) return;
+
+ // Update at most one buffer, except for key-frames.
+ switch (buffer_to_update) {
+ case kAltRefBuffer:
+ *flags |= VP8_EFLAG_NO_UPD_GF;
+ *flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kLastBuffer:
+ *flags |= VP8_EFLAG_NO_UPD_GF;
+ *flags |= VP8_EFLAG_NO_UPD_ARF;
+ break;
+ case kGoldenBuffer:
+ *flags |= VP8_EFLAG_NO_UPD_ARF;
+ *flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kNoBuffer:
+ *flags |= VP8_EFLAG_NO_UPD_ARF;
+ *flags |= VP8_EFLAG_NO_UPD_GF;
+ *flags |= VP8_EFLAG_NO_UPD_LAST;
+ *flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ }
+}
+
+void Vp8Encoder::UpdateRates(uint32 new_bitrate) {
+ uint32 new_bitrate_kbit = new_bitrate / 1000;
+ if (config_->rc_target_bitrate == new_bitrate_kbit) return;
+
+ config_->rc_target_bitrate = new_bitrate_kbit;
+
+ // Update encoder context.
+ if (vpx_codec_enc_config_set(encoder_, config_.get())) {
+ DCHECK(false) << "Invalid return value";
+ }
+}
+
+void Vp8Encoder::LatestFrameIdToReference(uint32 frame_id) {
+ if (!use_multiple_video_buffers_) return;
+
+ VLOG(2) << "VP8 ok to reference frame:" << static_cast<int>(frame_id);
+ for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) {
+ if (frame_id == used_buffers_frame_id_[i]) {
+ acked_frame_buffers_[i] = true;
+ }
+ }
+}
+
+void Vp8Encoder::GenerateKeyFrame() {
+ key_frame_requested_ = true;
+}
+
+// Calculate the max size of the key frame relative to a normal delta frame.
+uint32 Vp8Encoder::MaxIntraTarget(uint32 optimal_buffer_size_ms) const {
+ // Set max to the optimal buffer level (normalized by target BR),
+ // and scaled by a scale_parameter.
+ // Max target size = scalePar * optimalBufferSize * targetBR[Kbps].
+ // This values is presented in percentage of perFrameBw:
+ // perFrameBw = targetBR[Kbps] * 1000 / frameRate.
+ // The target in % is as follows:
+
+ float scale_parameter = 0.5;
+ uint32 target_pct = optimal_buffer_size_ms * scale_parameter *
+ cast_config_.max_frame_rate / 10;
+
+ // Don't go below 3 times the per frame bandwidth.
+ return std::max(target_pct, kMinIntra);
+}
+
+} // namespace cast
+} // namespace media
diff --git a/media/cast/video_sender/video_sender.cc b/media/cast/video_sender/video_sender.cc
new file mode 100644
index 0000000..15a4042
--- /dev/null
+++ b/media/cast/video_sender/video_sender.cc
@@ -0,0 +1,455 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cast/video_sender/video_sender.h"
+
+#include <list>
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/cast/cast_defines.h"
+#include "media/cast/net/pacing/paced_sender.h"
+#include "media/cast/video_sender/video_encoder.h"
+
+namespace media {
+namespace cast {
+
+const int64 kMinSchedulingDelayMs = 1;
+
+class LocalRtcpVideoSenderFeedback : public RtcpSenderFeedback {
+ public:
+ explicit LocalRtcpVideoSenderFeedback(VideoSender* video_sender)
+ : video_sender_(video_sender) {
+ }
+
+ virtual void OnReceivedCastFeedback(
+ const RtcpCastMessage& cast_feedback) OVERRIDE {
+ video_sender_->OnReceivedCastFeedback(cast_feedback);
+ }
+
+ private:
+ VideoSender* video_sender_;
+};
+
+class LocalRtpVideoSenderStatistics : public RtpSenderStatistics {
+ public:
+ explicit LocalRtpVideoSenderStatistics(RtpSender* rtp_sender)
+ : rtp_sender_(rtp_sender) {
+ }
+
+ virtual void GetStatistics(const base::TimeTicks& now,
+ RtcpSenderInfo* sender_info) OVERRIDE {
+ rtp_sender_->RtpStatistics(now, sender_info);
+ }
+
+ private:
+ RtpSender* rtp_sender_;
+};
+
+VideoSender::VideoSender(
+ scoped_refptr<CastEnvironment> cast_environment,
+ const VideoSenderConfig& video_config,
+ VideoEncoderController* const video_encoder_controller,
+ PacedPacketSender* const paced_packet_sender)
+ : rtp_max_delay_(
+ base::TimeDelta::FromMilliseconds(video_config.rtp_max_delay_ms)),
+ max_frame_rate_(video_config.max_frame_rate),
+ cast_environment_(cast_environment),
+ rtcp_feedback_(new LocalRtcpVideoSenderFeedback(this)),
+ rtp_sender_(new RtpSender(cast_environment, NULL, &video_config,
+ paced_packet_sender)),
+ last_acked_frame_id_(-1),
+ last_sent_frame_id_(-1),
+ duplicate_ack_(0),
+ last_skip_count_(0),
+ congestion_control_(cast_environment->Clock(),
+ video_config.congestion_control_back_off,
+ video_config.max_bitrate,
+ video_config.min_bitrate,
+ video_config.start_bitrate),
+ initialized_(false),
+ weak_factory_(this) {
+ max_unacked_frames_ = static_cast<uint8>(video_config.rtp_max_delay_ms *
+ video_config.max_frame_rate / 1000) + 1;
+ VLOG(1) << "max_unacked_frames " << static_cast<int>(max_unacked_frames_);
+ DCHECK_GT(max_unacked_frames_, 0) << "Invalid argument";
+
+ rtp_video_sender_statistics_.reset(
+ new LocalRtpVideoSenderStatistics(rtp_sender_.get()));
+
+ if (video_config.use_external_encoder) {
+ DCHECK(video_encoder_controller) << "Invalid argument";
+ video_encoder_controller_ = video_encoder_controller;
+ } else {
+ video_encoder_.reset(new VideoEncoder(cast_environment, video_config,
+ max_unacked_frames_));
+ video_encoder_controller_ = video_encoder_.get();
+ }
+
+ if (video_config.aes_iv_mask.size() == kAesKeySize &&
+ video_config.aes_key.size() == kAesKeySize) {
+ iv_mask_ = video_config.aes_iv_mask;
+ crypto::SymmetricKey* key = crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES, video_config.aes_key);
+ encryptor_.reset(new crypto::Encryptor());
+ encryptor_->Init(key, crypto::Encryptor::CTR, std::string());
+ } else if (video_config.aes_iv_mask.size() != 0 ||
+ video_config.aes_key.size() != 0) {
+ DCHECK(false) << "Invalid crypto configuration";
+ }
+
+ rtcp_.reset(new Rtcp(
+ cast_environment_,
+ rtcp_feedback_.get(),
+ paced_packet_sender,
+ rtp_video_sender_statistics_.get(),
+ NULL,
+ video_config.rtcp_mode,
+ base::TimeDelta::FromMilliseconds(video_config.rtcp_interval),
+ video_config.sender_ssrc,
+ video_config.incoming_feedback_ssrc,
+ video_config.rtcp_c_name));
+}
+
+VideoSender::~VideoSender() {}
+
+void VideoSender::InitializeTimers() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (!initialized_) {
+ initialized_ = true;
+ ScheduleNextRtcpReport();
+ ScheduleNextResendCheck();
+ ScheduleNextSkippedFramesCheck();
+ }
+}
+
+void VideoSender::InsertRawVideoFrame(
+ const scoped_refptr<media::VideoFrame>& video_frame,
+ const base::TimeTicks& capture_time) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ DCHECK(video_encoder_.get()) << "Invalid state";
+ cast_environment_->Logging()->InsertFrameEvent(kVideoFrameReceived,
+ GetVideoRtpTimestamp(capture_time), kFrameIdUnknown);
+
+ if (!video_encoder_->EncodeVideoFrame(video_frame, capture_time,
+ base::Bind(&VideoSender::SendEncodedVideoFrameMainThread,
+ weak_factory_.GetWeakPtr()))) {
+ }
+}
+
+void VideoSender::InsertCodedVideoFrame(const EncodedVideoFrame* encoded_frame,
+ const base::TimeTicks& capture_time,
+ const base::Closure callback) {
+ DCHECK(!video_encoder_.get()) << "Invalid state";
+ DCHECK(encoded_frame) << "Invalid argument";
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+ SendEncodedVideoFrame(encoded_frame, capture_time);
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE, callback);
+}
+
+void VideoSender::SendEncodedVideoFrameMainThread(
+ scoped_ptr<EncodedVideoFrame> video_frame,
+ const base::TimeTicks& capture_time) {
+ SendEncodedVideoFrame(video_frame.get(), capture_time);
+}
+
+bool VideoSender::EncryptVideoFrame(const EncodedVideoFrame& video_frame,
+ EncodedVideoFrame* encrypted_frame) {
+ DCHECK(encryptor_) << "Invalid state";
+
+ if (!encryptor_->SetCounter(GetAesNonce(video_frame.frame_id, iv_mask_))) {
+ NOTREACHED() << "Failed to set counter";
+ return false;
+ }
+
+ if (!encryptor_->Encrypt(video_frame.data, &encrypted_frame->data)) {
+ NOTREACHED() << "Encrypt error";
+ return false;
+ }
+ encrypted_frame->codec = video_frame.codec;
+ encrypted_frame->key_frame = video_frame.key_frame;
+ encrypted_frame->frame_id = video_frame.frame_id;
+ encrypted_frame->last_referenced_frame_id =
+ video_frame.last_referenced_frame_id;
+ return true;
+}
+
+void VideoSender::SendEncodedVideoFrame(const EncodedVideoFrame* encoded_frame,
+ const base::TimeTicks& capture_time) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ last_send_time_ = cast_environment_->Clock()->NowTicks();
+
+ if (encryptor_) {
+ EncodedVideoFrame encrypted_video_frame;
+
+ if (!EncryptVideoFrame(*encoded_frame, &encrypted_video_frame)) {
+ // Logging already done.
+ return;
+ }
+ rtp_sender_->IncomingEncodedVideoFrame(&encrypted_video_frame,
+ capture_time);
+ } else {
+ rtp_sender_->IncomingEncodedVideoFrame(encoded_frame, capture_time);
+ }
+ if (encoded_frame->key_frame) {
+ VLOG(2) << "Send encoded key frame; frame_id:"
+ << static_cast<int>(encoded_frame->frame_id);
+ }
+ last_sent_frame_id_ = static_cast<int>(encoded_frame->frame_id);
+ UpdateFramesInFlight();
+ InitializeTimers();
+}
+
+void VideoSender::IncomingRtcpPacket(const uint8* packet, size_t length,
+ const base::Closure callback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ rtcp_->IncomingRtcpPacket(packet, length);
+ cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE, callback);
+}
+
+void VideoSender::ScheduleNextRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeDelta time_to_next = rtcp_->TimeToSendNextRtcpReport() -
+ cast_environment_->Clock()->NowTicks();
+
+ time_to_next = std::max(time_to_next,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoSender::SendRtcpReport, weak_factory_.GetWeakPtr()),
+ time_to_next);
+}
+
+void VideoSender::SendRtcpReport() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+
+ RtcpSenderLogMessage sender_log_message;
+ const FrameRawMap& frame_raw_map =
+ cast_environment_->Logging()->GetFrameRawData();
+
+ FrameRawMap::const_iterator it = frame_raw_map.begin();
+ while (it != frame_raw_map.end()) {
+ RtcpSenderFrameLogMessage frame_message;
+ frame_message.rtp_timestamp = it->first;
+ frame_message.frame_status = kRtcpSenderFrameStatusUnknown;
+ if (it->second.type.empty()) {
+ ++it;
+ continue;
+ }
+ CastLoggingEvent last_event = it->second.type.back();
+ switch (last_event) {
+ case kVideoFrameCaptured:
+ frame_message.frame_status = kRtcpSenderFrameStatusDroppedByFlowControl;
+ break;
+ case kVideoFrameSentToEncoder:
+ frame_message.frame_status = kRtcpSenderFrameStatusDroppedByEncoder;
+ break;
+ case kVideoFrameEncoded:
+ frame_message.frame_status = kRtcpSenderFrameStatusSentToNetwork;
+ break;
+ default:
+ ++it;
+ continue;
+ }
+ ++it;
+ if (it == frame_raw_map.end()) {
+ // Last message on our map; only send if it is kVideoFrameEncoded.
+ if (last_event != kVideoFrameEncoded) {
+ // For other events we will wait for it to finish and report the result
+ // in the next report.
+ break;
+ }
+ }
+ sender_log_message.push_back(frame_message);
+ }
+ rtcp_->SendRtcpFromRtpSender(&sender_log_message);
+ if (!sender_log_message.empty()) {
+ VLOG(1) << "Failed to send all log messages";
+ }
+
+ // TODO(pwestin): When we start pulling out the logging by other means we need
+ // to synchronize this.
+ cast_environment_->Logging()->Reset();
+ ScheduleNextRtcpReport();
+}
+
+void VideoSender::ScheduleNextResendCheck() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeDelta time_to_next;
+ if (last_send_time_.is_null()) {
+ time_to_next = rtp_max_delay_;
+ } else {
+ time_to_next = last_send_time_ - cast_environment_->Clock()->NowTicks() +
+ rtp_max_delay_;
+ }
+ time_to_next = std::max(time_to_next,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoSender::ResendCheck, weak_factory_.GetWeakPtr()),
+ time_to_next);
+}
+
+void VideoSender::ResendCheck() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (!last_send_time_.is_null() && last_sent_frame_id_ != -1) {
+ base::TimeDelta time_since_last_send =
+ cast_environment_->Clock()->NowTicks() - last_send_time_;
+ if (time_since_last_send > rtp_max_delay_) {
+ if (last_acked_frame_id_ == -1) {
+ // We have not received any ack, send a key frame.
+ video_encoder_controller_->GenerateKeyFrame();
+ last_acked_frame_id_ = -1;
+ last_sent_frame_id_ = -1;
+ UpdateFramesInFlight();
+ } else {
+ DCHECK_LE(0, last_acked_frame_id_);
+
+ uint32 frame_id = static_cast<uint32>(last_acked_frame_id_ + 1);
+ VLOG(1) << "ACK timeout resend frame:" << static_cast<int>(frame_id);
+ ResendFrame(frame_id);
+ }
+ }
+ }
+ ScheduleNextResendCheck();
+}
+
+void VideoSender::ScheduleNextSkippedFramesCheck() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeDelta time_to_next;
+ if (last_checked_skip_count_time_.is_null()) {
+ time_to_next =
+ base::TimeDelta::FromMilliseconds(kSkippedFramesCheckPeriodkMs);
+ } else {
+ time_to_next = last_checked_skip_count_time_ -
+ cast_environment_->Clock()->NowTicks() +
+ base::TimeDelta::FromMilliseconds(kSkippedFramesCheckPeriodkMs);
+ }
+ time_to_next = std::max(time_to_next,
+ base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+
+ cast_environment_->PostDelayedTask(CastEnvironment::MAIN, FROM_HERE,
+ base::Bind(&VideoSender::SkippedFramesCheck, weak_factory_.GetWeakPtr()),
+ time_to_next);
+}
+
+void VideoSender::SkippedFramesCheck() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ int skip_count = video_encoder_controller_->NumberOfSkippedFrames();
+ if (skip_count - last_skip_count_ >
+ kSkippedFramesThreshold * max_frame_rate_) {
+ // TODO(pwestin): Propagate this up to the application.
+ }
+ last_skip_count_ = skip_count;
+ last_checked_skip_count_time_ = cast_environment_->Clock()->NowTicks();
+ ScheduleNextSkippedFramesCheck();
+}
+
+void VideoSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ base::TimeDelta rtt;
+ base::TimeDelta avg_rtt;
+ base::TimeDelta min_rtt;
+ base::TimeDelta max_rtt;
+
+ if (rtcp_->Rtt(&rtt, &avg_rtt, &min_rtt, &max_rtt)) {
+ cast_environment_->Logging()->InsertGenericEvent(kRttMs,
+ rtt.InMilliseconds());
+ // Don't use a RTT lower than our average.
+ rtt = std::max(rtt, avg_rtt);
+ } else {
+ // We have no measured value use default.
+ rtt = base::TimeDelta::FromMilliseconds(kStartRttMs);
+ }
+ if (cast_feedback.missing_frames_and_packets_.empty()) {
+ // No lost packets.
+ int resend_frame = -1;
+ if (last_sent_frame_id_ == -1) return;
+
+ video_encoder_controller_->LatestFrameIdToReference(
+ cast_feedback.ack_frame_id_);
+
+ if (static_cast<uint32>(last_acked_frame_id_ + 1) ==
+ cast_feedback.ack_frame_id_) {
+ uint32 new_bitrate = 0;
+ if (congestion_control_.OnAck(rtt, &new_bitrate)) {
+ video_encoder_controller_->SetBitRate(new_bitrate);
+ }
+ }
+ if (static_cast<uint32>(last_acked_frame_id_) == cast_feedback.ack_frame_id_
+ // We only count duplicate ACKs when we have sent newer frames.
+ && IsNewerFrameId(last_sent_frame_id_, last_acked_frame_id_)) {
+ duplicate_ack_++;
+ } else {
+ duplicate_ack_ = 0;
+ }
+ if (duplicate_ack_ >= 2 && duplicate_ack_ % 3 == 2) {
+ // Resend last ACK + 1 frame.
+ resend_frame = static_cast<uint32>(last_acked_frame_id_ + 1);
+ }
+ if (resend_frame != -1) {
+ DCHECK_LE(0, resend_frame);
+ VLOG(1) << "Received duplicate ACK for frame:"
+ << static_cast<int>(resend_frame);
+ ResendFrame(static_cast<uint32>(resend_frame));
+ }
+ } else {
+ rtp_sender_->ResendPackets(cast_feedback.missing_frames_and_packets_);
+ last_send_time_ = cast_environment_->Clock()->NowTicks();
+
+ uint32 new_bitrate = 0;
+ if (congestion_control_.OnNack(rtt, &new_bitrate)) {
+ video_encoder_controller_->SetBitRate(new_bitrate);
+ }
+ }
+ ReceivedAck(cast_feedback.ack_frame_id_);
+}
+
+void VideoSender::ReceivedAck(uint32 acked_frame_id) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ last_acked_frame_id_ = static_cast<int>(acked_frame_id);
+ cast_environment_->Logging()->InsertGenericEvent(kAckReceived,
+ acked_frame_id);
+ VLOG(2) << "ReceivedAck:" << static_cast<int>(acked_frame_id);
+ last_acked_frame_id_ = acked_frame_id;
+ UpdateFramesInFlight();
+}
+
+void VideoSender::UpdateFramesInFlight() {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ if (last_sent_frame_id_ != -1) {
+ DCHECK_LE(0, last_sent_frame_id_);
+ uint32 frames_in_flight;
+ if (last_acked_frame_id_ != -1) {
+ DCHECK_LE(0, last_acked_frame_id_);
+ frames_in_flight = static_cast<uint32>(last_sent_frame_id_) -
+ static_cast<uint32>(last_acked_frame_id_);
+ } else {
+ frames_in_flight = static_cast<uint32>(last_sent_frame_id_) + 1;
+ }
+ VLOG(2) << "Frames in flight; last sent: " << last_sent_frame_id_
+ << " last acked:" << last_acked_frame_id_;
+ if (frames_in_flight >= max_unacked_frames_) {
+ video_encoder_controller_->SkipNextFrame(true);
+ return;
+ }
+ }
+ video_encoder_controller_->SkipNextFrame(false);
+}
+
+void VideoSender::ResendFrame(uint32 resend_frame_id) {
+ DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
+ MissingFramesAndPacketsMap missing_frames_and_packets;
+ PacketIdSet missing;
+ missing_frames_and_packets.insert(std::make_pair(resend_frame_id, missing));
+ rtp_sender_->ResendPackets(missing_frames_and_packets);
+ last_send_time_ = cast_environment_->Clock()->NowTicks();
+}
+
+} // namespace cast
+} // namespace media