author | mikhal@google.com <mikhal@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-03-10 16:28:33 +0000 |
---|---|---|
committer | mikhal@google.com <mikhal@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-03-10 16:28:33 +0000 |
commit | f4b0da1353f3b435e7bff40d50fbbfe696732839 | |
tree | 74d604ba68b6128b4acece39c79957e55f7fcca7 | |
parent | 232f3511342e05eef6986cd7777dd553fa1a8a40 | |
Cast: Refactoring Cast APIs
Main changes:
1. Adding InitializeAudio and InitializeVideo to CastSender.
2. Separating FrameInput into audio and video, which enables initializing each separately.
3. Changing the CastSender and CastReceiver Create functions to return a scoped_ptr.
These changes better align Cast with the Chromium pipeline (a caller-side sketch of the new flow follows the diffstat below).
BUG=346822
R=hclam@chromium.org
Review URL: https://codereview.chromium.org/163553006
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@255954 0039d316-1c4b-4281-b951-d872f2087c98
21 files changed, 358 insertions, 377 deletions
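The sketch below illustrates the caller side of the refactored sender API described in the commit message: Create() returning a scoped_ptr, independent InitializeAudio()/InitializeVideo() calls, and the split AudioFrameInput/VideoFrameInput accessors. Only the media::cast signatures and status values are taken from the diff below; SetUpSender, the two status-check callbacks, and the assumption that the environment, transport and sender configs already exist are illustrative and not part of this change.

```cpp
// Illustrative only: |cast_environment| and |transport_sender| are assumed to
// have been created already (the renderer code in this patch does that in
// CastSessionDelegate::StartUDP), and the sender configs are assumed valid.
#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_defines.h"
#include "media/cast/cast_environment.h"
#include "media/cast/cast_sender.h"

namespace {

void OnAudioInitialized(media::cast::CastInitializationStatus status) {
  // With the split API each medium reports its own status value.
  DCHECK_EQ(media::cast::STATUS_AUDIO_INITIALIZED, status);
}

void OnVideoInitialized(media::cast::CastInitializationStatus status) {
  DCHECK_EQ(media::cast::STATUS_VIDEO_INITIALIZED, status);
}

}  // namespace

scoped_ptr<media::cast::CastSender> SetUpSender(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    media::cast::transport::CastTransportSender* transport_sender,
    const media::cast::AudioSenderConfig& audio_config,
    const media::cast::VideoSenderConfig& video_config) {
  // Create() now returns a scoped_ptr instead of a raw pointer.
  scoped_ptr<media::cast::CastSender> cast_sender =
      media::cast::CastSender::Create(cast_environment, transport_sender);

  // Audio and video are initialized independently; it is fine to call only
  // one of the two methods.
  cast_sender->InitializeAudio(audio_config, base::Bind(&OnAudioInitialized));
  cast_sender->InitializeVideo(video_config,
                               base::Bind(&OnVideoInitialized),
                               NULL /* gpu_factories */);

  // FrameInput is split: audio and video frames now go to separate objects,
  // valid once the corresponding Initialize*() has reported success.
  scoped_refptr<media::cast::AudioFrameInput> audio_input =
      cast_sender->audio_frame_input();
  scoped_refptr<media::cast::VideoFrameInput> video_input =
      cast_sender->video_frame_input();

  return cast_sender.Pass();
}
```

This mirrors the updated media/cast/test/sender.cc and End2EndTest::Create() in the diff: create the transport and sender, initialize each medium with its own callback, then wire up the packet receivers and frame inputs.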
diff --git a/chrome/renderer/media/cast_rtp_stream.cc b/chrome/renderer/media/cast_rtp_stream.cc index 16e5d1e..3a0fb19 100644 --- a/chrome/renderer/media/cast_rtp_stream.cc +++ b/chrome/renderer/media/cast_rtp_stream.cc @@ -155,7 +155,8 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>, // Attach this sink to MediaStreamTrack. This method call must // be made on the render thread. Incoming data can then be // passed to media::cast::FrameInput on any thread. - void AddToTrack(const scoped_refptr<media::cast::FrameInput>& frame_input) { + void AddToTrack( + const scoped_refptr<media::cast::VideoFrameInput>& frame_input) { DCHECK(render_thread_task_runner_->BelongsToCurrentThread()); frame_input_ = frame_input; @@ -167,7 +168,7 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>, private: blink::WebMediaStreamTrack track_; - scoped_refptr<media::cast::FrameInput> frame_input_; + scoped_refptr<media::cast::VideoFrameInput> frame_input_; bool sink_added_; CastRtpStream::ErrorCallback error_callback_; scoped_refptr<base::SingleThreadTaskRunner> render_thread_task_runner_; @@ -301,7 +302,8 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>, } // See CastVideoSink for details. - void AddToTrack(const scoped_refptr<media::cast::FrameInput>& frame_input) { + void AddToTrack( + const scoped_refptr<media::cast::AudioFrameInput>& frame_input) { DCHECK(render_thread_task_runner_->BelongsToCurrentThread()); frame_input_ = frame_input; if (!sink_added_) { @@ -316,7 +318,7 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>, private: blink::WebMediaStreamTrack track_; - scoped_refptr<media::cast::FrameInput> frame_input_; + scoped_refptr<media::cast::AudioFrameInput> frame_input_; bool sink_added_; CastRtpStream::ErrorCallback error_callback_; base::WeakPtrFactory<CastAudioSink> weak_factory_; @@ -348,26 +350,19 @@ CastRtpPayloadParams::CastRtpPayloadParams() min_bitrate(0), channels(0), width(0), - height(0) { -} + height(0) {} -CastRtpPayloadParams::~CastRtpPayloadParams() { -} +CastRtpPayloadParams::~CastRtpPayloadParams() {} -CastRtpParams::CastRtpParams() { -} +CastRtpParams::CastRtpParams() {} -CastRtpParams::~CastRtpParams() { -} +CastRtpParams::~CastRtpParams() {} CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track, const scoped_refptr<CastSession>& session) - : track_(track), - cast_session_(session), - weak_factory_(this) {} + : track_(track), cast_session_(session), weak_factory_(this) {} -CastRtpStream::~CastRtpStream() { -} +CastRtpStream::~CastRtpStream() {} std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() { if (IsAudio()) @@ -376,9 +371,7 @@ std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() { return SupportedVideoParams(); } -CastRtpParams CastRtpStream::GetParams() { - return params_; -} +CastRtpParams CastRtpStream::GetParams() { return params_; } void CastRtpStream::Start(const CastRtpParams& params, const base::Closure& start_callback, @@ -404,8 +397,7 @@ void CastRtpStream::Start(const CastRtpParams& params, params.payload.clock_rate)); cast_session_->StartAudio( config, - base::Bind(&CastAudioSink::AddToTrack, - audio_sink_->AsWeakPtr())); + base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr())); start_callback.Run(); } else { VideoSenderConfig config; @@ -420,8 +412,7 @@ void CastRtpStream::Start(const CastRtpParams& params, weak_factory_.GetWeakPtr())))); cast_session_->StartVideo( config, - base::Bind(&CastVideoSink::AddToTrack, - video_sink_->AsWeakPtr())); 
+ base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr())); start_callback.Run(); } } diff --git a/chrome/renderer/media/cast_session.cc b/chrome/renderer/media/cast_session.cc index e1ed6d5..788225c 100644 --- a/chrome/renderer/media/cast_session.cc +++ b/chrome/renderer/media/cast_session.cc @@ -24,7 +24,7 @@ CastSession::~CastSession() { } void CastSession::StartAudio(const media::cast::AudioSenderConfig& config, - const FrameInputAvailableCallback& callback) { + const AudioFrameInputAvailableCallback& callback) { DCHECK(content::RenderThread::Get() ->GetMessageLoop() ->message_loop_proxy() @@ -39,7 +39,7 @@ void CastSession::StartAudio(const media::cast::AudioSenderConfig& config, } void CastSession::StartVideo(const media::cast::VideoSenderConfig& config, - const FrameInputAvailableCallback& callback) { + const VideoFrameInputAvailableCallback& callback) { DCHECK(content::RenderThread::Get() ->GetMessageLoop() ->message_loop_proxy() diff --git a/chrome/renderer/media/cast_session.h b/chrome/renderer/media/cast_session.h index ae7d244..15226bf 100644 --- a/chrome/renderer/media/cast_session.h +++ b/chrome/renderer/media/cast_session.h @@ -21,7 +21,8 @@ class MessageLoopProxy; namespace media { class VideoFrame; namespace cast { -class FrameInput; +class AudioFrameInput; +class VideoFrameInput; struct AudioSenderConfig; struct VideoSenderConfig; } // namespace cast @@ -38,9 +39,10 @@ class CastSessionDelegate; // CastSessionDelegate on the IO thread. class CastSession : public base::RefCounted<CastSession> { public: - typedef - base::Callback<void(const scoped_refptr<media::cast::FrameInput>&)> - FrameInputAvailableCallback; + typedef base::Callback<void(const scoped_refptr< + media::cast::AudioFrameInput>&)> AudioFrameInputAvailableCallback; + typedef base::Callback<void(const scoped_refptr< + media::cast::VideoFrameInput>&)> VideoFrameInputAvailableCallback; typedef base::Callback<void(const std::vector<char>&)> SendPacketCallback; typedef base::Callback<void(scoped_ptr<std::string>)> EventLogsCallback; typedef base::Callback<void(scoped_ptr<base::DictionaryValue>)> StatsCallback; @@ -53,9 +55,9 @@ class CastSession : public base::RefCounted<CastSession> { // media::cast::FrameInput will be given through the callback. The // callback will be made on the main thread. void StartAudio(const media::cast::AudioSenderConfig& config, - const FrameInputAvailableCallback& callback); + const AudioFrameInputAvailableCallback& callback); void StartVideo(const media::cast::VideoSenderConfig& config, - const FrameInputAvailableCallback& callback); + const VideoFrameInputAvailableCallback& callback); void StartUDP(const net::IPEndPoint& local_endpoint, const net::IPEndPoint& remote_endpoint); diff --git a/chrome/renderer/media/cast_session_delegate.cc b/chrome/renderer/media/cast_session_delegate.cc index 11f39185..82e3f1d 100644 --- a/chrome/renderer/media/cast_session_delegate.cc +++ b/chrome/renderer/media/cast_session_delegate.cc @@ -43,8 +43,7 @@ const int kMaxAudioEventEntries = kMaxSerializedBytes / 75; } // namespace CastSessionDelegate::CastSessionDelegate() - : transport_configured_(false), - io_message_loop_proxy_( + : io_message_loop_proxy_( content::RenderThread::Get()->GetIOMessageLoopProxy()), weak_factory_(this) { DCHECK(io_message_loop_proxy_); @@ -63,56 +62,80 @@ CastSessionDelegate::~CastSessionDelegate() { } } -void CastSessionDelegate::Initialize( - const media::cast::CastLoggingConfig& logging_config) { - if (cast_environment_) - return; // Already initialized. 
- - // CastSender uses the renderer's IO thread as the main thread. This reduces - // thread hopping for incoming video frames and outgoing network packets. - // There's no need to decode so no thread assigned for decoding. - cast_environment_ = new CastEnvironment( - scoped_ptr<base::TickClock>(new base::DefaultTickClock()).Pass(), - base::MessageLoopProxy::current(), - g_cast_threads.Get().GetAudioEncodeMessageLoopProxy(), - NULL, - g_cast_threads.Get().GetVideoEncodeMessageLoopProxy(), - NULL, - base::MessageLoopProxy::current(), - logging_config); -} - void CastSessionDelegate::StartAudio( const AudioSenderConfig& config, - const FrameInputAvailableCallback& callback) { + const AudioFrameInputAvailableCallback& callback) { DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); - audio_config_.reset(new AudioSenderConfig(config)); - video_frame_input_available_callback_ = callback; - StartSendingInternal(); + audio_frame_input_available_callback_ = callback; + media::cast::transport::CastTransportAudioConfig transport_config; + transport_config.base.ssrc = config.sender_ssrc; + transport_config.codec = config.codec; + transport_config.base.rtp_config = config.rtp_config; + transport_config.frequency = config.frequency; + transport_config.channels = config.channels; + cast_transport_->InitializeAudio(transport_config); + cast_sender_->InitializeAudio( + config, + base::Bind(&CastSessionDelegate::InitializationResult, + weak_factory_.GetWeakPtr())); } void CastSessionDelegate::StartVideo( const VideoSenderConfig& config, - const FrameInputAvailableCallback& callback) { + const VideoFrameInputAvailableCallback& callback) { DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); - audio_frame_input_available_callback_ = callback; + video_frame_input_available_callback_ = callback; - video_config_.reset(new VideoSenderConfig(config)); - StartSendingInternal(); + media::cast::transport::CastTransportVideoConfig transport_config; + transport_config.base.ssrc = config.sender_ssrc; + transport_config.codec = config.codec; + transport_config.base.rtp_config = config.rtp_config; + cast_transport_->InitializeVideo(transport_config); + // TODO(mikhal): Pass in a valid GpuVideoAcceleratorFactories to support + // hardware video encoding. + cast_sender_->InitializeVideo( + config, + base::Bind(&CastSessionDelegate::InitializationResult, + weak_factory_.GetWeakPtr()), + NULL /* GPU*/); } void CastSessionDelegate::StartUDP(const net::IPEndPoint& local_endpoint, const net::IPEndPoint& remote_endpoint) { DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); - transport_configured_ = true; - local_endpoint_ = local_endpoint; - remote_endpoint_ = remote_endpoint; - StartSendingInternal(); + + // CastSender uses the renderer's IO thread as the main thread. This reduces + // thread hopping for incoming video frames and outgoing network packets. + // There's no need to decode so no thread assigned for decoding. + cast_environment_ = new CastEnvironment( + scoped_ptr<base::TickClock>(new base::DefaultTickClock()).Pass(), + base::MessageLoopProxy::current(), + g_cast_threads.Get().GetAudioEncodeMessageLoopProxy(), + NULL, + g_cast_threads.Get().GetVideoEncodeMessageLoopProxy(), + NULL, + base::MessageLoopProxy::current(), + media::cast::GetLoggingConfigWithRawEventsAndStatsEnabled()); + + // Logging: enable raw events and stats collection. 
+ media::cast::CastLoggingConfig logging_config = + media::cast::GetLoggingConfigWithRawEventsAndStatsEnabled(); + // Rationale for using unretained: The callback cannot be called after the + // destruction of CastTransportSenderIPC, and they both share the same thread. + cast_transport_.reset(new CastTransportSenderIPC( + local_endpoint, + remote_endpoint, + base::Bind(&CastSessionDelegate::StatusNotificationCB, + base::Unretained(this)), + logging_config, + base::Bind(&CastSessionDelegate::LogRawEvents, base::Unretained(this)))); + + cast_sender_ = CastSender::Create(cast_environment_, cast_transport_.get()); + cast_transport_->SetPacketReceiver(cast_sender_->packet_receiver()); } -void CastSessionDelegate::ToggleLogging(bool is_audio, - bool enable) { +void CastSessionDelegate::ToggleLogging(bool is_audio, bool enable) { DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); if (enable) { if (is_audio) { @@ -148,11 +171,12 @@ void CastSessionDelegate::ToggleLogging(bool is_audio, } void CastSessionDelegate::GetEventLogsAndReset( - bool is_audio, const EventLogsCallback& callback) { + bool is_audio, + const EventLogsCallback& callback) { DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); - media::cast::EncodingEventSubscriber* subscriber = is_audio ? - audio_event_subscriber_.get() : video_event_subscriber_.get(); + media::cast::EncodingEventSubscriber* subscriber = + is_audio ? audio_event_subscriber_.get() : video_event_subscriber_.get(); if (!subscriber) { callback.Run(make_scoped_ptr(new std::string).Pass()); return; @@ -202,73 +226,20 @@ void CastSessionDelegate::StatusNotificationCB( // TODO(hubbe): Call javascript UDPTransport error function. } -void CastSessionDelegate::StartSendingInternal() { - DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); - - // No transport, wait. - if (!transport_configured_) - return; - - // No audio or video, wait. - if (!audio_config_ || !video_config_) - return; - - // Logging: enable raw events and stats collection. - media::cast::CastLoggingConfig logging_config = - media::cast::GetLoggingConfigWithRawEventsAndStatsEnabled(); - Initialize(logging_config); - - // Rationale for using unretained: The callback cannot be called after the - // destruction of CastTransportSenderIPC, and they both share the same thread. - cast_transport_.reset(new CastTransportSenderIPC( - local_endpoint_, - remote_endpoint_, - base::Bind(&CastSessionDelegate::StatusNotificationCB, - base::Unretained(this)), - logging_config, - base::Bind(&CastSessionDelegate::LogRawEvents, - base::Unretained(this)))); - - // TODO(hubbe): set config.aes_key and config.aes_iv_mask. - if (audio_config_) { - media::cast::transport::CastTransportAudioConfig config; - config.base.ssrc = audio_config_->sender_ssrc; - config.codec = audio_config_->codec; - config.base.rtp_config = audio_config_->rtp_config; - config.frequency = audio_config_->frequency; - config.channels = audio_config_->channels; - cast_transport_->InitializeAudio(config); - } - if (video_config_) { - media::cast::transport::CastTransportVideoConfig config; - config.base.ssrc = video_config_->sender_ssrc; - config.codec = video_config_->codec; - config.base.rtp_config = video_config_->rtp_config; - cast_transport_->InitializeVideo(config); - } - - cast_sender_.reset(CastSender::CreateCastSender( - cast_environment_, - audio_config_.get(), - video_config_.get(), - NULL, // GPU. 
- base::Bind(&CastSessionDelegate::InitializationResult, - weak_factory_.GetWeakPtr()), - cast_transport_.get())); - cast_transport_->SetPacketReceiver(cast_sender_->packet_receiver()); -} - void CastSessionDelegate::InitializationResult( media::cast::CastInitializationStatus result) const { DCHECK(cast_sender_); // TODO(pwestin): handle the error codes. - if (result == media::cast::STATUS_INITIALIZED) { + if (result == media::cast::STATUS_AUDIO_INITIALIZED) { if (!audio_frame_input_available_callback_.is_null()) { - audio_frame_input_available_callback_.Run(cast_sender_->frame_input()); + audio_frame_input_available_callback_.Run( + cast_sender_->audio_frame_input()); } + } else if (result == media::cast::STATUS_VIDEO_INITIALIZED) { if (!video_frame_input_available_callback_.is_null()) { - video_frame_input_available_callback_.Run(cast_sender_->frame_input()); + video_frame_input_available_callback_.Run( + cast_sender_->video_frame_input()); } } } diff --git a/chrome/renderer/media/cast_session_delegate.h b/chrome/renderer/media/cast_session_delegate.h index 07dd5ad..f02dfa3 100644 --- a/chrome/renderer/media/cast_session_delegate.h +++ b/chrome/renderer/media/cast_session_delegate.h @@ -44,25 +44,32 @@ class CastTransportSender; // thread. All methods are accessible only on the IO thread. class CastSessionDelegate { public: - typedef base::Callback<void(const scoped_refptr<media::cast::FrameInput>&)> - FrameInputAvailableCallback; + typedef base::Callback<void(const scoped_refptr< + media::cast::AudioFrameInput>&)> AudioFrameInputAvailableCallback; + typedef base::Callback<void(const scoped_refptr< + media::cast::VideoFrameInput>&)> VideoFrameInputAvailableCallback; typedef base::Callback<void(scoped_ptr<std::string>)> EventLogsCallback; typedef base::Callback<void(scoped_ptr<base::DictionaryValue>)> StatsCallback; CastSessionDelegate(); virtual ~CastSessionDelegate(); + // This will start the session by configuring and creating the Cast transport + // and the Cast sender. + // Must be called before initialization of audio or video. + void StartUDP(const net::IPEndPoint& local_endpoint, + const net::IPEndPoint& remote_endpoint); + // After calling StartAudio() or StartVideo() encoding of that media will // begin as soon as data is delivered to its sink, if the second method is // called the first media will be restarted. It is strongly recommended not to // deliver any data between calling the two methods. // It's OK to call only one of the two methods. + // StartUDP must be called before these methods. void StartAudio(const media::cast::AudioSenderConfig& config, - const FrameInputAvailableCallback& callback); + const AudioFrameInputAvailableCallback& callback); void StartVideo(const media::cast::VideoSenderConfig& config, - const FrameInputAvailableCallback& callback); - void StartUDP(const net::IPEndPoint& local_endpoint, - const net::IPEndPoint& remote_endpoint); + const VideoFrameInputAvailableCallback& callback); void ToggleLogging(bool is_audio, bool enable); void GetEventLogsAndReset(bool is_audio, const EventLogsCallback& callback); @@ -75,13 +82,6 @@ class CastSessionDelegate { void InitializationResult(media::cast::CastInitializationStatus result) const; private: - // Start encoding threads and initialize the CastEnvironment. - void Initialize(const media::cast::CastLoggingConfig& logging_config); - - // Configure CastSender. It is ready to accept audio / video frames after - // receiving a successful call to InitializationResult. 
- void StartSendingInternal(); - void StatusNotificationCB( media::cast::transport::CastTransportStatus status); @@ -93,16 +93,8 @@ class CastSessionDelegate { scoped_ptr<media::cast::CastSender> cast_sender_; scoped_ptr<media::cast::transport::CastTransportSender> cast_transport_; - // Configuration for audio and video. - scoped_ptr<media::cast::AudioSenderConfig> audio_config_; - scoped_ptr<media::cast::VideoSenderConfig> video_config_; - - FrameInputAvailableCallback audio_frame_input_available_callback_; - FrameInputAvailableCallback video_frame_input_available_callback_; - - net::IPEndPoint local_endpoint_; - net::IPEndPoint remote_endpoint_; - bool transport_configured_; + AudioFrameInputAvailableCallback audio_frame_input_available_callback_; + VideoFrameInputAvailableCallback video_frame_input_available_callback_; scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber_; scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber_; diff --git a/media/cast/audio_sender/audio_encoder.cc b/media/cast/audio_sender/audio_encoder.cc index 2363619..efe7a01 100644 --- a/media/cast/audio_sender/audio_encoder.cc +++ b/media/cast/audio_sender/audio_encoder.cc @@ -52,16 +52,16 @@ class AudioEncoder::ImplBase : public base::SupportsWeakPtr<ImplBase> { sampling_rate % 100 != 0 || samples_per_10ms_ * num_channels_ > transport::EncodedAudioFrame::kMaxNumberOfSamples) { - initialization_status_ = STATUS_INVALID_AUDIO_CONFIGURATION; + cast_initialization_cb_ = STATUS_INVALID_AUDIO_CONFIGURATION; } else { - initialization_status_ = STATUS_INITIALIZED; + cast_initialization_cb_ = STATUS_AUDIO_INITIALIZED; } } virtual ~ImplBase() {} CastInitializationStatus InitializationResult() const { - return initialization_status_; + return cast_initialization_cb_; } void LogAudioFrameEvent(uint32 rtp_timestamp, @@ -156,7 +156,7 @@ class AudioEncoder::ImplBase : public base::SupportsWeakPtr<ImplBase> { const int num_channels_; const int samples_per_10ms_; const FrameEncodedCallback callback_; - CastInitializationStatus initialization_status_; + CastInitializationStatus cast_initialization_cb_; private: // In the case where a call to EncodeAudio() cannot completely fill the @@ -192,7 +192,7 @@ class AudioEncoder::OpusImpl : public AudioEncoder::ImplBase { encoder_memory_(new uint8[opus_encoder_get_size(num_channels)]), opus_encoder_(reinterpret_cast<OpusEncoder*>(encoder_memory_.get())), buffer_(new float[num_channels * samples_per_10ms_]) { - if (ImplBase::initialization_status_ != STATUS_INITIALIZED) { + if (ImplBase::cast_initialization_cb_ != STATUS_AUDIO_INITIALIZED) { return; } @@ -316,7 +316,6 @@ AudioEncoder::AudioEncoder( // Note: It doesn't matter which thread constructs AudioEncoder, just so long // as all calls to InsertAudio() are by the same thread. 
insert_thread_checker_.DetachFromThread(); - switch (audio_config.codec) { case transport::kOpus: impl_.reset(new OpusImpl(cast_environment, @@ -340,6 +339,7 @@ AudioEncoder::AudioEncoder( AudioEncoder::~AudioEncoder() {} CastInitializationStatus AudioEncoder::InitializationResult() const { + DCHECK(insert_thread_checker_.CalledOnValidThread()); if (impl_) { return impl_->InitializationResult(); } diff --git a/media/cast/audio_sender/audio_sender.cc b/media/cast/audio_sender/audio_sender.cc index 4ca58ea..cd62f9b 100644 --- a/media/cast/audio_sender/audio_sender.cc +++ b/media/cast/audio_sender/audio_sender.cc @@ -113,7 +113,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, audio_config.incoming_feedback_ssrc, audio_config.rtcp_c_name), timers_initialized_(false), - initialization_status_(STATUS_INITIALIZED), + cast_initialization_cb_(STATUS_AUDIO_UNINITIALIZED), weak_factory_(this) { rtcp_.SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize); if (!audio_config.use_external_encoder) { @@ -122,7 +122,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, audio_config, base::Bind(&AudioSender::SendEncodedAudioFrame, weak_factory_.GetWeakPtr())); - initialization_status_ = audio_encoder_->InitializationResult(); + cast_initialization_cb_ = audio_encoder_->InitializationResult(); } } @@ -141,7 +141,6 @@ void AudioSender::InsertAudio(const AudioBus* audio_bus, const base::Closure& done_callback) { DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); DCHECK(audio_encoder_.get()) << "Invalid internal state"; - audio_encoder_->InsertAudio(audio_bus, recorded_time, done_callback); } diff --git a/media/cast/audio_sender/audio_sender.h b/media/cast/audio_sender/audio_sender.h index 0399833..6afa7cf 100644 --- a/media/cast/audio_sender/audio_sender.h +++ b/media/cast/audio_sender/audio_sender.h @@ -39,7 +39,7 @@ class AudioSender : public base::NonThreadSafe, virtual ~AudioSender(); CastInitializationStatus InitializationResult() const { - return initialization_status_; + return cast_initialization_cb_; } // The |audio_bus| must be valid until the |done_callback| is called. 
@@ -85,7 +85,7 @@ class AudioSender : public base::NonThreadSafe, scoped_ptr<LocalRtcpAudioSenderFeedback> rtcp_feedback_; Rtcp rtcp_; bool timers_initialized_; - CastInitializationStatus initialization_status_; + CastInitializationStatus cast_initialization_cb_; DISALLOW_COPY_AND_ASSIGN(AudioSender); }; diff --git a/media/cast/cast_config.h b/media/cast/cast_config.h index 61e9620..21caea0 100644 --- a/media/cast/cast_config.h +++ b/media/cast/cast_config.h @@ -134,17 +134,6 @@ struct PcmAudioFrame { typedef transport::Packet Packet; typedef transport::PacketList PacketList; -enum CastInitializationStatus { - STATUS_INITIALIZED, - STATUS_INVALID_CAST_ENVIRONMENT, - STATUS_INVALID_CRYPTO_CONFIGURATION, - STATUS_UNSUPPORTED_AUDIO_CODEC, - STATUS_INVALID_AUDIO_CONFIGURATION, - STATUS_INVALID_VIDEO_CONFIGURATION, - STATUS_GPU_ACCELERATION_NOT_SUPPORTED, - STATUS_GPU_ACCELERATION_ERROR, -}; - typedef base::Callback<void(CastInitializationStatus)> CastInitializationCallback; diff --git a/media/cast/cast_defines.h b/media/cast/cast_defines.h index 2e0986f..2ee431e 100644 --- a/media/cast/cast_defines.h +++ b/media/cast/cast_defines.h @@ -29,6 +29,20 @@ const int kStartRttMs = 20; const int64 kCastMessageUpdateIntervalMs = 33; const int64 kNackRepeatIntervalMs = 30; +enum CastInitializationStatus { + STATUS_AUDIO_UNINITIALIZED, + STATUS_VIDEO_UNINITIALIZED, + STATUS_AUDIO_INITIALIZED, + STATUS_VIDEO_INITIALIZED, + STATUS_INVALID_CAST_ENVIRONMENT, + STATUS_INVALID_CRYPTO_CONFIGURATION, + STATUS_UNSUPPORTED_AUDIO_CODEC, + STATUS_INVALID_AUDIO_CONFIGURATION, + STATUS_INVALID_VIDEO_CONFIGURATION, + STATUS_GPU_ACCELERATION_NOT_SUPPORTED, + STATUS_GPU_ACCELERATION_ERROR, +}; + enum DefaultSettings { kDefaultAudioEncoderBitrate = 0, // This means "auto," and may mean VBR. kDefaultAudioSamplingRate = 48000, diff --git a/media/cast/cast_receiver.h b/media/cast/cast_receiver.h index ec4f0d3..581a271 100644 --- a/media/cast/cast_receiver.h +++ b/media/cast/cast_receiver.h @@ -68,7 +68,7 @@ class FrameReceiver : public base::RefCountedThreadSafe<FrameReceiver> { // This Class is thread safe. class CastReceiver { public: - static CastReceiver* CreateCastReceiver( + static scoped_ptr<CastReceiver> Create( scoped_refptr<CastEnvironment> cast_environment, const AudioReceiverConfig& audio_config, const VideoReceiverConfig& video_config, diff --git a/media/cast/cast_receiver_impl.cc b/media/cast/cast_receiver_impl.cc index f9fc5a6..4561fea 100644 --- a/media/cast/cast_receiver_impl.cc +++ b/media/cast/cast_receiver_impl.cc @@ -82,13 +82,13 @@ class LocalFrameReceiver : public FrameReceiver { VideoReceiver* video_receiver_; }; -CastReceiver* CastReceiver::CreateCastReceiver( +scoped_ptr<CastReceiver> CastReceiver::Create( scoped_refptr<CastEnvironment> cast_environment, const AudioReceiverConfig& audio_config, const VideoReceiverConfig& video_config, transport::PacketSender* const packet_sender) { - return new CastReceiverImpl( - cast_environment, audio_config, video_config, packet_sender); + return scoped_ptr<CastReceiver>(new CastReceiverImpl( + cast_environment, audio_config, video_config, packet_sender)); } CastReceiverImpl::CastReceiverImpl( diff --git a/media/cast/cast_sender.h b/media/cast/cast_sender.h index a15e6d3..33b9393 100644 --- a/media/cast/cast_sender.h +++ b/media/cast/cast_sender.h @@ -2,11 +2,10 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // -// This is the main interface for the cast sender. 
All configuration are done -// at creation. +// This is the main interface for the cast sender. // -// The FrameInput and PacketReciever interfaces should normally be accessed from -// the IO thread. However they are allowed to be called from any thread. +// The AudioFrameInput, VideoFrameInput and PacketReciever interfaces should +// be accessed from the main thread. #ifndef MEDIA_CAST_CAST_SENDER_H_ #define MEDIA_CAST_CAST_SENDER_H_ @@ -23,23 +22,32 @@ namespace media { class AudioBus; +class GpuVideoAcceleratorFactories; class VideoFrame; -} -namespace media { namespace cast { +class AudioSender; +class VideoSender; -// This Class is thread safe. -class FrameInput : public base::RefCountedThreadSafe<FrameInput> { +class VideoFrameInput : public base::RefCountedThreadSafe<VideoFrameInput> { public: - // The video_frame must be valid until the callback is called. - // The callback is called from the main cast thread as soon as - // the encoder is done with the frame; it does not mean that the encoded frame - // has been sent out. + // Insert video frames into Cast sender. Frames will be encoded, packetized + // and sent to the network. virtual void InsertRawVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, const base::TimeTicks& capture_time) = 0; + protected: + virtual ~VideoFrameInput() {} + + private: + friend class base::RefCountedThreadSafe<VideoFrameInput>; +}; + +class AudioFrameInput : public base::RefCountedThreadSafe<AudioFrameInput> { + public: + // Insert audio frames into Cast sender. Frames will be encoded, packetized + // and sent to the network. // The |audio_bus| must be valid until the |done_callback| is called. // The callback is called from the main cast thread as soon as the encoder is // done with |audio_bus|; it does not mean that the encoded data has been @@ -49,36 +57,44 @@ class FrameInput : public base::RefCountedThreadSafe<FrameInput> { const base::Closure& done_callback) = 0; protected: - virtual ~FrameInput() {} + virtual ~AudioFrameInput() {} private: - friend class base::RefCountedThreadSafe<FrameInput>; + friend class base::RefCountedThreadSafe<AudioFrameInput>; }; -// This Class is thread safe. -// The provided CastTransportSender object will always be called from the main -// cast thread. -// At least one of AudioSenderConfig and VideoSenderConfig have to be provided. +// The provided CastTransportSender and the CastSender should be called from the +// main thread. class CastSender { public: - static CastSender* CreateCastSender( + static scoped_ptr<CastSender> Create( scoped_refptr<CastEnvironment> cast_environment, - const AudioSenderConfig* audio_config, - const VideoSenderConfig* video_config, - const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, - const CastInitializationCallback& cast_initialization, transport::CastTransportSender* const transport_sender); virtual ~CastSender() {} - // All audio and video frames for the session should be inserted to this - // object. - // Can be called from any thread. - virtual scoped_refptr<FrameInput> frame_input() = 0; + // All video frames for the session should be inserted to this object. + virtual scoped_refptr<VideoFrameInput> video_frame_input() = 0; + + // All audio frames for the session should be inserted to this object. + virtual scoped_refptr<AudioFrameInput> audio_frame_input() = 0; // All RTCP packets for the session should be inserted to this object. - // Can be called from any thread. + // This function and the callback must be called on the main thread. 
virtual transport::PacketReceiverCallback packet_receiver() = 0; + + // Initialize the audio stack. Must be called in order to send audio frames. + // Status of the initialization will be returned on cast_initialization_cb. + virtual void InitializeAudio( + const AudioSenderConfig& audio_config, + const CastInitializationCallback& cast_initialization_cb) = 0; + + // Initialize the video stack. Must be called in order to send video frames. + // Status of the initialization will be returned on cast_initialization_cb. + virtual void InitializeVideo( + const VideoSenderConfig& video_config, + const CastInitializationCallback& cast_initialization_cb, + const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories) = 0; }; } // namespace cast diff --git a/media/cast/cast_sender_impl.cc b/media/cast/cast_sender_impl.cc index 1a97ae4..c461d35 100644 --- a/media/cast/cast_sender_impl.cc +++ b/media/cast/cast_sender_impl.cc @@ -12,17 +12,13 @@ namespace media { namespace cast { -// The LocalFrameInput class posts all incoming frames; audio and video to the -// main cast thread for processing. -// This make the cast sender interface thread safe. -class LocalFrameInput : public FrameInput { +// The LocalVideoFrameInput class posts all incoming video frames to the main +// cast thread for processing. +class LocalVideoFrameInput : public VideoFrameInput { public: - LocalFrameInput(scoped_refptr<CastEnvironment> cast_environment, - base::WeakPtr<AudioSender> audio_sender, - base::WeakPtr<VideoSender> video_sender) - : cast_environment_(cast_environment), - audio_sender_(audio_sender), - video_sender_(video_sender) {} + LocalVideoFrameInput(scoped_refptr<CastEnvironment> cast_environment, + base::WeakPtr<VideoSender> video_sender) + : cast_environment_(cast_environment), video_sender_(video_sender) {} virtual void InsertRawVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, @@ -35,6 +31,26 @@ class LocalFrameInput : public FrameInput { capture_time)); } + protected: + virtual ~LocalVideoFrameInput() {} + + private: + friend class base::RefCountedThreadSafe<LocalVideoFrameInput>; + + scoped_refptr<CastEnvironment> cast_environment_; + base::WeakPtr<VideoSender> video_sender_; + + DISALLOW_COPY_AND_ASSIGN(LocalVideoFrameInput); +}; + +// The LocalAudioFrameInput class posts all incoming audio frames to the main +// cast thread for processing. Therefore frames can be inserted from any thread. 
+class LocalAudioFrameInput : public AudioFrameInput { + public: + LocalAudioFrameInput(scoped_refptr<CastEnvironment> cast_environment, + base::WeakPtr<AudioSender> audio_sender) + : cast_environment_(cast_environment), audio_sender_(audio_sender) {} + virtual void InsertAudio(const AudioBus* audio_bus, const base::TimeTicks& recorded_time, const base::Closure& done_callback) OVERRIDE { @@ -48,96 +64,71 @@ class LocalFrameInput : public FrameInput { } protected: - virtual ~LocalFrameInput() {} + virtual ~LocalAudioFrameInput() {} private: - friend class base::RefCountedThreadSafe<LocalFrameInput>; + friend class base::RefCountedThreadSafe<LocalAudioFrameInput>; scoped_refptr<CastEnvironment> cast_environment_; base::WeakPtr<AudioSender> audio_sender_; - base::WeakPtr<VideoSender> video_sender_; - DISALLOW_COPY_AND_ASSIGN(LocalFrameInput); + DISALLOW_COPY_AND_ASSIGN(LocalAudioFrameInput); }; -CastSender* CastSender::CreateCastSender( +scoped_ptr<CastSender> CastSender::Create( scoped_refptr<CastEnvironment> cast_environment, - const AudioSenderConfig* audio_config, - const VideoSenderConfig* video_config, - const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, - const CastInitializationCallback& initialization_status, transport::CastTransportSender* const transport_sender) { CHECK(cast_environment); - return new CastSenderImpl(cast_environment, - audio_config, - video_config, - gpu_factories, - initialization_status, - transport_sender); + return scoped_ptr<CastSender>( + new CastSenderImpl(cast_environment, transport_sender)); } CastSenderImpl::CastSenderImpl( scoped_refptr<CastEnvironment> cast_environment, - const AudioSenderConfig* audio_config, - const VideoSenderConfig* video_config, - const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, - const CastInitializationCallback& initialization_status, transport::CastTransportSender* const transport_sender) - : initialization_callback_(initialization_status), - packet_receiver_( - base::Bind(&CastSenderImpl::ReceivedPacket, base::Unretained(this))), - cast_environment_(cast_environment), + : cast_environment_(cast_environment), + transport_sender_(transport_sender), weak_factory_(this) { CHECK(cast_environment); - CHECK(audio_config || video_config); - - base::WeakPtr<AudioSender> audio_sender_ptr; - base::WeakPtr<VideoSender> video_sender_ptr; - - if (audio_config) { - CHECK(audio_config->use_external_encoder || - cast_environment->HasAudioEncoderThread()); - - audio_sender_.reset( - new AudioSender(cast_environment, *audio_config, transport_sender)); - ssrc_of_audio_sender_ = audio_config->incoming_feedback_ssrc; - audio_sender_ptr = audio_sender_->AsWeakPtr(); - - CastInitializationStatus status = audio_sender_->InitializationResult(); - if (status != STATUS_INITIALIZED || !video_config) { - if (status == STATUS_INITIALIZED && !video_config) { - // Audio only. 
- frame_input_ = new LocalFrameInput( - cast_environment, audio_sender_ptr, video_sender_ptr); - } - cast_environment->PostTask( - CastEnvironment::MAIN, - FROM_HERE, - base::Bind(&CastSenderImpl::InitializationResult, - weak_factory_.GetWeakPtr(), - status)); - return; - } - } - if (video_config) { - CHECK(video_config->use_external_encoder || - cast_environment->HasVideoEncoderThread()); - - video_sender_.reset( - new VideoSender(cast_environment, - *video_config, - gpu_factories, - base::Bind(&CastSenderImpl::InitializationResult, - weak_factory_.GetWeakPtr()), - transport_sender)); - video_sender_ptr = video_sender_->AsWeakPtr(); - ssrc_of_video_sender_ = video_config->incoming_feedback_ssrc; +} + +void CastSenderImpl::InitializeAudio( + const AudioSenderConfig& audio_config, + const CastInitializationCallback& cast_initialization_cb) { + DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); + CHECK(audio_config.use_external_encoder || + cast_environment_->HasAudioEncoderThread()); + + audio_sender_.reset( + new AudioSender(cast_environment_, audio_config, transport_sender_)); + + CastInitializationStatus status = audio_sender_->InitializationResult(); + + if (status == STATUS_AUDIO_INITIALIZED) { + ssrc_of_audio_sender_ = audio_config.incoming_feedback_ssrc; + audio_frame_input_ = + new LocalAudioFrameInput(cast_environment_, audio_sender_->AsWeakPtr()); } - frame_input_ = - new LocalFrameInput(cast_environment, audio_sender_ptr, video_sender_ptr); + cast_initialization_cb.Run(status); +} - // Handing over responsibility to call NotifyInitialization to the - // video sender. +void CastSenderImpl::InitializeVideo( + const VideoSenderConfig& video_config, + const CastInitializationCallback& cast_initialization_cb, + const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories) { + DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); + CHECK(video_config.use_external_encoder || + cast_environment_->HasVideoEncoderThread()); + + video_sender_.reset(new VideoSender(cast_environment_, + video_config, + gpu_factories, + cast_initialization_cb, + transport_sender_)); + + ssrc_of_video_sender_ = video_config.incoming_feedback_ssrc; + video_frame_input_ = + new LocalVideoFrameInput(cast_environment_, video_sender_->AsWeakPtr()); } CastSenderImpl::~CastSenderImpl() {} @@ -203,16 +194,17 @@ void CastSenderImpl::ReceivedPacket(scoped_ptr<Packet> packet) { } } -scoped_refptr<FrameInput> CastSenderImpl::frame_input() { return frame_input_; } +scoped_refptr<AudioFrameInput> CastSenderImpl::audio_frame_input() { + return audio_frame_input_; +} -transport::PacketReceiverCallback CastSenderImpl::packet_receiver() { - return packet_receiver_; - return base::Bind(&CastSenderImpl::ReceivedPacket, base::Unretained(this)); +scoped_refptr<VideoFrameInput> CastSenderImpl::video_frame_input() { + return video_frame_input_; } -void CastSenderImpl::InitializationResult(CastInitializationStatus status) - const { - initialization_callback_.Run(status); +transport::PacketReceiverCallback CastSenderImpl::packet_receiver() { + return base::Bind(&CastSenderImpl::ReceivedPacket, + weak_factory_.GetWeakPtr()); } } // namespace cast diff --git a/media/cast/cast_sender_impl.h b/media/cast/cast_sender_impl.h index bfb4041..85b8669 100644 --- a/media/cast/cast_sender_impl.h +++ b/media/cast/cast_sender_impl.h @@ -8,49 +8,53 @@ #include "base/memory/scoped_ptr.h" #include "media/cast/audio_sender/audio_sender.h" #include "media/cast/cast_config.h" +#include "media/cast/cast_defines.h" #include 
"media/cast/cast_environment.h" #include "media/cast/cast_sender.h" #include "media/cast/video_sender/video_sender.h" namespace media { class VideoFrame; -} -namespace media { namespace cast { - class AudioSender; class VideoSender; -// This calls is a pure owner class that group all required sending objects -// together such as pacer, packet receiver, frame input, audio and video sender. +// This class combines all required sending objects such as the audio and video +// senders, pacer, packet receiver and frame input. class CastSenderImpl : public CastSender { public: - CastSenderImpl( - scoped_refptr<CastEnvironment> cast_environment, - const AudioSenderConfig* audio_config, - const VideoSenderConfig* video_config, - const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, - const CastInitializationCallback& initialization_status, - transport::CastTransportSender* const transport_sender); + CastSenderImpl(scoped_refptr<CastEnvironment> cast_environment, + transport::CastTransportSender* const transport_sender); + + virtual void InitializeAudio( + const AudioSenderConfig& audio_config, + const CastInitializationCallback& cast_initialization_cb) OVERRIDE; + virtual void InitializeVideo( + const VideoSenderConfig& video_config, + const CastInitializationCallback& cast_initialization_cb, + const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories) + OVERRIDE; virtual ~CastSenderImpl(); - virtual scoped_refptr<FrameInput> frame_input() OVERRIDE; + virtual scoped_refptr<AudioFrameInput> audio_frame_input() OVERRIDE; + virtual scoped_refptr<VideoFrameInput> video_frame_input() OVERRIDE; + virtual transport::PacketReceiverCallback packet_receiver() OVERRIDE; private: void ReceivedPacket(scoped_ptr<Packet> packet); - // Used to trampoline the result back on the correct thread. And guaranteed - // not to be called until the creation is complete. - void InitializationResult(CastInitializationStatus status) const; CastInitializationCallback initialization_callback_; scoped_ptr<AudioSender> audio_sender_; scoped_ptr<VideoSender> video_sender_; - scoped_refptr<FrameInput> frame_input_; - transport::PacketReceiverCallback packet_receiver_; + scoped_refptr<AudioFrameInput> audio_frame_input_; + scoped_refptr<VideoFrameInput> video_frame_input_; scoped_refptr<CastEnvironment> cast_environment_; + // The transport sender is owned by the owner of the CastSender, and should be + // valid throughout the lifetime of the CastSender. + transport::CastTransportSender* const transport_sender_; uint32 ssrc_of_audio_sender_; uint32 ssrc_of_video_sender_; base::WeakPtrFactory<CastSenderImpl> weak_factory_; diff --git a/media/cast/test/end2end_unittest.cc b/media/cast/test/end2end_unittest.cc index 1524c8f..71b2c3e 100644 --- a/media/cast/test/end2end_unittest.cc +++ b/media/cast/test/end2end_unittest.cc @@ -95,6 +95,14 @@ void UpdateCastTransportStatus(transport::CastTransportStatus status) { EXPECT_TRUE(result); } +void AudioInitializationStatus(CastInitializationStatus status) { + EXPECT_EQ(STATUS_AUDIO_INITIALIZED, status); +} + +void VideoInitializationStatus(CastInitializationStatus status) { + EXPECT_EQ(STATUS_VIDEO_INITIALIZED, status); +} + // This is wrapped in a struct because it needs to be put into a std::map. 
typedef struct { int counter[kNumOfLoggingEvents]; @@ -396,8 +404,8 @@ class End2EndTest : public ::testing::Test { : start_time_(), testing_clock_sender_(new base::SimpleTestTickClock()), testing_clock_receiver_(new base::SimpleTestTickClock()), - task_runner_(new test::FakeSingleThreadTaskRunner( - testing_clock_sender_)), + task_runner_( + new test::FakeSingleThreadTaskRunner(testing_clock_sender_)), logging_config_(GetLoggingConfigWithRawEventsAndStatsEnabled()), cast_environment_sender_(new CastEnvironment( scoped_ptr<base::TickClock>(testing_clock_sender_).Pass(), @@ -492,11 +500,10 @@ class End2EndTest : public ::testing::Test { } void Create() { - cast_receiver_.reset( - CastReceiver::CreateCastReceiver(cast_environment_receiver_, - audio_receiver_config_, - video_receiver_config_, - &receiver_to_sender_)); + cast_receiver_ = CastReceiver::Create(cast_environment_receiver_, + audio_receiver_config_, + video_receiver_config_, + &receiver_to_sender_); net::IPEndPoint dummy_endpoint; transport_sender_.reset(new transport::CastTransportSenderImpl( NULL, @@ -512,18 +519,21 @@ class End2EndTest : public ::testing::Test { transport_sender_->InitializeAudio(transport_audio_config_); transport_sender_->InitializeVideo(transport_video_config_); - cast_sender_.reset(CastSender::CreateCastSender( - cast_environment_sender_, - &audio_sender_config_, - &video_sender_config_, - NULL, - base::Bind(&End2EndTest::InitializationResult, base::Unretained(this)), - transport_sender_.get())); + cast_sender_ = + CastSender::Create(cast_environment_sender_, transport_sender_.get()); + + // Initializing audio and video senders. + cast_sender_->InitializeAudio(audio_sender_config_, + base::Bind(&AudioInitializationStatus)); + cast_sender_->InitializeVideo( + video_sender_config_, base::Bind(&VideoInitializationStatus), NULL); receiver_to_sender_.SetPacketReceiver(cast_sender_->packet_receiver()); sender_to_receiver_.SetPacketReceiver(cast_receiver_->packet_receiver()); - frame_input_ = cast_sender_->frame_input(); + audio_frame_input_ = cast_sender_->audio_frame_input(); + video_frame_input_ = cast_sender_->video_frame_input(); + frame_receiver_ = cast_receiver_->frame_receiver(); audio_bus_factory_.reset( @@ -555,7 +565,7 @@ class End2EndTest : public ::testing::Test { media::VideoFrame::CreateFrame( VideoFrame::I420, size, gfx::Rect(size), size, time_diff); PopulateVideoFrame(video_frame, start_value); - frame_input_->InsertRawVideoFrame(video_frame, capture_time); + video_frame_input_->InsertRawVideoFrame(video_frame, capture_time); } void RunTasks(int during_ms) { @@ -567,10 +577,6 @@ class End2EndTest : public ::testing::Test { } } - void InitializationResult(CastInitializationStatus result) { - EXPECT_EQ(result, STATUS_INITIALIZED); - } - void LogRawEvents(const std::vector<PacketEvent>& packet_events) { EXPECT_FALSE(packet_events.empty()); for (std::vector<media::cast::PacketEvent>::const_iterator it = @@ -608,7 +614,8 @@ class End2EndTest : public ::testing::Test { scoped_ptr<CastReceiver> cast_receiver_; scoped_ptr<CastSender> cast_sender_; - scoped_refptr<FrameInput> frame_input_; + scoped_refptr<AudioFrameInput> audio_frame_input_; + scoped_refptr<VideoFrameInput> video_frame_input_; scoped_refptr<FrameReceiver> frame_receiver_; scoped_refptr<TestReceiverAudioCallback> test_receiver_audio_callback_; @@ -620,7 +627,6 @@ class End2EndTest : public ::testing::Test { std::vector<FrameEvent> frame_events_; std::vector<PacketEvent> packet_events_; std::vector<GenericEvent> generic_events_; - // 
|transport_sender_| has a RepeatingTimer which needs a MessageLoop. base::MessageLoop message_loop_; }; @@ -657,7 +663,7 @@ TEST_F(End2EndTest, LoopNoLossPcm16) { } AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, send_time, base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); @@ -714,7 +720,7 @@ TEST_F(End2EndTest, LoopNoLossPcm16ExternalDecoder) { send_time); AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, send_time, base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); @@ -749,7 +755,7 @@ TEST_F(End2EndTest, LoopNoLossOpus) { } AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, send_time, base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); @@ -799,7 +805,7 @@ TEST_F(End2EndTest, StartSenderBeforeReceiver) { base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks)); AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, send_time, base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); @@ -841,7 +847,7 @@ TEST_F(End2EndTest, StartSenderBeforeReceiver) { } AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, send_time, base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); @@ -1072,7 +1078,7 @@ TEST_F(End2EndTest, CryptoAudio) { send_time); } AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, send_time, base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); @@ -1250,7 +1256,7 @@ TEST_F(End2EndTest, AudioLogging) { } AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, send_time, base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); diff --git a/media/cast/test/sender.cc b/media/cast/test/sender.cc index 7a0ff7e..90d48c9 100644 --- a/media/cast/test/sender.cc +++ b/media/cast/test/sender.cc @@ -205,11 +205,13 @@ class SendProcess { SendProcess(scoped_refptr<base::SingleThreadTaskRunner> thread_proxy, base::TickClock* clock, const VideoSenderConfig& video_config, - FrameInput* frame_input) + scoped_refptr<AudioFrameInput> audio_frame_input, + scoped_refptr<VideoFrameInput> video_frame_input) : test_app_thread_proxy_(thread_proxy), video_config_(video_config), audio_diff_(kFrameTimerMs), - frame_input_(frame_input), + audio_frame_input_(audio_frame_input), + video_frame_input_(video_frame_input), synthetic_count_(0), clock_(clock), start_time_(), @@ -245,7 +247,7 @@ class SendProcess { scoped_ptr<AudioBus> audio_bus(audio_bus_factory_->NextAudioBus( base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks)); AudioBus* const audio_bus_ptr = audio_bus.get(); - frame_input_->InsertAudio( + audio_frame_input_->InsertAudio( audio_bus_ptr, clock_->NowTicks(), base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); @@ -277,21 +279,21 @@ class SendProcess { test_app_thread_proxy_->PostDelayedTask( FROM_HERE, base::Bind(&SendProcess::SendVideoFrameOnTime, - base::Unretained(this), + weak_factory_.GetWeakPtr(), video_frame), video_frame_time - elapsed_time); } else { test_app_thread_proxy_->PostTask( FROM_HERE, base::Bind(&SendProcess::SendVideoFrameOnTime, - base::Unretained(this), + weak_factory_.GetWeakPtr(), video_frame)); } } void 
SendVideoFrameOnTime(scoped_refptr<media::VideoFrame> video_frame) { send_time_ = clock_->NowTicks(); - frame_input_->InsertRawVideoFrame(video_frame, send_time_); + video_frame_input_->InsertRawVideoFrame(video_frame, send_time_); test_app_thread_proxy_->PostTask( FROM_HERE, base::Bind(&SendProcess::SendFrame, base::Unretained(this))); } @@ -300,7 +302,8 @@ class SendProcess { scoped_refptr<base::SingleThreadTaskRunner> test_app_thread_proxy_; const VideoSenderConfig video_config_; int audio_diff_; - const scoped_refptr<FrameInput> frame_input_; + const scoped_refptr<AudioFrameInput> audio_frame_input_; + const scoped_refptr<VideoFrameInput> video_frame_input_; FILE* video_file_; uint8 synthetic_count_; base::TickClock* const clock_; // Not owned by this class. @@ -336,8 +339,9 @@ void LogRawEvents( } void InitializationResult(media::cast::CastInitializationStatus result) { - CHECK_EQ(result, media::cast::STATUS_INITIALIZED); - VLOG(1) << "Cast Sender initialized"; + bool end_result = result == media::cast::STATUS_AUDIO_INITIALIZED || + result == media::cast::STATUS_VIDEO_INITIALIZED; + CHECK(end_result) << "Cast sender uninitialized"; } net::IPEndPoint CreateUDPAddress(std::string ip_str, int port) { @@ -359,14 +363,14 @@ void WriteLogsToFileAndStopSubscribing( media::cast::FrameEventMap frame_events; media::cast::PacketEventMap packet_events; media::cast::RtpTimestamp first_rtp_timestamp; - video_event_subscriber->GetEventsAndReset(&frame_events, &packet_events, - &first_rtp_timestamp); + video_event_subscriber->GetEventsAndReset( + &frame_events, &packet_events, &first_rtp_timestamp); VLOG(0) << "Video frame map size: " << frame_events.size(); VLOG(0) << "Video packet map size: " << packet_events.size(); - if (!serializer.SerializeEventsForStream(false, frame_events, packet_events, - first_rtp_timestamp)) { + if (!serializer.SerializeEventsForStream( + false, frame_events, packet_events, first_rtp_timestamp)) { VLOG(1) << "Failed to serialize video events."; return; } @@ -377,14 +381,14 @@ void WriteLogsToFileAndStopSubscribing( // Serialize audio events. cast_environment->Logging()->RemoveRawEventSubscriber( audio_event_subscriber.get()); - audio_event_subscriber->GetEventsAndReset(&frame_events, &packet_events, - &first_rtp_timestamp); + audio_event_subscriber->GetEventsAndReset( + &frame_events, &packet_events, &first_rtp_timestamp); VLOG(0) << "Audio frame map size: " << frame_events.size(); VLOG(0) << "Audio packet map size: " << packet_events.size(); - if (!serializer.SerializeEventsForStream(true, frame_events, packet_events, - first_rtp_timestamp)) { + if (!serializer.SerializeEventsForStream( + true, frame_events, packet_events, first_rtp_timestamp)) { VLOG(1) << "Failed to serialize audio events."; return; } @@ -406,7 +410,6 @@ void WriteLogsToFileAndStopSubscribing( int main(int argc, char** argv) { base::AtExitManager at_exit; - VLOG(1) << "Cast Sender"; base::Thread test_thread("Cast sender test app thread"); base::Thread audio_thread("Cast audio encoder thread"); base::Thread video_thread("Cast video encoder thread"); @@ -472,23 +475,25 @@ int main(int argc, char** argv) { transport_sender->InitializeAudio(transport_audio_config); transport_sender->InitializeVideo(transport_video_config); - scoped_ptr<media::cast::CastSender> cast_sender( - media::cast::CastSender::CreateCastSender( - cast_environment, - &audio_config, - &video_config, - NULL, // gpu_factories. 
- base::Bind(&InitializationResult), - transport_sender.get())); + scoped_ptr<media::cast::CastSender> cast_sender = + media::cast::CastSender::Create(cast_environment, transport_sender.get()); + + cast_sender->InitializeVideo( + video_config, base::Bind(&InitializationResult), NULL); + cast_sender->InitializeAudio(audio_config, base::Bind(&InitializationResult)); transport_sender->SetPacketReceiver(cast_sender->packet_receiver()); - media::cast::FrameInput* frame_input = cast_sender->frame_input(); + scoped_refptr<media::cast::AudioFrameInput> audio_frame_input = + cast_sender->audio_frame_input(); + scoped_refptr<media::cast::VideoFrameInput> video_frame_input = + cast_sender->video_frame_input(); scoped_ptr<media::cast::SendProcess> send_process( new media::cast::SendProcess(test_thread.message_loop_proxy(), cast_environment->Clock(), video_config, - frame_input)); + audio_frame_input, + video_frame_input)); // Set up event subscribers. int logging_duration = media::cast::GetLoggingDuration(); diff --git a/media/cast/test/utility/in_process_receiver.cc b/media/cast/test/utility/in_process_receiver.cc index f916dbb..c8dfc28 100644 --- a/media/cast/test/utility/in_process_receiver.cc +++ b/media/cast/test/utility/in_process_receiver.cc @@ -67,8 +67,8 @@ void InProcessReceiver::StartOnMainThread() { remote_end_point_, base::Bind(&InProcessReceiver::UpdateCastTransportStatus, base::Unretained(this)))); - cast_receiver_.reset(CastReceiver::CreateCastReceiver( - cast_environment_, audio_config_, video_config_, transport_.get())); + cast_receiver_ = CastReceiver::Create( + cast_environment_, audio_config_, video_config_, transport_.get()); // TODO(hubbe): Make the cast receiver do this automatically. transport_->StartReceiving(cast_receiver_->packet_receiver()); diff --git a/media/cast/video_sender/video_sender.cc b/media/cast/video_sender/video_sender.cc index 148086e..1582bc8 100644 --- a/media/cast/video_sender/video_sender.cc +++ b/media/cast/video_sender/video_sender.cc @@ -95,7 +95,7 @@ VideoSender::VideoSender( scoped_refptr<CastEnvironment> cast_environment, const VideoSenderConfig& video_config, const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, - const CastInitializationCallback& initialization_status, + const CastInitializationCallback& cast_initialization_cb, transport::CastTransportSender* const transport_sender) : rtp_max_delay_(base::TimeDelta::FromMilliseconds( video_config.rtp_config.max_delay_ms)), @@ -149,12 +149,12 @@ VideoSender::VideoSender( video_config.rtcp_c_name)); rtcp_->SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize); - // TODO(pwestin): pass cast_initialization to |video_encoder_| + // TODO(pwestin): pass cast_initialization_cb to |video_encoder_| // and remove this call. 
cast_environment_->PostTask( CastEnvironment::MAIN, FROM_HERE, - base::Bind(initialization_status, STATUS_INITIALIZED)); + base::Bind(cast_initialization_cb, STATUS_VIDEO_INITIALIZED)); cast_environment_->Logging()->AddRawEventSubscriber(&event_subscriber_); memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_)); diff --git a/media/cast/video_sender/video_sender.h b/media/cast/video_sender/video_sender.h index 8a59707..8c20a40 100644 --- a/media/cast/video_sender/video_sender.h +++ b/media/cast/video_sender/video_sender.h @@ -47,7 +47,7 @@ class VideoSender : public base::NonThreadSafe, VideoSender(scoped_refptr<CastEnvironment> cast_environment, const VideoSenderConfig& video_config, const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, - const CastInitializationCallback& initialization_status, + const CastInitializationCallback& cast_initialization_cb, transport::CastTransportSender* const transport_sender); virtual ~VideoSender(); diff --git a/media/cast/video_sender/video_sender_unittest.cc b/media/cast/video_sender/video_sender_unittest.cc index 982f7de..0bd4f2f 100644 --- a/media/cast/video_sender/video_sender_unittest.cc +++ b/media/cast/video_sender/video_sender_unittest.cc @@ -63,12 +63,12 @@ class PeerVideoSender : public VideoSender { scoped_refptr<CastEnvironment> cast_environment, const VideoSenderConfig& video_config, const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, - const CastInitializationCallback& initialization_status, + const CastInitializationCallback& cast_initialization_cb, transport::CastTransportSender* const transport_sender) : VideoSender(cast_environment, video_config, gpu_factories, - initialization_status, + cast_initialization_cb, transport_sender) {} using VideoSender::OnReceivedCastFeedback; }; @@ -174,7 +174,7 @@ class VideoSenderTest : public ::testing::Test { } void InitializationResult(CastInitializationStatus result) { - EXPECT_EQ(result, STATUS_INITIALIZED); + EXPECT_EQ(result, STATUS_VIDEO_INITIALIZED); } base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment. |
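Because STATUS_INITIALIZED has been replaced by separate STATUS_AUDIO_INITIALIZED and STATUS_VIDEO_INITIALIZED values, a shared initialization callback now has to dispatch on the status before handing out the matching frame input, as the new CastSessionDelegate::InitializationResult above does. A minimal sketch of that pattern follows; SenderHost, its members, and OnCastInitialized are hypothetical, and only the media::cast types and status values come from this patch.

```cpp
// Hypothetical host object mirroring the dispatch done by the new
// CastSessionDelegate::InitializationResult in this patch.
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "media/cast/cast_defines.h"
#include "media/cast/cast_sender.h"

class SenderHost {
 public:
  // Suitable as the CastInitializationCallback passed to both
  // InitializeAudio() and InitializeVideo().
  void OnCastInitialized(media::cast::CastInitializationStatus status) {
    switch (status) {
      case media::cast::STATUS_AUDIO_INITIALIZED:
        // The audio stack is ready; audio frames may now be inserted.
        audio_input_ = sender_->audio_frame_input();
        break;
      case media::cast::STATUS_VIDEO_INITIALIZED:
        // The video stack is ready; video frames may now be inserted.
        video_input_ = sender_->video_frame_input();
        break;
      default:
        // The remaining values report configuration, codec or GPU problems.
        LOG(ERROR) << "Cast sender initialization failed: " << status;
        break;
    }
  }

 private:
  scoped_ptr<media::cast::CastSender> sender_;
  scoped_refptr<media::cast::AudioFrameInput> audio_input_;
  scoped_refptr<media::cast::VideoFrameInput> video_input_;
};
```

This is essentially what the new CastSessionDelegate::InitializationResult does, except that it forwards the frame inputs to per-medium "available" callbacks instead of storing them directly.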