author     kmackay <kmackay@chromium.org>  2015-10-15 11:11:48 -0700
committer  Commit bot <commit-bot@chromium.org>  2015-10-15 18:12:42 +0000
commit     13e7a879298533b9fb111495c8d7dd1b954aae03 (patch)
tree       0290e0142af9b870e59a39aa40c95f6ba8965c06
parent     a1f93b879cab1a16c3a8a23b55332c21266ce153 (diff)
Upgrade to new CMA backend API
Review URL: https://codereview.chromium.org/1372393007

Cr-Commit-Position: refs/heads/master@{#354307}
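For orientation, this change replaces the old MediaClockDevice / AudioPipelineDevice / VideoPipelineDevice interfaces with a single MediaPipelineBackend that owns its decoders and reports status through a Delegate. The sketch below is not part of the commit; it is a minimal, hedged illustration of the new lifecycle as exercised in the diff (CreateAudioDecoder, SetConfig, Initialize, Start, PushBuffer, delegate callbacks), modeled on the updated cast_audio_output_stream.cc. The PcmPlayer name and the specific PCM parameters are illustrative only.

// Minimal sketch (not part of this commit) of the new MediaPipelineBackend
// lifecycle, assuming the chromecast/public headers touched below.
// "PcmPlayer" and the PCM parameters are illustrative.
#include "base/memory/scoped_ptr.h"
#include "chromecast/base/task_runner_impl.h"
#include "chromecast/public/cast_media_shlib.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
#include "chromecast/public/media/decoder_config.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/media_pipeline_device_params.h"

namespace chromecast {
namespace media {

class PcmPlayer : public MediaPipelineBackend::Delegate {
 public:
  bool Open(TaskRunnerImpl* task_runner) {
    MediaPipelineDeviceParams params(
        MediaPipelineDeviceParams::kModeIgnorePts, task_runner);
    backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params));
    if (!backend_)
      return false;

    // Decoders are created and configured before Initialize().
    decoder_ = backend_->CreateAudioDecoder();
    AudioConfig config;
    config.codec = kCodecPCM;
    config.sample_format = kSampleFormatS16;
    config.bytes_per_channel = 2;
    config.channel_number = 2;
    config.samples_per_second = 48000;
    config.extra_data = nullptr;
    config.extra_data_size = 0;
    config.is_encrypted = false;
    if (!decoder_ || !decoder_->SetConfig(config))
      return false;

    // Initialize() wires up the delegate; Start() begins playback at a PTS.
    return backend_->Initialize(this) && backend_->Start(0 /* start_pts */);
  }

  void Feed(CastDecoderBuffer* buffer) {
    // kBufferPending means OnPushBufferComplete() fires asynchronously later.
    MediaPipelineBackend::BufferStatus status =
        decoder_->PushBuffer(nullptr /* decrypt_context */, buffer);
    if (status != MediaPipelineBackend::kBufferPending)
      OnPushBufferComplete(decoder_, status);
  }

  void Close() {
    if (backend_)
      backend_->Stop();
    backend_.reset();
  }

  // MediaPipelineBackend::Delegate implementation:
  void OnVideoResolutionChanged(MediaPipelineBackend::VideoDecoder* decoder,
                                const Size& size) override {}
  void OnPushBufferComplete(
      MediaPipelineBackend::Decoder* decoder,
      MediaPipelineBackend::BufferStatus status) override {
    // Push the next buffer here, or surface an error on kBufferFailed.
  }
  void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override {}
  void OnDecoderError(MediaPipelineBackend::Decoder* decoder) override {}

 private:
  scoped_ptr<MediaPipelineBackend> backend_;
  MediaPipelineBackend::AudioDecoder* decoder_ = nullptr;  // Owned by backend_.
};

}  // namespace media
}  // namespace chromecast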
-rw-r--r--  chromecast/browser/media/cma_media_pipeline_client.cc | 2
-rw-r--r--  chromecast/browser/media/cma_media_pipeline_client.h | 10
-rw-r--r--  chromecast/browser/media/cma_message_filter_host.h | 8
-rw-r--r--  chromecast/browser/media/media_pipeline_host.cc | 28
-rw-r--r--  chromecast/browser/media/media_pipeline_host.h | 16
-rw-r--r--  chromecast/chromecast.gyp | 10
-rw-r--r--  chromecast/chromecast_tests.gypi | 17
-rw-r--r--  chromecast/media/BUILD.gn | 12
-rw-r--r--  chromecast/media/audio/BUILD.gn | 5
-rw-r--r--  chromecast/media/audio/cast_audio_output_stream.cc | 181
-rw-r--r--  chromecast/media/audio/cast_audio_output_stream.h | 4
-rw-r--r--  chromecast/media/audio/cast_audio_output_stream_unittest.cc | 303
-rw-r--r--  chromecast/media/base/cast_media_default.cc | 2
-rw-r--r--  chromecast/media/cma/BUILD.gn | 1
-rw-r--r--  chromecast/media/cma/backend/BUILD.gn | 14
-rw-r--r--  chromecast/media/cma/backend/audio_pipeline_device_default.cc | 82
-rw-r--r--  chromecast/media/cma/backend/audio_pipeline_device_default.h | 55
-rw-r--r--  chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc | 263
-rw-r--r--  chromecast/media/cma/backend/media_clock_device_default.cc | 114
-rw-r--r--  chromecast/media/cma/backend/media_clock_device_default.h | 45
-rw-r--r--  chromecast/media/cma/backend/media_component_device_default.cc | 241
-rw-r--r--  chromecast/media/cma/backend/media_component_device_default.h | 91
-rw-r--r--  chromecast/media/cma/backend/media_pipeline_backend_default.cc | 148
-rw-r--r--  chromecast/media/cma/backend/media_pipeline_backend_default.h | 39
-rw-r--r--  chromecast/media/cma/backend/video_pipeline_device_default.cc | 82
-rw-r--r--  chromecast/media/cma/backend/video_pipeline_device_default.h | 55
-rw-r--r--  chromecast/media/cma/base/cast_decoder_buffer_impl.cc | 5
-rw-r--r--  chromecast/media/cma/base/cast_decoder_buffer_impl.h | 1
-rw-r--r--  chromecast/media/cma/filters/BUILD.gn | 26
-rw-r--r--  chromecast/media/cma/pipeline/BUILD.gn | 11
-rw-r--r--  chromecast/media/cma/pipeline/audio_pipeline.cc | 17
-rw-r--r--  chromecast/media/cma/pipeline/audio_pipeline.h | 34
-rw-r--r--  chromecast/media/cma/pipeline/audio_pipeline_impl.cc | 59
-rw-r--r--  chromecast/media/cma/pipeline/audio_pipeline_impl.h | 21
-rw-r--r--  chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc | 58
-rw-r--r--  chromecast/media/cma/pipeline/av_pipeline_impl.cc | 104
-rw-r--r--  chromecast/media/cma/pipeline/av_pipeline_impl.h | 37
-rw-r--r--  chromecast/media/cma/pipeline/frame_status_cb_impl.cc | 30
-rw-r--r--  chromecast/media/cma/pipeline/frame_status_cb_impl.h | 38
-rw-r--r--  chromecast/media/cma/pipeline/media_component_device_client_impl.cc | 31
-rw-r--r--  chromecast/media/cma/pipeline/media_component_device_client_impl.h | 37
-rw-r--r--  chromecast/media/cma/pipeline/media_pipeline.h | 71
-rw-r--r--  chromecast/media/cma/pipeline/media_pipeline_client.h | 4
-rw-r--r--  chromecast/media/cma/pipeline/media_pipeline_impl.cc | 167
-rw-r--r--  chromecast/media/cma/pipeline/media_pipeline_impl.h | 67
-rw-r--r--  chromecast/media/cma/pipeline/video_pipeline.cc | 17
-rw-r--r--  chromecast/media/cma/pipeline/video_pipeline.h | 28
-rw-r--r--  chromecast/media/cma/pipeline/video_pipeline_device_client_impl.cc | 33
-rw-r--r--  chromecast/media/cma/pipeline/video_pipeline_device_client_impl.h | 39
-rw-r--r--  chromecast/media/cma/pipeline/video_pipeline_impl.cc | 53
-rw-r--r--  chromecast/media/cma/pipeline/video_pipeline_impl.h | 21
-rw-r--r--  chromecast/media/cma/test/cma_end_to_end_test.cc | 106
-rw-r--r--  chromecast/media/cma/test/media_component_device_feeder_for_test.cc | 130
-rw-r--r--  chromecast/media/cma/test/media_component_device_feeder_for_test.h | 68
-rw-r--r--  chromecast/media/media.gyp | 44
-rw-r--r--  chromecast/public/cast_media_shlib.h | 6
-rw-r--r--  chromecast/public/media/BUILD.gn | 4
-rw-r--r--  chromecast/public/media/audio_pipeline_device.h | 35
-rw-r--r--  chromecast/public/media/media_clock_device.h | 63
-rw-r--r--  chromecast/public/media/media_component_device.h | 142
-rw-r--r--  chromecast/public/media/media_pipeline_backend.h | 194
-rw-r--r--  chromecast/public/media/media_pipeline_device_params.h | 4
-rw-r--r--  chromecast/public/media/video_pipeline_device.h | 50
-rw-r--r--  chromecast/renderer/media/BUILD.gn | 29
-rw-r--r--  chromecast/renderer/media/DEPS | 3
-rw-r--r--  chromecast/renderer/media/audio_pipeline_proxy.cc | 10
-rw-r--r--  chromecast/renderer/media/audio_pipeline_proxy.h | 14
-rw-r--r--  chromecast/renderer/media/chromecast_media_renderer_factory.cc | 28
-rw-r--r--  chromecast/renderer/media/chromecast_media_renderer_factory.h | 2
-rw-r--r--  chromecast/renderer/media/cma_renderer.cc (renamed from chromecast/media/cma/filters/cma_renderer.cc) | 57
-rw-r--r--  chromecast/renderer/media/cma_renderer.h (renamed from chromecast/media/cma/filters/cma_renderer.h) | 37
-rw-r--r--  chromecast/renderer/media/demuxer_stream_adapter.cc (renamed from chromecast/media/cma/filters/demuxer_stream_adapter.cc) | 2
-rw-r--r--  chromecast/renderer/media/demuxer_stream_adapter.h (renamed from chromecast/media/cma/filters/demuxer_stream_adapter.h) | 10
-rw-r--r--  chromecast/renderer/media/demuxer_stream_adapter_unittest.cc (renamed from chromecast/media/cma/filters/demuxer_stream_adapter_unittest.cc) | 35
-rw-r--r--  chromecast/renderer/media/demuxer_stream_for_test.cc (renamed from chromecast/media/cma/test/demuxer_stream_for_test.cc) | 21
-rw-r--r--  chromecast/renderer/media/demuxer_stream_for_test.h (renamed from chromecast/media/cma/test/demuxer_stream_for_test.h) | 9
-rw-r--r--  chromecast/renderer/media/hole_frame_factory.cc (renamed from chromecast/media/cma/filters/hole_frame_factory.cc) | 11
-rw-r--r--  chromecast/renderer/media/hole_frame_factory.h (renamed from chromecast/media/cma/filters/hole_frame_factory.h) | 6
-rw-r--r--  chromecast/renderer/media/media_pipeline_proxy.cc | 10
-rw-r--r--  chromecast/renderer/media/media_pipeline_proxy.h | 41
-rw-r--r--  chromecast/renderer/media/multi_demuxer_stream_adapter_unittest.cc (renamed from chromecast/media/cma/filters/multi_demuxer_stream_adapter_unittest.cc) | 24
-rw-r--r--  chromecast/renderer/media/video_pipeline_proxy.cc | 6
-rw-r--r--  chromecast/renderer/media/video_pipeline_proxy.h | 12
83 files changed, 1325 insertions(+), 2725 deletions(-)
diff --git a/chromecast/browser/media/cma_media_pipeline_client.cc b/chromecast/browser/media/cma_media_pipeline_client.cc
index d19aa54..0507d87 100644
--- a/chromecast/browser/media/cma_media_pipeline_client.cc
+++ b/chromecast/browser/media/cma_media_pipeline_client.cc
@@ -16,7 +16,7 @@ CmaMediaPipelineClient::~CmaMediaPipelineClient() {}
scoped_ptr<MediaPipelineBackend>
CmaMediaPipelineClient::CreateMediaPipelineBackend(
- const media::MediaPipelineDeviceParams& params) {
+ const MediaPipelineDeviceParams& params) {
return make_scoped_ptr(CastMediaShlib::CreateMediaPipelineBackend(params));
}
diff --git a/chromecast/browser/media/cma_media_pipeline_client.h b/chromecast/browser/media/cma_media_pipeline_client.h
index f7ecd4b..4caa59d 100644
--- a/chromecast/browser/media/cma_media_pipeline_client.h
+++ b/chromecast/browser/media/cma_media_pipeline_client.h
@@ -14,19 +14,21 @@
namespace chromecast {
namespace media {
-// Class to provide media backend and watch media pipeline status
+struct MediaPipelineDeviceParams;
+
+// Class to provide media backend and watch media pipeline status.
class CmaMediaPipelineClient : public base::RefCounted<CmaMediaPipelineClient>,
public CastResource {
public:
CmaMediaPipelineClient();
virtual scoped_ptr<MediaPipelineBackend> CreateMediaPipelineBackend(
- const media::MediaPipelineDeviceParams& params);
+ const MediaPipelineDeviceParams& params);
virtual void OnMediaPipelineBackendCreated();
virtual void OnMediaPipelineBackendDestroyed();
- // cast::CastResource implementations
+ // cast::CastResource implementation:
void ReleaseResource(CastResource::Resource resource) override;
protected:
@@ -35,7 +37,7 @@ class CmaMediaPipelineClient : public base::RefCounted<CmaMediaPipelineClient>,
private:
friend class base::RefCounted<CmaMediaPipelineClient>;
- // Number of created media pipelines
+ // Number of created media pipelines.
size_t media_pipeline_count_;
base::ThreadChecker thread_checker_;
diff --git a/chromecast/browser/media/cma_message_filter_host.h b/chromecast/browser/media/cma_message_filter_host.h
index c109f93..6a1d6f2 100644
--- a/chromecast/browser/media/cma_message_filter_host.h
+++ b/chromecast/browser/media/cma_message_filter_host.h
@@ -46,12 +46,12 @@ class CmaMessageFilterHost
public:
// Factory method to create a MediaPipelineBackend
typedef base::Callback<scoped_ptr<MediaPipelineBackend>(
- const MediaPipelineDeviceParams&)> CreateDeviceComponentsCB;
+ const MediaPipelineDeviceParams&)> CreateBackendCB;
CmaMessageFilterHost(int render_process_id,
scoped_refptr<CmaMediaPipelineClient> client);
- // content::BrowserMessageFilter implementation.
+ // content::BrowserMessageFilter implementation:
void OnChannelClosing() override;
void OnDestruct() const override;
bool OnMessageReceived(const IPC::Message& message) override;
@@ -114,8 +114,8 @@ class CmaMessageFilterHost
// Render process ID correponding to this message filter.
const int process_id_;
- // Factory function for device-specific part of media pipeline creation
- CreateDeviceComponentsCB create_device_components_cb_;
+ // Factory function for media pipeline backend.
+ CreateBackendCB create_backend_cb_;
scoped_refptr<CmaMediaPipelineClient> client_;
// List of media pipeline and message loop media pipelines are running on.
diff --git a/chromecast/browser/media/media_pipeline_host.cc b/chromecast/browser/media/media_pipeline_host.cc
index 013a40b..45cc897 100644
--- a/chromecast/browser/media/media_pipeline_host.cc
+++ b/chromecast/browser/media/media_pipeline_host.cc
@@ -53,10 +53,9 @@ MediaPipelineHost::~MediaPipelineHost() {
media_track_map_.clear();
}
-void MediaPipelineHost::Initialize(
- LoadType load_type,
- const MediaPipelineClient& client,
- const CreateDeviceComponentsCB& create_device_components_cb) {
+void MediaPipelineHost::Initialize(LoadType load_type,
+ const MediaPipelineClient& client,
+ const CreateBackendCB& create_backend_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
media_pipeline_.reset(new MediaPipelineImpl());
task_runner_.reset(new TaskRunnerImpl());
@@ -67,8 +66,8 @@ void MediaPipelineHost::Initialize(
MediaPipelineDeviceParams default_parameters(sync_type, task_runner_.get());
media_pipeline_->SetClient(client);
- media_pipeline_->Initialize(
- load_type, create_device_components_cb.Run(default_parameters).Pass());
+ media_pipeline_->Initialize(load_type,
+ create_backend_cb.Run(default_parameters).Pass());
}
void MediaPipelineHost::SetAvPipe(
@@ -99,13 +98,10 @@ void MediaPipelineHost::SetAvPipe(
}
media_track_host->pipe_write_cb = frame_provider_host->GetFifoWriteEventCb();
- scoped_ptr<CodedFrameProvider> frame_provider(frame_provider_host.release());
if (track_id == kAudioTrackId) {
- media_pipeline_->GetAudioPipelineImpl()->SetCodedFrameProvider(
- frame_provider.Pass());
+ audio_frame_provider_ = frame_provider_host.Pass();
} else {
- media_pipeline_->GetVideoPipelineImpl()->SetCodedFrameProvider(
- frame_provider.Pass());
+ video_frame_provider_ = frame_provider_host.Pass();
}
av_pipe_set_cb.Run();
}
@@ -117,21 +113,19 @@ void MediaPipelineHost::AudioInitialize(
const ::media::PipelineStatusCB& status_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
CHECK(track_id == kAudioTrackId);
- media_pipeline_->GetAudioPipeline()->SetClient(client);
media_pipeline_->InitializeAudio(
- config, scoped_ptr<CodedFrameProvider>(), status_cb);
+ config, client, audio_frame_provider_.Pass(), status_cb);
}
void MediaPipelineHost::VideoInitialize(
TrackId track_id,
const VideoPipelineClient& client,
- const std::vector<::media::VideoDecoderConfig>& configs,
+ const std::vector< ::media::VideoDecoderConfig>& configs,
const ::media::PipelineStatusCB& status_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
CHECK(track_id == kVideoTrackId);
- media_pipeline_->GetVideoPipeline()->SetClient(client);
media_pipeline_->InitializeVideo(
- configs, scoped_ptr<CodedFrameProvider>(), status_cb);
+ configs, client, video_frame_provider_.Pass(), status_cb);
}
void MediaPipelineHost::StartPlayingFrom(base::TimeDelta time) {
@@ -157,7 +151,7 @@ void MediaPipelineHost::SetPlaybackRate(double playback_rate) {
void MediaPipelineHost::SetVolume(TrackId track_id, float volume) {
DCHECK(thread_checker_.CalledOnValidThread());
CHECK(track_id == kAudioTrackId);
- media_pipeline_->GetAudioPipeline()->SetVolume(volume);
+ media_pipeline_->SetVolume(volume);
}
void MediaPipelineHost::SetCdm(BrowserCdmCast* cdm) {
diff --git a/chromecast/browser/media/media_pipeline_host.h b/chromecast/browser/media/media_pipeline_host.h
index db26bb9..252a548 100644
--- a/chromecast/browser/media/media_pipeline_host.h
+++ b/chromecast/browser/media/media_pipeline_host.h
@@ -33,24 +33,25 @@ class TaskRunnerImpl;
namespace media {
struct AvPipelineClient;
class BrowserCdmCast;
-class MediaPipelineBackend;
struct MediaPipelineClient;
+struct VideoPipelineClient;
+class CodedFrameProvider;
+class MediaPipelineBackend;
struct MediaPipelineDeviceParams;
class MediaPipelineImpl;
-struct VideoPipelineClient;
class MediaPipelineHost {
public:
// Factory method to create a MediaPipelineBackend
- typedef base::Callback<scoped_ptr<media::MediaPipelineBackend>(
- const MediaPipelineDeviceParams&)> CreateDeviceComponentsCB;
+ typedef base::Callback<scoped_ptr<MediaPipelineBackend>(
+ const MediaPipelineDeviceParams&)> CreateBackendCB;
MediaPipelineHost();
~MediaPipelineHost();
void Initialize(LoadType load_type,
const MediaPipelineClient& client,
- const CreateDeviceComponentsCB& create_device_components_cb);
+ const CreateBackendCB& create_backend_cb);
void SetAvPipe(TrackId track_id,
scoped_ptr<base::SharedMemory> shared_mem,
@@ -62,7 +63,7 @@ class MediaPipelineHost {
const ::media::PipelineStatusCB& status_cb);
void VideoInitialize(TrackId track_id,
const VideoPipelineClient& client,
- const std::vector<::media::VideoDecoderConfig>& configs,
+ const std::vector< ::media::VideoDecoderConfig>& configs,
const ::media::PipelineStatusCB& status_cb);
void StartPlayingFrom(base::TimeDelta time);
void Flush(const ::media::PipelineStatusCB& status_cb);
@@ -80,6 +81,9 @@ class MediaPipelineHost {
scoped_ptr<TaskRunnerImpl> task_runner_;
scoped_ptr<MediaPipelineImpl> media_pipeline_;
+ scoped_ptr<CodedFrameProvider> audio_frame_provider_;
+ scoped_ptr<CodedFrameProvider> video_frame_provider_;
+
// The shared memory for a track id must be valid until Stop is invoked on
// that track id.
struct MediaTrackHost;
diff --git a/chromecast/chromecast.gyp b/chromecast/chromecast.gyp
index f558b59..e71e25c 100644
--- a/chromecast/chromecast.gyp
+++ b/chromecast/chromecast.gyp
@@ -56,18 +56,14 @@
'public/graphics_properties_shlib.h',
'public/graphics_types.h',
'public/media_codec_support.h',
- 'public/media/audio_pipeline_device.h',
'public/media/cast_decoder_buffer.h',
'public/media/cast_decrypt_config.h',
'public/media/cast_key_system.h',
'public/media/decoder_config.h',
'public/media/decrypt_context.h',
- 'public/media/media_clock_device.h',
- 'public/media/media_component_device.h',
'public/media/media_pipeline_backend.h',
'public/media/media_pipeline_device_params.h',
'public/media/stream_id.h',
- 'public/media/video_pipeline_device.h',
'public/osd_plane.h',
'public/osd_plane_shlib.h',
'public/osd_surface.h',
@@ -723,6 +719,12 @@
'renderer/media/chromecast_media_renderer_factory.h',
'renderer/media/cma_message_filter_proxy.cc',
'renderer/media/cma_message_filter_proxy.h',
+ 'renderer/media/cma_renderer.cc',
+ 'renderer/media/cma_renderer.h',
+ 'renderer/media/demuxer_stream_adapter.cc',
+ 'renderer/media/demuxer_stream_adapter.h',
+ 'renderer/media/hole_frame_factory.cc',
+ 'renderer/media/hole_frame_factory.h',
'renderer/media/media_channel_proxy.cc',
'renderer/media/media_channel_proxy.h',
'renderer/media/media_pipeline_proxy.cc',
diff --git a/chromecast/chromecast_tests.gypi b/chromecast/chromecast_tests.gypi
index ff96324..bc42b1c 100644
--- a/chromecast/chromecast_tests.gypi
+++ b/chromecast/chromecast_tests.gypi
@@ -169,6 +169,7 @@
}],
['OS!="android"', {
'dependencies': [
+ 'cast_renderer_media_unittests',
'cast_shell_unittests',
'cast_shell_browser_test',
'media/media.gyp:cast_media_unittests',
@@ -309,6 +310,22 @@
], # end of targets
}, { # OS!="android"
'targets': [
+ {
+ 'target_name': 'cast_renderer_media_unittests',
+ 'type': '<(gtest_target_type)',
+ 'dependencies': [
+ 'cast_shell_media',
+ '../base/base.gyp:run_all_unittests',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'renderer/media/demuxer_stream_adapter_unittest.cc',
+ 'renderer/media/demuxer_stream_for_test.cc',
+ 'renderer/media/demuxer_stream_for_test.h',
+ 'renderer/media/multi_demuxer_stream_adapter_unittest.cc',
+ ],
+ }, # end of cast_renderer_media_unittests
# GN target: //chromecast/browser:test_support
{
'target_name': 'cast_shell_test_support',
diff --git a/chromecast/media/BUILD.gn b/chromecast/media/BUILD.gn
index 4961a3f7..f12385b 100644
--- a/chromecast/media/BUILD.gn
+++ b/chromecast/media/BUILD.gn
@@ -21,20 +21,14 @@ test("cast_media_unittests") {
"cma/base/balanced_media_task_runner_unittest.cc",
"cma/base/buffering_controller_unittest.cc",
"cma/base/buffering_frame_provider_unittest.cc",
- "cma/filters/demuxer_stream_adapter_unittest.cc",
- "cma/filters/multi_demuxer_stream_adapter_unittest.cc",
"cma/ipc/media_message_fifo_unittest.cc",
"cma/ipc/media_message_unittest.cc",
"cma/ipc_streamer/av_streamer_unittest.cc",
"cma/pipeline/audio_video_pipeline_impl_unittest.cc",
- "cma/test/demuxer_stream_for_test.cc",
- "cma/test/demuxer_stream_for_test.h",
"cma/test/frame_generator_for_test.cc",
"cma/test/frame_generator_for_test.h",
"cma/test/frame_segmenter_for_test.cc",
"cma/test/frame_segmenter_for_test.h",
- "cma/test/media_component_device_feeder_for_test.cc",
- "cma/test/media_component_device_feeder_for_test.h",
"cma/test/mock_frame_consumer.cc",
"cma/test/mock_frame_consumer.h",
"cma/test/mock_frame_provider.cc",
@@ -47,6 +41,12 @@ test("cast_media_unittests") {
deps = [
":media",
"//chromecast/media/audio:test_support",
+ "//chromecast/media/base:message_loop",
+ "//chromecast/media/cma/backend",
+ "//chromecast/media/cma/base",
+ "//chromecast/media/cma/ipc",
+ "//chromecast/media/cma/ipc_streamer",
+ "//chromecast/media/cma/pipeline",
"//base",
"//base:i18n",
"//base/test:test_support",
diff --git a/chromecast/media/audio/BUILD.gn b/chromecast/media/audio/BUILD.gn
index a5ca0f1..5d87fa3 100644
--- a/chromecast/media/audio/BUILD.gn
+++ b/chromecast/media/audio/BUILD.gn
@@ -28,8 +28,11 @@ source_set("test_support") {
configs += [ "//chromecast:config" ]
- deps = [
+ public_deps = [
":audio",
+ ]
+
+ deps = [
"//testing/gtest",
]
}
diff --git a/chromecast/media/audio/cast_audio_output_stream.cc b/chromecast/media/audio/cast_audio_output_stream.cc
index 5f0645e..73c935b 100644
--- a/chromecast/media/audio/cast_audio_output_stream.cc
+++ b/chromecast/media/audio/cast_audio_output_stream.cc
@@ -5,6 +5,7 @@
#include "chromecast/media/audio/cast_audio_output_stream.h"
#include "base/bind.h"
+#include "base/callback_helpers.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread_checker.h"
#include "chromecast/base/metrics/cast_metrics_helper.h"
@@ -13,11 +14,8 @@
#include "chromecast/media/base/media_message_loop.h"
#include "chromecast/media/cma/base/cast_decoder_buffer_impl.h"
#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
-#include "chromecast/media/cma/pipeline/frame_status_cb_impl.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
#include "chromecast/public/media/decoder_config.h"
#include "chromecast/public/media/decrypt_context.h"
-#include "chromecast/public/media/media_clock_device.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/media_pipeline_device_params.h"
#include "media/base/bind_to_current_loop.h"
@@ -25,28 +23,18 @@
namespace chromecast {
namespace media {
-
namespace {
-bool InitClockDevice(MediaClockDevice* clock_device) {
- DCHECK(clock_device);
- DCHECK_EQ(clock_device->GetState(), MediaClockDevice::kStateUninitialized);
-
- if (!clock_device->SetState(media::MediaClockDevice::kStateIdle))
- return false;
-
- if (!clock_device->ResetTimeline(0))
- return false;
-
- if (!clock_device->SetRate(1.0))
- return false;
- return true;
-}
+MediaPipelineBackend::AudioDecoder* InitializeBackend(
+ const ::media::AudioParameters& audio_params,
+ MediaPipelineBackend* backend,
+ MediaPipelineBackend::Delegate* delegate) {
+ DCHECK(backend);
+ DCHECK(delegate);
-bool InitAudioDevice(const ::media::AudioParameters& audio_params,
- AudioPipelineDevice* audio_device) {
- DCHECK(audio_device);
- DCHECK_EQ(audio_device->GetState(), AudioPipelineDevice::kStateUninitialized);
+ MediaPipelineBackend::AudioDecoder* decoder = backend->CreateAudioDecoder();
+ if (!decoder)
+ return nullptr;
AudioConfig audio_config;
audio_config.codec = kCodecPCM;
@@ -57,114 +45,140 @@ bool InitAudioDevice(const ::media::AudioParameters& audio_params,
audio_config.extra_data = nullptr;
audio_config.extra_data_size = 0;
audio_config.is_encrypted = false;
- if (!audio_device->SetConfig(audio_config))
- return false;
- if (!audio_device->SetState(AudioPipelineDevice::kStateIdle))
- return false;
+ if (!decoder->SetConfig(audio_config))
+ return nullptr;
- return true;
+ if (!backend->Initialize(delegate))
+ return nullptr;
+
+ return decoder;
}
+
} // namespace
// Backend represents a MediaPipelineBackend adapter that runs on cast
// media thread (media::MediaMessageLoop::GetTaskRunner).
// It can be created and destroyed on any thread, but all other member functions
// must be called on a single thread.
-class CastAudioOutputStream::Backend {
+class CastAudioOutputStream::Backend : public MediaPipelineBackend::Delegate {
public:
- typedef base::Callback<void(bool)> PushFrameCompletionCallback;
+ typedef base::Callback<void(bool)> PushBufferCompletionCallback;
Backend(const ::media::AudioParameters& audio_params)
- : audio_params_(audio_params) {
+ : audio_params_(audio_params),
+ decoder_(nullptr),
+ first_start_(true),
+ error_(false),
+ backend_buffer_(nullptr) {
thread_checker_.DetachFromThread();
}
- ~Backend() {}
+ ~Backend() override {}
void Open(CastAudioManager* audio_manager,
bool* success,
base::WaitableEvent* completion_event) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(backend_ == nullptr);
+ DCHECK(audio_manager);
+ DCHECK(success);
+ DCHECK(completion_event);
backend_task_runner_.reset(new TaskRunnerImpl());
MediaPipelineDeviceParams device_params(
MediaPipelineDeviceParams::kModeIgnorePts, backend_task_runner_.get());
-
- scoped_ptr<MediaPipelineBackend> pipeline_backend =
- audio_manager->CreateMediaPipelineBackend(device_params);
- if (pipeline_backend && InitClockDevice(pipeline_backend->GetClock()) &&
- InitAudioDevice(audio_params_, pipeline_backend->GetAudio())) {
- backend_ = pipeline_backend.Pass();
- }
- *success = backend_ != nullptr;
+ backend_ = audio_manager->CreateMediaPipelineBackend(device_params);
+ if (backend_)
+ decoder_ = InitializeBackend(audio_params_, backend_.get(), this);
+ *success = decoder_ != nullptr;
completion_event->Signal();
}
void Close() {
DCHECK(thread_checker_.CalledOnValidThread());
- if (backend_) {
- backend_->GetClock()->SetState(MediaClockDevice::kStateIdle);
- backend_->GetAudio()->SetState(AudioPipelineDevice::kStateIdle);
- }
+ if (backend_)
+ backend_->Stop();
backend_.reset();
backend_task_runner_.reset();
}
void Start() {
DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(backend_);
- MediaClockDevice* clock_device = backend_->GetClock();
- clock_device->SetState(MediaClockDevice::kStateRunning);
- clock_device->SetRate(1.0f);
-
- AudioPipelineDevice* audio_device = backend_->GetAudio();
- audio_device->SetState(AudioPipelineDevice::kStateRunning);
+ if (first_start_) {
+ first_start_ = false;
+ backend_->Start(0);
+ } else {
+ backend_->Resume();
+ }
}
void Stop() {
DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(backend_);
- MediaClockDevice* clock_device = backend_->GetClock();
- clock_device->SetRate(0.0f);
+ backend_->Pause();
}
- void PushFrame(scoped_refptr<media::DecoderBufferBase> decoder_buffer,
- const PushFrameCompletionCallback& completion_cb) {
+ void PushBuffer(scoped_refptr<media::DecoderBufferBase> decoder_buffer,
+ const PushBufferCompletionCallback& completion_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(decoder_);
+ DCHECK(!completion_cb.is_null());
+ DCHECK(completion_cb_.is_null());
+ if (error_) {
+ completion_cb.Run(false);
+ return;
+ }
- AudioPipelineDevice* audio_device = backend_->GetAudio();
- MediaComponentDevice::FrameStatus status =
- audio_device->PushFrame(nullptr, // decrypt_context
- new CastDecoderBufferImpl(decoder_buffer),
- new media::FrameStatusCBImpl(base::Bind(
- &Backend::OnPushFrameStatus,
- base::Unretained(this), completion_cb)));
+ backend_buffer_.set_buffer(decoder_buffer);
- if (status != MediaComponentDevice::kFramePending)
- OnPushFrameStatus(completion_cb, status);
+ MediaPipelineBackend::BufferStatus status =
+ decoder_->PushBuffer(nullptr /* decrypt_context */, &backend_buffer_);
+ completion_cb_ = completion_cb;
+ if (status != MediaPipelineBackend::kBufferPending)
+ OnPushBufferComplete(decoder_, status);
}
void SetVolume(double volume) {
DCHECK(thread_checker_.CalledOnValidThread());
-
- AudioPipelineDevice* audio_device = backend_->GetAudio();
- audio_device->SetStreamVolumeMultiplier(volume);
+ DCHECK(decoder_);
+ decoder_->SetVolume(volume);
}
- private:
- void OnPushFrameStatus(const PushFrameCompletionCallback& completion_cb,
- MediaComponentDevice::FrameStatus status) {
+ // MediaPipelineBackend::Delegate implementation
+ void OnVideoResolutionChanged(MediaPipelineBackend::VideoDecoder* decoder,
+ const Size& size) override {}
+
+ void OnPushBufferComplete(
+ MediaPipelineBackend::Decoder* decoder,
+ MediaPipelineBackend::BufferStatus status) override {
DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK_NE(status, MediaComponentDevice::kFramePending);
+ DCHECK_NE(status, MediaPipelineBackend::kBufferPending);
+
+ base::ResetAndReturn(&completion_cb_)
+ .Run(status == MediaPipelineBackend::kBufferSuccess);
+ }
+
+ void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override {}
- completion_cb.Run(status == MediaComponentDevice::kFrameSuccess);
+ void OnDecoderError(MediaPipelineBackend::Decoder* decoder) override {
+ error_ = true;
+ if (!completion_cb_.is_null())
+ OnPushBufferComplete(decoder_, MediaPipelineBackend::kBufferFailed);
}
+ private:
const ::media::AudioParameters audio_params_;
scoped_ptr<MediaPipelineBackend> backend_;
scoped_ptr<TaskRunnerImpl> backend_task_runner_;
+ MediaPipelineBackend::AudioDecoder* decoder_;
+ PushBufferCompletionCallback completion_cb_;
+ bool first_start_;
+ bool error_;
+ CastDecoderBufferImpl backend_buffer_;
base::ThreadChecker thread_checker_;
DISALLOW_COPY_AND_ASSIGN(Backend);
};
@@ -248,7 +262,7 @@ void CastAudioOutputStream::Start(AudioSourceCallback* source_callback) {
next_push_time_ = base::TimeTicks::Now();
if (!push_in_progress_) {
audio_task_runner_->PostTask(FROM_HERE,
- base::Bind(&CastAudioOutputStream::PushFrame,
+ base::Bind(&CastAudioOutputStream::PushBuffer,
weak_factory_.GetWeakPtr()));
push_in_progress_ = true;
}
@@ -288,7 +302,7 @@ void CastAudioOutputStream::OnClosed() {
audio_manager_->ReleaseOutputStream(this);
}
-void CastAudioOutputStream::PushFrame() {
+void CastAudioOutputStream::PushBuffer() {
DCHECK(audio_task_runner_->BelongsToCurrentThread());
DCHECK(push_in_progress_);
@@ -307,15 +321,17 @@ void CastAudioOutputStream::PushFrame() {
audio_bus_->ToInterleaved(frame_count, audio_params_.bits_per_sample() / 8,
decoder_buffer_->writable_data());
- auto completion_cb = ::media::BindToCurrentLoop(base::Bind(
- &CastAudioOutputStream::OnPushFrameComplete, weak_factory_.GetWeakPtr()));
- backend_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&Backend::PushFrame, base::Unretained(backend_.get()),
- decoder_buffer_, completion_cb));
+ auto completion_cb = ::media::BindToCurrentLoop(
+ base::Bind(&CastAudioOutputStream::OnPushBufferComplete,
+ weak_factory_.GetWeakPtr()));
+ backend_task_runner_->PostTask(FROM_HERE,
+ base::Bind(&Backend::PushBuffer,
+ base::Unretained(backend_.get()),
+ decoder_buffer_,
+ completion_cb));
}
-void CastAudioOutputStream::OnPushFrameComplete(bool success) {
+void CastAudioOutputStream::OnPushBufferComplete(bool success) {
DCHECK(audio_task_runner_->BelongsToCurrentThread());
DCHECK(push_in_progress_);
@@ -329,8 +345,8 @@ void CastAudioOutputStream::OnPushFrameComplete(bool success) {
return;
}
- // Schedule next push frame.
- // Need to account for time spent in pulling and pushing frame as well
+ // Schedule next push buffer.
+ // Need to account for time spent in pulling and pushing buffer as well
// as the imprecision of PostDelayedTask().
const base::TimeTicks now = base::TimeTicks::Now();
base::TimeDelta delay = next_push_time_ + buffer_duration_ - now;
@@ -339,7 +355,8 @@ void CastAudioOutputStream::OnPushFrameComplete(bool success) {
audio_task_runner_->PostDelayedTask(
FROM_HERE,
- base::Bind(&CastAudioOutputStream::PushFrame, weak_factory_.GetWeakPtr()),
+ base::Bind(&CastAudioOutputStream::PushBuffer,
+ weak_factory_.GetWeakPtr()),
delay);
push_in_progress_ = true;
}
diff --git a/chromecast/media/audio/cast_audio_output_stream.h b/chromecast/media/audio/cast_audio_output_stream.h
index 222947f..6c54fce 100644
--- a/chromecast/media/audio/cast_audio_output_stream.h
+++ b/chromecast/media/audio/cast_audio_output_stream.h
@@ -39,8 +39,8 @@ class CastAudioOutputStream : public ::media::AudioOutputStream {
class Backend;
void OnClosed();
- void PushFrame();
- void OnPushFrameComplete(bool success);
+ void PushBuffer();
+ void OnPushBufferComplete(bool success);
const ::media::AudioParameters audio_params_;
CastAudioManager* const audio_manager_;
diff --git a/chromecast/media/audio/cast_audio_output_stream_unittest.cc b/chromecast/media/audio/cast_audio_output_stream_unittest.cc
index 4869d1f..7de01be 100644
--- a/chromecast/media/audio/cast_audio_output_stream_unittest.cc
+++ b/chromecast/media/audio/cast_audio_output_stream_unittest.cc
@@ -8,11 +8,10 @@
#include "chromecast/media/audio/cast_audio_manager.h"
#include "chromecast/media/audio/cast_audio_output_stream.h"
#include "chromecast/media/base/media_message_loop.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
+#include "chromecast/media/cma/backend/media_pipeline_backend_default.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
#include "chromecast/public/media/decoder_config.h"
#include "chromecast/public/media/decrypt_context.h"
-#include "chromecast/public/media/media_clock_device.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -29,31 +28,7 @@ void RunUntilIdle(base::TaskRunner* task_runner) {
completion_event.Wait();
}
-class FakeClockDevice : public MediaClockDevice {
- public:
- FakeClockDevice() : state_(kStateUninitialized), rate_(0.f) {}
- ~FakeClockDevice() override {}
-
- State GetState() const override { return state_; }
- bool SetState(State new_state) override {
- state_ = new_state;
- return true;
- }
- bool ResetTimeline(int64_t time_microseconds) override { return true; }
- bool SetRate(float rate) override {
- rate_ = rate;
- return true;
- }
- int64_t GetTimeMicroseconds() override { return 0; }
-
- float rate() const { return rate_; }
-
- private:
- State state_;
- float rate_;
-};
-
-class FakeAudioPipelineDevice : public AudioPipelineDevice {
+class FakeAudioDecoder : public MediaPipelineBackend::AudioDecoder {
public:
enum PipelineStatus {
PIPELINE_STATUS_OK,
@@ -61,99 +36,127 @@ class FakeAudioPipelineDevice : public AudioPipelineDevice {
PIPELINE_STATUS_ERROR
};
- FakeAudioPipelineDevice()
- : state_(kStateUninitialized),
- volume_multiplier_(1.0f),
+ FakeAudioDecoder()
+ : volume_(1.0f),
pipeline_status_(PIPELINE_STATUS_OK),
- pushed_frame_count_(0) {}
- ~FakeAudioPipelineDevice() override {}
-
- // AudioPipelineDevice overrides.
- void SetClient(Client* client) override {}
- bool SetState(State new_state) override {
- state_ = new_state;
- return true;
- }
- State GetState() const override { return state_; }
- bool SetStartPts(int64_t microseconds) override { return false; }
- FrameStatus PushFrame(DecryptContext* decrypt_context,
- CastDecoderBuffer* buffer,
- FrameStatusCB* completion_cb) override {
- last_frame_decrypt_context_.reset(decrypt_context);
- last_frame_buffer_.reset(buffer);
- last_frame_completion_cb_.reset(completion_cb);
- ++pushed_frame_count_;
+ pending_push_(false),
+ pushed_buffer_count_(0),
+ last_decrypt_context_(nullptr),
+ last_buffer_(nullptr),
+ delegate_(nullptr) {}
+ ~FakeAudioDecoder() override {}
+
+ // MediaPipelineBackend::AudioDecoder overrides.
+ BufferStatus PushBuffer(DecryptContext* decrypt_context,
+ CastDecoderBuffer* buffer) override {
+ last_decrypt_context_ = decrypt_context;
+ last_buffer_ = buffer;
+ ++pushed_buffer_count_;
switch (pipeline_status_) {
case PIPELINE_STATUS_OK:
- return kFrameSuccess;
+ return MediaPipelineBackend::kBufferSuccess;
case PIPELINE_STATUS_BUSY:
- return kFramePending;
+ pending_push_ = true;
+ return MediaPipelineBackend::kBufferPending;
case PIPELINE_STATUS_ERROR:
- return kFrameFailed;
+ return MediaPipelineBackend::kBufferFailed;
default:
NOTREACHED();
+ return MediaPipelineBackend::kBufferFailed;
}
-
- // This will never be reached but is necessary for compiler warnings.
- return kFrameFailed;
}
- RenderingDelay GetRenderingDelay() const override { return RenderingDelay(); }
- bool GetStatistics(Statistics* stats) const override { return false; }
+ void GetStatistics(Statistics* statistics) override {}
bool SetConfig(const AudioConfig& config) override {
config_ = config;
return true;
}
- void SetStreamVolumeMultiplier(float multiplier) override {
- volume_multiplier_ = multiplier;
+ bool SetVolume(float volume) override {
+ volume_ = volume;
+ return true;
}
+ RenderingDelay GetRenderingDelay() override { return RenderingDelay(); }
const AudioConfig& config() const { return config_; }
- float volume_multiplier() const { return volume_multiplier_; }
- void set_pipeline_status(PipelineStatus status) { pipeline_status_ = status; }
- unsigned pushed_frame_count() const { return pushed_frame_count_; }
- DecryptContext* last_frame_decrypt_context() {
- return last_frame_decrypt_context_.get();
+ float volume() const { return volume_; }
+ void set_pipeline_status(PipelineStatus status) {
+ if (pipeline_status_ == PIPELINE_STATUS_BUSY &&
+ status == PIPELINE_STATUS_OK && pending_push_) {
+ pending_push_ = false;
+ delegate_->OnPushBufferComplete(this,
+ MediaPipelineBackend::kBufferSuccess);
+ }
+ pipeline_status_ = status;
}
- CastDecoderBuffer* last_frame_buffer() { return last_frame_buffer_.get(); }
- FrameStatusCB* last_frame_completion_cb() {
- return last_frame_completion_cb_.get();
+ unsigned pushed_buffer_count() const { return pushed_buffer_count_; }
+ DecryptContext* last_decrypt_context() { return last_decrypt_context_; }
+ CastDecoderBuffer* last_buffer() { return last_buffer_; }
+ void set_delegate(MediaPipelineBackend::Delegate* delegate) {
+ delegate_ = delegate;
}
private:
- State state_;
AudioConfig config_;
- float volume_multiplier_;
+ float volume_;
PipelineStatus pipeline_status_;
- unsigned pushed_frame_count_;
- scoped_ptr<DecryptContext> last_frame_decrypt_context_;
- scoped_ptr<CastDecoderBuffer> last_frame_buffer_;
- scoped_ptr<FrameStatusCB> last_frame_completion_cb_;
+ bool pending_push_;
+ int pushed_buffer_count_;
+ DecryptContext* last_decrypt_context_;
+ CastDecoderBuffer* last_buffer_;
+ MediaPipelineBackend::Delegate* delegate_;
};
class FakeMediaPipelineBackend : public MediaPipelineBackend {
public:
+ enum State { kStateStopped, kStateRunning, kStatePaused };
+
+ FakeMediaPipelineBackend() : state_(kStateStopped), audio_decoder_(nullptr) {}
~FakeMediaPipelineBackend() override {}
- MediaClockDevice* GetClock() override {
- if (!clock_device_)
- clock_device_.reset(new FakeClockDevice);
- return clock_device_.get();
- }
- AudioPipelineDevice* GetAudio() override {
- if (!audio_device_)
- audio_device_.reset(new FakeAudioPipelineDevice);
- return audio_device_.get();
+ // MediaPipelineBackend implementation:
+ AudioDecoder* CreateAudioDecoder() override {
+ DCHECK(!audio_decoder_);
+ audio_decoder_ = new FakeAudioDecoder();
+ return audio_decoder_;
}
- VideoPipelineDevice* GetVideo() override {
+ VideoDecoder* CreateVideoDecoder() override {
NOTREACHED();
return nullptr;
}
+ bool Initialize(Delegate* delegate) override {
+ audio_decoder_->set_delegate(delegate);
+ return true;
+ }
+ bool Start(int64_t start_pts) override {
+ DCHECK(state_ == kStateStopped);
+ state_ = kStateRunning;
+ return true;
+ }
+ bool Stop() override {
+ state_ = kStateStopped;
+ return true;
+ }
+ bool Pause() override {
+ DCHECK(state_ == kStateRunning);
+ state_ = kStatePaused;
+ return true;
+ }
+ bool Resume() override {
+ DCHECK(state_ == kStatePaused);
+ state_ = kStateRunning;
+ return true;
+ }
+ int64_t GetCurrentPts() override { return 0; }
+ bool SetPlaybackRate(float rate) override { return true; }
+
+ State state() const { return state_; }
+ FakeAudioDecoder* decoder() const { return audio_decoder_; }
+
private:
- scoped_ptr<FakeClockDevice> clock_device_;
- scoped_ptr<FakeAudioPipelineDevice> audio_device_;
+ State state_;
+ FakeAudioDecoder* audio_decoder_;
};
class FakeAudioSourceCallback
@@ -187,7 +190,8 @@ class FakeAudioManager : public CastAudioManager {
DCHECK(media::MediaMessageLoop::GetTaskRunner()->BelongsToCurrentThread());
DCHECK(!media_pipeline_backend_);
- scoped_ptr<FakeMediaPipelineBackend> backend(new FakeMediaPipelineBackend);
+ scoped_ptr<FakeMediaPipelineBackend> backend(
+ new FakeMediaPipelineBackend());
// Cache the backend locally to be used by tests.
media_pipeline_backend_ = backend.get();
return backend.Pass();
@@ -236,15 +240,14 @@ class CastAudioOutputStreamTest : public ::testing::Test {
return ::media::AudioParameters(format_, channel_layout_, sample_rate_,
bits_per_sample_, frames_per_buffer_);
}
- FakeClockDevice* GetClock() {
- MediaPipelineBackend* backend = audio_manager_->media_pipeline_backend();
- return backend ? static_cast<FakeClockDevice*>(backend->GetClock())
- : nullptr;
+
+ FakeMediaPipelineBackend* GetBackend() {
+ return audio_manager_->media_pipeline_backend();
}
- FakeAudioPipelineDevice* GetAudio() {
- MediaPipelineBackend* backend = audio_manager_->media_pipeline_backend();
- return backend ? static_cast<FakeAudioPipelineDevice*>(backend->GetAudio())
- : nullptr;
+
+ FakeAudioDecoder* GetAudio() {
+ FakeMediaPipelineBackend* backend = GetBackend();
+ return (backend ? backend->decoder() : nullptr);
}
// Synchronous utility functions.
@@ -373,9 +376,9 @@ TEST_F(CastAudioOutputStreamTest, Format) {
ASSERT_TRUE(stream);
EXPECT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
- const AudioConfig& audio_config = audio_device->config();
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
+ const AudioConfig& audio_config = audio_decoder->config();
EXPECT_EQ(kCodecPCM, audio_config.codec);
EXPECT_EQ(kSampleFormatS16, audio_config.sample_format);
EXPECT_FALSE(audio_config.is_encrypted);
@@ -393,9 +396,9 @@ TEST_F(CastAudioOutputStreamTest, ChannelLayout) {
ASSERT_TRUE(stream);
EXPECT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
- const AudioConfig& audio_config = audio_device->config();
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
+ const AudioConfig& audio_config = audio_decoder->config();
EXPECT_EQ(::media::ChannelLayoutToChannelCount(channel_layout_),
audio_config.channel_number);
@@ -409,9 +412,9 @@ TEST_F(CastAudioOutputStreamTest, SampleRate) {
ASSERT_TRUE(stream);
EXPECT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
- const AudioConfig& audio_config = audio_device->config();
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
+ const AudioConfig& audio_config = audio_decoder->config();
EXPECT_EQ(sample_rate_, audio_config.samples_per_second);
CloseStream(stream);
@@ -423,9 +426,9 @@ TEST_F(CastAudioOutputStreamTest, BitsPerSample) {
ASSERT_TRUE(stream);
EXPECT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
- const AudioConfig& audio_config = audio_device->config();
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
+ const AudioConfig& audio_config = audio_decoder->config();
EXPECT_EQ(bits_per_sample_ / 8, audio_config.bytes_per_channel);
CloseStream(stream);
@@ -437,25 +440,19 @@ TEST_F(CastAudioOutputStreamTest, DeviceState) {
EXPECT_FALSE(GetAudio());
EXPECT_TRUE(OpenStream(stream));
- AudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
- FakeClockDevice* clock_device = GetClock();
- ASSERT_TRUE(clock_device);
- EXPECT_EQ(AudioPipelineDevice::kStateIdle, audio_device->GetState());
- EXPECT_EQ(MediaClockDevice::kStateIdle, clock_device->GetState());
- EXPECT_EQ(1.f, clock_device->rate());
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
+ FakeMediaPipelineBackend* backend = GetBackend();
+ ASSERT_TRUE(backend);
+ EXPECT_EQ(FakeMediaPipelineBackend::kStateStopped, backend->state());
scoped_ptr<FakeAudioSourceCallback> source_callback(
new FakeAudioSourceCallback);
StartStream(stream, source_callback.get());
- EXPECT_EQ(AudioPipelineDevice::kStateRunning, audio_device->GetState());
- EXPECT_EQ(MediaClockDevice::kStateRunning, clock_device->GetState());
- EXPECT_EQ(1.f, clock_device->rate());
+ EXPECT_EQ(FakeMediaPipelineBackend::kStateRunning, backend->state());
StopStream(stream);
- EXPECT_EQ(AudioPipelineDevice::kStateRunning, audio_device->GetState());
- EXPECT_EQ(MediaClockDevice::kStateRunning, clock_device->GetState());
- EXPECT_EQ(0.f, clock_device->rate());
+ EXPECT_EQ(FakeMediaPipelineBackend::kStatePaused, backend->state());
CloseStream(stream);
EXPECT_FALSE(GetAudio());
@@ -466,13 +463,12 @@ TEST_F(CastAudioOutputStreamTest, PushFrame) {
ASSERT_TRUE(stream);
EXPECT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
// Verify initial state.
- EXPECT_EQ(0u, audio_device->pushed_frame_count());
- EXPECT_FALSE(audio_device->last_frame_decrypt_context());
- EXPECT_FALSE(audio_device->last_frame_buffer());
- EXPECT_FALSE(audio_device->last_frame_completion_cb());
+ EXPECT_EQ(0u, audio_decoder->pushed_buffer_count());
+ EXPECT_FALSE(audio_decoder->last_decrypt_context());
+ EXPECT_FALSE(audio_decoder->last_buffer());
scoped_ptr<FakeAudioSourceCallback> source_callback(
new FakeAudioSourceCallback);
@@ -480,17 +476,16 @@ TEST_F(CastAudioOutputStreamTest, PushFrame) {
StopStream(stream);
// Verify that the stream pushed frames to the backend.
- EXPECT_LT(0u, audio_device->pushed_frame_count());
+ EXPECT_LT(0u, audio_decoder->pushed_buffer_count());
// DecryptContext is always NULL becuase of "raw" audio.
- EXPECT_FALSE(audio_device->last_frame_decrypt_context());
- EXPECT_TRUE(audio_device->last_frame_buffer());
- EXPECT_TRUE(audio_device->last_frame_completion_cb());
+ EXPECT_FALSE(audio_decoder->last_decrypt_context());
+ EXPECT_TRUE(audio_decoder->last_buffer());
// Verify decoder buffer.
::media::AudioParameters audio_params = GetAudioParams();
const size_t expected_frame_size =
static_cast<size_t>(audio_params.GetBytesPerBuffer());
- const CastDecoderBuffer* buffer = audio_device->last_frame_buffer();
+ const CastDecoderBuffer* buffer = audio_decoder->last_buffer();
EXPECT_TRUE(buffer->data());
EXPECT_EQ(expected_frame_size, buffer->data_size());
EXPECT_FALSE(buffer->decrypt_config()); // Null because of raw audio.
@@ -507,17 +502,16 @@ TEST_F(CastAudioOutputStreamTest, DeviceBusy) {
ASSERT_TRUE(stream);
EXPECT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
- audio_device->set_pipeline_status(
- FakeAudioPipelineDevice::PIPELINE_STATUS_BUSY);
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
+ audio_decoder->set_pipeline_status(FakeAudioDecoder::PIPELINE_STATUS_BUSY);
scoped_ptr<FakeAudioSourceCallback> source_callback(
new FakeAudioSourceCallback);
StartStream(stream, source_callback.get());
// Make sure that one frame was pushed.
- EXPECT_EQ(1u, audio_device->pushed_frame_count());
+ EXPECT_EQ(1u, audio_decoder->pushed_buffer_count());
// No error must be reported to source callback.
EXPECT_FALSE(source_callback->error());
@@ -528,17 +522,20 @@ TEST_F(CastAudioOutputStreamTest, DeviceBusy) {
base::PlatformThread::Sleep(pause);
RunUntilIdle(audio_task_runner_.get());
RunUntilIdle(backend_task_runner_.get());
- EXPECT_EQ(1u, audio_device->pushed_frame_count());
+ EXPECT_EQ(1u, audio_decoder->pushed_buffer_count());
// Unblock the pipeline and verify that PushFrame resumes.
- audio_device->set_pipeline_status(
- FakeAudioPipelineDevice::PIPELINE_STATUS_OK);
- audio_device->last_frame_completion_cb()->Run(
- MediaComponentDevice::kFrameSuccess);
+ // (have to post because this directly calls buffer complete)
+ backend_task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(&FakeAudioDecoder::set_pipeline_status,
+ base::Unretained(audio_decoder),
+ FakeAudioDecoder::PIPELINE_STATUS_OK));
+
base::PlatformThread::Sleep(pause);
RunUntilIdle(audio_task_runner_.get());
RunUntilIdle(backend_task_runner_.get());
- EXPECT_LT(1u, audio_device->pushed_frame_count());
+ EXPECT_LT(1u, audio_decoder->pushed_buffer_count());
EXPECT_FALSE(source_callback->error());
StopStream(stream);
@@ -550,17 +547,16 @@ TEST_F(CastAudioOutputStreamTest, DeviceError) {
ASSERT_TRUE(stream);
EXPECT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
- audio_device->set_pipeline_status(
- FakeAudioPipelineDevice::PIPELINE_STATUS_ERROR);
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
+ audio_decoder->set_pipeline_status(FakeAudioDecoder::PIPELINE_STATUS_ERROR);
scoped_ptr<FakeAudioSourceCallback> source_callback(
new FakeAudioSourceCallback);
StartStream(stream, source_callback.get());
// Make sure that AudioOutputStream attempted to push the initial frame.
- EXPECT_LT(0u, audio_device->pushed_frame_count());
+ EXPECT_LT(0u, audio_decoder->pushed_buffer_count());
// AudioOutputStream must report error to source callback.
EXPECT_TRUE(source_callback->error());
@@ -572,17 +568,17 @@ TEST_F(CastAudioOutputStreamTest, Volume) {
::media::AudioOutputStream* stream = CreateStream();
ASSERT_TRUE(stream);
ASSERT_TRUE(OpenStream(stream));
- FakeAudioPipelineDevice* audio_device = GetAudio();
- ASSERT_TRUE(audio_device);
+ FakeAudioDecoder* audio_decoder = GetAudio();
+ ASSERT_TRUE(audio_decoder);
double volume = GetStreamVolume(stream);
EXPECT_EQ(1.0, volume);
- EXPECT_EQ(1.0f, audio_device->volume_multiplier());
+ EXPECT_EQ(1.0f, audio_decoder->volume());
SetStreamVolume(stream, 0.5);
volume = GetStreamVolume(stream);
EXPECT_EQ(0.5, volume);
- EXPECT_EQ(0.5f, audio_device->volume_multiplier());
+ EXPECT_EQ(0.5f, audio_decoder->volume());
CloseStream(stream);
}
@@ -606,12 +602,9 @@ TEST_F(CastAudioOutputStreamTest, StartStopStart) {
RunUntilIdle(audio_task_runner_.get());
RunUntilIdle(backend_task_runner_.get());
- AudioPipelineDevice* audio_device = GetAudio();
- FakeClockDevice* clock_device = GetClock();
- ASSERT_TRUE(audio_device && clock_device);
- EXPECT_EQ(AudioPipelineDevice::kStateRunning, audio_device->GetState());
- EXPECT_EQ(MediaClockDevice::kStateRunning, clock_device->GetState());
- EXPECT_EQ(1.f, clock_device->rate());
+ FakeAudioDecoder* audio_device = GetAudio();
+ EXPECT_TRUE(audio_device);
+ EXPECT_EQ(FakeMediaPipelineBackend::kStateRunning, GetBackend()->state());
CloseStream(stream);
}
diff --git a/chromecast/media/base/cast_media_default.cc b/chromecast/media/base/cast_media_default.cc
index 9c83431..1f44a68 100644
--- a/chromecast/media/base/cast_media_default.cc
+++ b/chromecast/media/base/cast_media_default.cc
@@ -39,7 +39,7 @@ VideoPlane* CastMediaShlib::GetVideoPlane() {
MediaPipelineBackend* CastMediaShlib::CreateMediaPipelineBackend(
const MediaPipelineDeviceParams& params) {
- return new MediaPipelineBackendDefault(params);
+ return new MediaPipelineBackendDefault();
}
MediaCodecSupportShlib::CodecSupport MediaCodecSupportShlib::IsSupported(
diff --git a/chromecast/media/cma/BUILD.gn b/chromecast/media/cma/BUILD.gn
index c9da629..b4b1f6d 100644
--- a/chromecast/media/cma/BUILD.gn
+++ b/chromecast/media/cma/BUILD.gn
@@ -6,7 +6,6 @@ group("cma") {
deps = [
"//chromecast/media/cma/backend",
"//chromecast/media/cma/base",
- "//chromecast/media/cma/filters",
"//chromecast/media/cma/ipc",
"//chromecast/media/cma/ipc_streamer",
"//chromecast/media/cma/pipeline",
diff --git a/chromecast/media/cma/backend/BUILD.gn b/chromecast/media/cma/backend/BUILD.gn
index 7507b7d..57f67ae 100644
--- a/chromecast/media/cma/backend/BUILD.gn
+++ b/chromecast/media/cma/backend/BUILD.gn
@@ -4,22 +4,8 @@
source_set("backend") {
sources = [
- "audio_pipeline_device_default.cc",
- "audio_pipeline_device_default.h",
- "media_clock_device_default.cc",
- "media_clock_device_default.h",
- "media_component_device_default.cc",
- "media_component_device_default.h",
"media_pipeline_backend_default.cc",
"media_pipeline_backend_default.h",
- "video_pipeline_device_default.cc",
- "video_pipeline_device_default.h",
- ]
-
- # This target cannot depend on //media. Include these headers directly.
- sources += [
- "//media/base/media_export.h",
- "//media/base/timestamp_constants.h",
]
public_deps = [
diff --git a/chromecast/media/cma/backend/audio_pipeline_device_default.cc b/chromecast/media/cma/backend/audio_pipeline_device_default.cc
deleted file mode 100644
index dede977..0000000
--- a/chromecast/media/cma/backend/audio_pipeline_device_default.cc
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/backend/audio_pipeline_device_default.h"
-
-#include "chromecast/media/cma/backend/media_component_device_default.h"
-
-namespace chromecast {
-namespace media {
-
-AudioPipelineDeviceDefault::AudioPipelineDeviceDefault(
- const MediaPipelineDeviceParams& params,
- MediaClockDevice* media_clock_device)
- : pipeline_(new MediaComponentDeviceDefault(params, media_clock_device)) {
- thread_checker_.DetachFromThread();
-}
-
-AudioPipelineDeviceDefault::~AudioPipelineDeviceDefault() {
-}
-
-void AudioPipelineDeviceDefault::SetClient(Client* client) {
- pipeline_->SetClient(client);
-}
-
-MediaComponentDevice::State AudioPipelineDeviceDefault::GetState() const {
- return pipeline_->GetState();
-}
-
-bool AudioPipelineDeviceDefault::SetState(State new_state) {
- bool success = pipeline_->SetState(new_state);
- if (!success)
- return false;
-
- if (new_state == kStateIdle) {
- DCHECK(IsValidConfig(config_));
- }
- if (new_state == kStateUninitialized) {
- config_ = AudioConfig();
- }
- return true;
-}
-
-bool AudioPipelineDeviceDefault::SetStartPts(int64_t time_microseconds) {
- return pipeline_->SetStartPts(time_microseconds);
-}
-
-MediaComponentDevice::FrameStatus AudioPipelineDeviceDefault::PushFrame(
- DecryptContext* decrypt_context,
- CastDecoderBuffer* buffer,
- FrameStatusCB* completion_cb) {
- return pipeline_->PushFrame(decrypt_context, buffer, completion_cb);
-}
-
-AudioPipelineDeviceDefault::RenderingDelay
-AudioPipelineDeviceDefault::GetRenderingDelay() const {
- return pipeline_->GetRenderingDelay();
-}
-
-bool AudioPipelineDeviceDefault::SetConfig(const AudioConfig& config) {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!IsValidConfig(config))
- return false;
- config_ = config;
- if (config.extra_data_size > 0)
- config_extra_data_.assign(config.extra_data,
- config.extra_data + config.extra_data_size);
- else
- config_extra_data_.clear();
- return true;
-}
-
-void AudioPipelineDeviceDefault::SetStreamVolumeMultiplier(float multiplier) {
- DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-bool AudioPipelineDeviceDefault::GetStatistics(Statistics* stats) const {
- return pipeline_->GetStatistics(stats);
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/backend/audio_pipeline_device_default.h b/chromecast/media/cma/backend/audio_pipeline_device_default.h
deleted file mode 100644
index 814a027..0000000
--- a/chromecast/media/cma/backend/audio_pipeline_device_default.h
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_BACKEND_AUDIO_PIPELINE_DEVICE_DEFAULT_H_
-#define CHROMECAST_MEDIA_CMA_BACKEND_AUDIO_PIPELINE_DEVICE_DEFAULT_H_
-
-#include <vector>
-
-#include "base/macros.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/threading/thread_checker.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
-#include "chromecast/public/media/decoder_config.h"
-
-namespace chromecast {
-namespace media {
-
-class MediaClockDevice;
-class MediaComponentDeviceDefault;
-struct MediaPipelineDeviceParams;
-
-class AudioPipelineDeviceDefault : public AudioPipelineDevice {
- public:
- AudioPipelineDeviceDefault(const MediaPipelineDeviceParams& params,
- MediaClockDevice* media_clock_device);
- ~AudioPipelineDeviceDefault() override;
-
- // AudioPipelineDevice implementation.
- void SetClient(Client* client) override;
- State GetState() const override;
- bool SetState(State new_state) override;
- bool SetStartPts(int64_t time_microseconds) override;
- FrameStatus PushFrame(DecryptContext* decrypt_context,
- CastDecoderBuffer* buffer,
- FrameStatusCB* completion_cb) override;
- RenderingDelay GetRenderingDelay() const override;
- bool SetConfig(const AudioConfig& config) override;
- void SetStreamVolumeMultiplier(float multiplier) override;
- bool GetStatistics(Statistics* stats) const override;
-
- private:
- scoped_ptr<MediaComponentDeviceDefault> pipeline_;
-
- AudioConfig config_;
- std::vector<uint8_t> config_extra_data_;
-
- base::ThreadChecker thread_checker_;
- DISALLOW_COPY_AND_ASSIGN(AudioPipelineDeviceDefault);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_AUDIO_PIPELINE_DEVICE_DEFAULT_H_
diff --git a/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
index 84004a8..beca1ab 100644
--- a/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
+++ b/chromecast/media/cma/backend/audio_video_pipeline_device_unittest.cc
@@ -20,18 +20,15 @@
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "chromecast/base/task_runner_impl.h"
+#include "chromecast/media/cma/base/cast_decoder_buffer_impl.h"
#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
#include "chromecast/media/cma/base/decoder_config_adapter.h"
#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
-#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
#include "chromecast/public/cast_media_shlib.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
#include "chromecast/public/media/decoder_config.h"
-#include "chromecast/public/media/media_clock_device.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/media_pipeline_device_params.h"
-#include "chromecast/public/media/video_pipeline_device.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
@@ -42,9 +39,6 @@ namespace media {
namespace {
-typedef ScopedVector<MediaComponentDeviceFeederForTest>::iterator
- ComponentDeviceIterator;
-
const base::TimeDelta kMonitorLoopDelay = base::TimeDelta::FromMilliseconds(20);
base::FilePath GetTestDataFilePath(const std::string& name) {
@@ -59,7 +53,8 @@ base::FilePath GetTestDataFilePath(const std::string& name) {
} // namespace
-class AudioVideoPipelineDeviceTest : public testing::Test {
+class AudioVideoPipelineDeviceTest : public testing::Test,
+ public MediaPipelineBackend::Delegate {
public:
struct PauseInfo {
PauseInfo() {}
@@ -96,25 +91,31 @@ class AudioVideoPipelineDeviceTest : public testing::Test {
void Start();
+ // MediaPipelineBackend::Delegate implementation:
+ void OnVideoResolutionChanged(MediaPipelineBackend::VideoDecoder* decoder,
+ const Size& size) override {}
+ void OnPushBufferComplete(MediaPipelineBackend::Decoder* decoder,
+ MediaPipelineBackend::BufferStatus status) override;
+ void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override;
+ void OnDecoderError(MediaPipelineBackend::Decoder* decoder) override;
+
private:
void Initialize();
void LoadAudioStream(const std::string& filename);
void LoadVideoStream(const std::string& filename, bool raw_h264);
+ void FeedAudioBuffer();
+ void FeedVideoBuffer();
+
void MonitorLoop();
void OnPauseCompleted();
- void OnEos(MediaComponentDeviceFeederForTest* device_feeder);
-
scoped_ptr<TaskRunnerImpl> task_runner_;
scoped_ptr<MediaPipelineBackend> backend_;
- MediaClockDevice* media_clock_device_;
-
- // Devices to feed
- ScopedVector<MediaComponentDeviceFeederForTest>
- component_device_feeders_;
+ CastDecoderBufferImpl backend_audio_buffer_;
+ CastDecoderBufferImpl backend_video_buffer_;
// Current media time.
base::TimeDelta pause_time_;
@@ -123,11 +124,25 @@ class AudioVideoPipelineDeviceTest : public testing::Test {
std::vector<PauseInfo> pause_pattern_;
int pause_pattern_idx_;
+ BufferList audio_buffers_;
+ BufferList video_buffers_;
+
+ MediaPipelineBackend::AudioDecoder* audio_decoder_;
+ MediaPipelineBackend::VideoDecoder* video_decoder_;
+ bool audio_feeding_completed_;
+ bool video_feeding_completed_;
+
DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
};
AudioVideoPipelineDeviceTest::AudioVideoPipelineDeviceTest()
- : pause_pattern_() {
+ : backend_audio_buffer_(nullptr),
+ backend_video_buffer_(nullptr),
+ pause_pattern_(),
+ audio_decoder_(nullptr),
+ video_decoder_(nullptr),
+ audio_feeding_completed_(true),
+ video_feeding_completed_(true) {
}
AudioVideoPipelineDeviceTest::~AudioVideoPipelineDeviceTest() {
@@ -147,6 +162,8 @@ void AudioVideoPipelineDeviceTest::ConfigureForAudioOnly(
const std::string& filename) {
Initialize();
LoadAudioStream(filename);
+ bool success = backend_->Initialize(this);
+ ASSERT_TRUE(success);
}
void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(
@@ -154,6 +171,8 @@ void AudioVideoPipelineDeviceTest::ConfigureForVideoOnly(
bool raw_h264) {
Initialize();
LoadVideoStream(filename, raw_h264);
+ bool success = backend_->Initialize(this);
+ ASSERT_TRUE(success);
}
void AudioVideoPipelineDeviceTest::ConfigureForFile(
@@ -161,38 +180,32 @@ void AudioVideoPipelineDeviceTest::ConfigureForFile(
Initialize();
LoadVideoStream(filename, false /* raw_h264 */);
LoadAudioStream(filename);
+ bool success = backend_->Initialize(this);
+ ASSERT_TRUE(success);
}
void AudioVideoPipelineDeviceTest::LoadAudioStream(
const std::string& filename) {
base::FilePath file_path = GetTestDataFilePath(filename);
DemuxResult demux_result = FFmpegDemuxForTest(file_path, true /* audio */);
- BufferList frames = demux_result.frames;
- AudioPipelineDevice* audio_pipeline_device = backend_->GetAudio();
+ audio_buffers_ = demux_result.frames;
+ audio_decoder_ = backend_->CreateAudioDecoder();
+ audio_feeding_completed_ = false;
- bool success = audio_pipeline_device->SetConfig(
- DecoderConfigAdapter::ToCastAudioConfig(kPrimary,
- demux_result.audio_config));
+ bool success =
+ audio_decoder_->SetConfig(DecoderConfigAdapter::ToCastAudioConfig(
+ kPrimary, demux_result.audio_config));
ASSERT_TRUE(success);
- VLOG(2) << "Got " << frames.size() << " audio input frames";
+ VLOG(2) << "Got " << audio_buffers_.size() << " audio input frames";
- frames.push_back(
- scoped_refptr<DecoderBufferBase>(
- new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
-
- MediaComponentDeviceFeederForTest* device_feeder =
- new MediaComponentDeviceFeederForTest(audio_pipeline_device, frames);
- device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
- base::Unretained(this),
- device_feeder));
- component_device_feeders_.push_back(device_feeder);
+ audio_buffers_.push_back(scoped_refptr<DecoderBufferBase>(
+ new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
}
void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename,
bool raw_h264) {
- BufferList frames;
VideoConfig video_config;
if (raw_h264) {
@@ -200,7 +213,8 @@ void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename,
base::MemoryMappedFile video_stream;
ASSERT_TRUE(video_stream.Initialize(file_path))
<< "Couldn't open stream file: " << file_path.MaybeAsASCII();
- frames = H264SegmenterForTest(video_stream.data(), video_stream.length());
+ video_buffers_ =
+ H264SegmenterForTest(video_stream.data(), video_stream.length());
// TODO(erickung): Either pull data from stream or make caller specify value
video_config.codec = kCodecH264;
@@ -211,59 +225,161 @@ void AudioVideoPipelineDeviceTest::LoadVideoStream(const std::string& filename,
base::FilePath file_path = GetTestDataFilePath(filename);
DemuxResult demux_result = FFmpegDemuxForTest(file_path,
/*audio*/ false);
- frames = demux_result.frames;
+ video_buffers_ = demux_result.frames;
video_config = DecoderConfigAdapter::ToCastVideoConfig(
kPrimary, demux_result.video_config);
}
- VideoPipelineDevice* video_pipeline_device = backend_->GetVideo();
-
- // Set configuration.
- bool success = video_pipeline_device->SetConfig(video_config);
+ video_decoder_ = backend_->CreateVideoDecoder();
+ video_feeding_completed_ = false;
+ bool success = video_decoder_->SetConfig(video_config);
ASSERT_TRUE(success);
- VLOG(2) << "Got " << frames.size() << " video input frames";
+ VLOG(2) << "Got " << video_buffers_.size() << " video input frames";
+
+ video_buffers_.push_back(scoped_refptr<DecoderBufferBase>(
+ new DecoderBufferAdapter(::media::DecoderBuffer::CreateEOSBuffer())));
+}
+
+void AudioVideoPipelineDeviceTest::FeedAudioBuffer() {
+ // Possibly feed one frame
+ DCHECK(!audio_buffers_.empty());
+ if (audio_feeding_completed_)
+ return;
+
+ scoped_refptr<DecoderBufferBase> buffer = audio_buffers_.front();
+ backend_audio_buffer_.set_buffer(buffer);
+
+ MediaPipelineBackend::BufferStatus status =
+ audio_decoder_->PushBuffer(nullptr, // decrypt_context
+ &backend_audio_buffer_);
+ EXPECT_NE(status, MediaPipelineBackend::kBufferFailed);
+ audio_buffers_.pop_front();
+
+ // Feeding is done, just wait for the end of stream callback.
+ if (buffer->end_of_stream() || audio_buffers_.empty()) {
+ if (audio_buffers_.empty() && !buffer->end_of_stream()) {
+ LOG(WARNING) << "Stream emptied without feeding EOS frame";
+ }
+
+ audio_feeding_completed_ = true;
+ return;
+ }
+
+ if (status == MediaPipelineBackend::kBufferPending)
+ return;
+
+ OnPushBufferComplete(audio_decoder_, MediaPipelineBackend::kBufferSuccess);
+}
+
+void AudioVideoPipelineDeviceTest::FeedVideoBuffer() {
+ // Possibly feed one frame
+ DCHECK(!video_buffers_.empty());
+ if (video_feeding_completed_)
+ return;
+
+ scoped_refptr<DecoderBufferBase> buffer = video_buffers_.front();
+ backend_video_buffer_.set_buffer(buffer);
+
+ MediaPipelineBackend::BufferStatus status =
+ video_decoder_->PushBuffer(nullptr, // decrypt_context
+ &backend_video_buffer_);
+ EXPECT_NE(status, MediaPipelineBackend::kBufferFailed);
+ video_buffers_.pop_front();
+
+ // Feeding is done, just wait for the end of stream callback.
+ if (buffer->end_of_stream() || video_buffers_.empty()) {
+ if (video_buffers_.empty() && !buffer->end_of_stream()) {
+ LOG(WARNING) << "Stream emptied without feeding EOS frame";
+ }
- frames.push_back(
- scoped_refptr<DecoderBufferBase>(new DecoderBufferAdapter(
- ::media::DecoderBuffer::CreateEOSBuffer())));
+ video_feeding_completed_ = true;
+ return;
+ }
+
+ if (status == MediaPipelineBackend::kBufferPending)
+ return;
- MediaComponentDeviceFeederForTest* device_feeder =
- new MediaComponentDeviceFeederForTest(video_pipeline_device, frames);
- device_feeder->Initialize(base::Bind(&AudioVideoPipelineDeviceTest::OnEos,
- base::Unretained(this),
- device_feeder));
- component_device_feeders_.push_back(device_feeder);
+ OnPushBufferComplete(video_decoder_, MediaPipelineBackend::kBufferSuccess);
}
void AudioVideoPipelineDeviceTest::Start() {
pause_time_ = base::TimeDelta();
pause_pattern_idx_ = 0;
- for (size_t i = 0; i < component_device_feeders_.size(); i++) {
+ if (audio_decoder_) {
base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed,
- base::Unretained(component_device_feeders_[i])));
+ FROM_HERE,
+ base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer,
+ base::Unretained(this)));
+ }
+ if (video_decoder_) {
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer,
+ base::Unretained(this)));
}
- media_clock_device_->SetState(MediaClockDevice::kStateRunning);
+ backend_->Start(0);
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE, base::Bind(&AudioVideoPipelineDeviceTest::MonitorLoop,
base::Unretained(this)));
}
+void AudioVideoPipelineDeviceTest::OnEndOfStream(
+ MediaPipelineBackend::Decoder* decoder) {
+ bool success = backend_->Stop();
+ ASSERT_TRUE(success);
+
+ if (decoder == audio_decoder_)
+ audio_decoder_ = nullptr;
+ else if (decoder == video_decoder_)
+ video_decoder_ = nullptr;
+
+ if (!audio_decoder_ && !video_decoder_)
+ base::MessageLoop::current()->QuitWhenIdle();
+}
+
+void AudioVideoPipelineDeviceTest::OnDecoderError(
+ MediaPipelineBackend::Decoder* decoder) {
+ ASSERT_TRUE(false);
+}
+
+void AudioVideoPipelineDeviceTest::OnPushBufferComplete(
+ MediaPipelineBackend::Decoder* decoder,
+ MediaPipelineBackend::BufferStatus status) {
+ EXPECT_NE(status, MediaPipelineBackend::kBufferFailed);
+
+ if (decoder == audio_decoder_) {
+ if (audio_feeding_completed_)
+ return;
+
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(&AudioVideoPipelineDeviceTest::FeedAudioBuffer,
+ base::Unretained(this)));
+ } else if (decoder == video_decoder_) {
+ if (video_feeding_completed_)
+ return;
+
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(&AudioVideoPipelineDeviceTest::FeedVideoBuffer,
+ base::Unretained(this)));
+ }
+}
+
void AudioVideoPipelineDeviceTest::MonitorLoop() {
- base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
- media_clock_device_->GetTimeMicroseconds());
+ base::TimeDelta media_time =
+ base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts());
if (!pause_pattern_.empty() &&
pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
// Do Pause
- media_clock_device_->SetRate(0.0);
- pause_time_ = base::TimeDelta::FromMicroseconds(
- media_clock_device_->GetTimeMicroseconds());
+ backend_->Pause();
+ pause_time_ = base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts());
VLOG(2) << "Pausing at " << pause_time_.InMilliseconds() << "ms for " <<
pause_pattern_[pause_pattern_idx_].length.InMilliseconds() << "ms";
@@ -285,8 +401,8 @@ void AudioVideoPipelineDeviceTest::MonitorLoop() {
void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
// Make sure the media time didn't move during that time.
- base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
- media_clock_device_->GetTimeMicroseconds());
+ base::TimeDelta media_time =
+ base::TimeDelta::FromMicroseconds(backend_->GetCurrentPts());
// TODO(damienv):
// Should be:
@@ -303,41 +419,16 @@ void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
VLOG(2) << "Pause complete, restarting media clock";
// Resume playback and frame feeding.
- media_clock_device_->SetRate(1.0);
+ backend_->Resume();
MonitorLoop();
}
-void AudioVideoPipelineDeviceTest::OnEos(
- MediaComponentDeviceFeederForTest* device_feeder) {
- for (ComponentDeviceIterator it = component_device_feeders_.begin();
- it != component_device_feeders_.end();
- ++it) {
- if (*it == device_feeder) {
- component_device_feeders_.erase(it);
- break;
- }
- }
-
- // Check if all streams finished
- if (component_device_feeders_.empty())
- base::MessageLoop::current()->QuitWhenIdle();
-}
-
void AudioVideoPipelineDeviceTest::Initialize() {
// Create the media device.
task_runner_.reset(new TaskRunnerImpl());
MediaPipelineDeviceParams params(task_runner_.get());
backend_.reset(CastMediaShlib::CreateMediaPipelineBackend(params));
- media_clock_device_ = backend_->GetClock();
-
- // Clock initialization and configuration.
- bool success =
- media_clock_device_->SetState(MediaClockDevice::kStateIdle);
- ASSERT_TRUE(success);
- success = media_clock_device_->ResetTimeline(0);
- ASSERT_TRUE(success);
- media_clock_device_->SetRate(1.0);
}
TEST_F(AudioVideoPipelineDeviceTest, Mp3Playback) {
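The rewritten test above is also a reasonable template for any client of the new API: push one buffer at a time, stop on kBufferPending, and continue from OnPushBufferComplete(). A minimal sketch of that loop follows; the MediaPipelineBackend types and Delegate callbacks are the ones exercised by the test, while MyFeeder, its buffer queue, and the ownership convention are illustrative assumptions rather than code from this change.

#include <deque>
#include <utility>

#include "chromecast/public/media/cast_decoder_buffer.h"
#include "chromecast/public/media/media_pipeline_backend.h"

namespace chromecast {
namespace media {

// Sketch only: pushes buffers until the backend reports kBufferPending,
// then resumes from OnPushBufferComplete(). Buffers are owned by the
// caller and must stay alive until their push completes.
class MyFeeder : public MediaPipelineBackend::Delegate {
 public:
  MyFeeder(MediaPipelineBackend::Decoder* decoder,
           std::deque<CastDecoderBuffer*> buffers)
      : decoder_(decoder), buffers_(std::move(buffers)) {}

  void Feed() {
    while (!buffers_.empty()) {
      CastDecoderBuffer* buffer = buffers_.front();
      MediaPipelineBackend::BufferStatus status =
          decoder_->PushBuffer(nullptr /* decrypt_context */, buffer);
      if (status == MediaPipelineBackend::kBufferFailed)
        return;  // A real client would surface the error here.
      buffers_.pop_front();
      if (status == MediaPipelineBackend::kBufferPending)
        return;  // Wait for OnPushBufferComplete() before pushing more.
    }
  }

  // MediaPipelineBackend::Delegate implementation (same overrides as the
  // test fixture above).
  void OnPushBufferComplete(MediaPipelineBackend::Decoder* decoder,
                            MediaPipelineBackend::BufferStatus status) override {
    if (status == MediaPipelineBackend::kBufferSuccess)
      Feed();
  }
  void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override {}
  void OnDecoderError(MediaPipelineBackend::Decoder* decoder) override {}
  void OnVideoResolutionChanged(MediaPipelineBackend::VideoDecoder* decoder,
                                const Size& size) override {}

 private:
  MediaPipelineBackend::Decoder* decoder_;
  std::deque<CastDecoderBuffer*> buffers_;
};

}  // namespace media
}  // namespace chromecast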
diff --git a/chromecast/media/cma/backend/media_clock_device_default.cc b/chromecast/media/cma/backend/media_clock_device_default.cc
deleted file mode 100644
index 3c7893d..0000000
--- a/chromecast/media/cma/backend/media_clock_device_default.cc
+++ /dev/null
@@ -1,114 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/backend/media_clock_device_default.h"
-
-#include "media/base/timestamp_constants.h"
-
-namespace chromecast {
-namespace media {
-namespace {
-
-// Return true if transition from |state1| to |state2| is a valid state
-// transition.
-inline static bool IsValidStateTransition(MediaClockDevice::State state1,
- MediaClockDevice::State state2) {
- if (state2 == state1)
- return true;
-
- // All states can transition to |kStateError|.
- if (state2 == MediaClockDevice::kStateError)
- return true;
-
- // All the other valid FSM transitions.
- switch (state1) {
- case MediaClockDevice::kStateUninitialized:
- return state2 == MediaClockDevice::kStateIdle;
- case MediaClockDevice::kStateIdle:
- return state2 == MediaClockDevice::kStateRunning ||
- state2 == MediaClockDevice::kStateUninitialized;
- case MediaClockDevice::kStateRunning:
- return state2 == MediaClockDevice::kStateIdle;
- case MediaClockDevice::kStateError:
- return state2 == MediaClockDevice::kStateUninitialized;
- default:
- return false;
- }
-}
-
-} // namespace
-
-MediaClockDeviceDefault::MediaClockDeviceDefault()
- : state_(kStateUninitialized),
- media_time_(::media::kNoTimestamp()),
- rate_(0.0f) {
- thread_checker_.DetachFromThread();
-}
-
-MediaClockDeviceDefault::~MediaClockDeviceDefault() {
-}
-
-MediaClockDevice::State MediaClockDeviceDefault::GetState() const {
- DCHECK(thread_checker_.CalledOnValidThread());
- return state_;
-}
-
-bool MediaClockDeviceDefault::SetState(State new_state) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(IsValidStateTransition(state_, new_state));
-
- if (new_state == state_)
- return true;
-
- state_ = new_state;
-
- if (state_ == kStateRunning) {
- stc_ = base::TimeTicks::Now();
- DCHECK(media_time_ != ::media::kNoTimestamp());
- return true;
- }
-
- if (state_ == kStateIdle) {
- media_time_ = ::media::kNoTimestamp();
- return true;
- }
-
- return true;
-}
-
-bool MediaClockDeviceDefault::ResetTimeline(int64_t time_microseconds) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK_EQ(state_, kStateIdle);
- media_time_ = base::TimeDelta::FromMicroseconds(time_microseconds);
- return true;
-}
-
-bool MediaClockDeviceDefault::SetRate(float rate) {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (state_ == kStateRunning) {
- base::TimeTicks now = base::TimeTicks::Now();
- media_time_ = media_time_ + (now - stc_) * rate_;
- stc_ = now;
- }
-
- rate_ = rate;
- return true;
-}
-
-int64_t MediaClockDeviceDefault::GetTimeMicroseconds() {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (state_ != kStateRunning)
- return media_time_.InMicroseconds();
-
- if (media_time_ == ::media::kNoTimestamp())
- return media_time_.InMicroseconds();
-
- base::TimeTicks now = base::TimeTicks::Now();
- base::TimeDelta interpolated_media_time =
- media_time_ + (now - stc_) * rate_;
- return interpolated_media_time.InMicroseconds();
-}
-
-} // namespace media
-} // namespace chromecast
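The interpolation this deleted clock performed (media_time_ + (now - stc_) * rate_) survives essentially unchanged in MediaPipelineBackendDefault::GetCurrentPts() further down in this CL. For reference, the same arithmetic as a standalone function, using std::chrono instead of base::TimeTicks; the function name and parameters are illustrative only.

#include <chrono>
#include <cstdint>

// Interpolate a presentation timestamp from an anchor PTS, the wall-clock
// time at which that PTS was anchored, and the playback rate.
int64_t InterpolatedPtsMicroseconds(
    int64_t anchor_pts_us,
    std::chrono::steady_clock::time_point anchor_clock,
    double rate) {
  const auto elapsed = std::chrono::duration_cast<std::chrono::microseconds>(
      std::chrono::steady_clock::now() - anchor_clock);
  return anchor_pts_us + static_cast<int64_t>(elapsed.count() * rate);
}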
diff --git a/chromecast/media/cma/backend/media_clock_device_default.h b/chromecast/media/cma/backend/media_clock_device_default.h
deleted file mode 100644
index e895354..0000000
--- a/chromecast/media/cma/backend/media_clock_device_default.h
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_CLOCK_DEVICE_DEFAULT_H_
-#define CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_CLOCK_DEVICE_DEFAULT_H_
-
-#include "base/macros.h"
-#include "base/threading/thread_checker.h"
-#include "base/time/time.h"
-#include "chromecast/public/media/media_clock_device.h"
-
-namespace chromecast {
-namespace media {
-
-class MediaClockDeviceDefault : public MediaClockDevice {
- public:
- MediaClockDeviceDefault();
- ~MediaClockDeviceDefault() override;
-
- // MediaClockDevice implementation.
- State GetState() const override;
- bool SetState(State new_state) override;
- bool ResetTimeline(int64_t time_microseconds) override;
- bool SetRate(float rate) override;
- int64_t GetTimeMicroseconds() override;
-
- private:
- State state_;
-
- // Media time sampled at STC time |stc_|.
- base::TimeDelta media_time_;
- base::TimeTicks stc_;
-
- float rate_;
-
- base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaClockDeviceDefault);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_CLOCK_DEVICE_DEFAULT_H_
diff --git a/chromecast/media/cma/backend/media_component_device_default.cc b/chromecast/media/cma/backend/media_component_device_default.cc
deleted file mode 100644
index bc2a567..0000000
--- a/chromecast/media/cma/backend/media_component_device_default.cc
+++ /dev/null
@@ -1,241 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/backend/media_component_device_default.h"
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "base/location.h"
-#include "base/thread_task_runner_handle.h"
-#include "chromecast/public/media/cast_decoder_buffer.h"
-#include "chromecast/public/media/decrypt_context.h"
-#include "chromecast/public/media/media_pipeline_device_params.h"
-#include "chromecast/public/task_runner.h"
-#include "media/base/timestamp_constants.h"
-
-namespace chromecast {
-namespace media {
-
-namespace {
-
-// Maximum number of frames that can be buffered.
-const size_t kMaxFrameCount = 20;
-
-// Wraps base::Closure in the chromecast/public TaskRunner interface.
-class ClosureTask : public TaskRunner::Task {
- public:
- ClosureTask(const base::Closure& cb) : cb_(cb) {}
- void Run() override { cb_.Run(); }
-
- private:
- base::Closure cb_;
-};
-
-// Returns whether or not transitioning from |state1| to |state2| is valid.
-inline static bool IsValidStateTransition(MediaComponentDevice::State state1,
- MediaComponentDevice::State state2) {
- if (state2 == state1)
- return true;
-
- // All states can transition to |kStateError|.
- if (state2 == MediaComponentDevice::kStateError)
- return true;
-
- // All the other valid FSM transitions.
- switch (state1) {
- case MediaComponentDevice::kStateUninitialized:
- return state2 == MediaComponentDevice::kStateIdle;
- case MediaComponentDevice::kStateIdle:
- return state2 == MediaComponentDevice::kStateRunning ||
- state2 == MediaComponentDevice::kStatePaused ||
- state2 == MediaComponentDevice::kStateUninitialized;
- case MediaComponentDevice::kStatePaused:
- return state2 == MediaComponentDevice::kStateIdle ||
- state2 == MediaComponentDevice::kStateRunning;
- case MediaComponentDevice::kStateRunning:
- return state2 == MediaComponentDevice::kStateIdle ||
- state2 == MediaComponentDevice::kStatePaused;
- case MediaComponentDevice::kStateError:
- return state2 == MediaComponentDevice::kStateUninitialized;
-
- default:
- return false;
- }
-}
-
-} // namespace
-
-MediaComponentDeviceDefault::DefaultDecoderBuffer::DefaultDecoderBuffer()
- : size(0) {
-}
-
-MediaComponentDeviceDefault::DefaultDecoderBuffer::~DefaultDecoderBuffer() {
-}
-
-MediaComponentDeviceDefault::MediaComponentDeviceDefault(
- const MediaPipelineDeviceParams& params,
- MediaClockDevice* media_clock_device)
- : task_runner_(params.task_runner),
- media_clock_device_(media_clock_device),
- state_(kStateUninitialized),
- rendering_time_(::media::kNoTimestamp()),
- decoded_frame_count_(0),
- decoded_byte_count_(0),
- scheduled_rendering_task_(false),
- weak_factory_(this) {
- weak_this_ = weak_factory_.GetWeakPtr();
- thread_checker_.DetachFromThread();
-}
-
-MediaComponentDeviceDefault::~MediaComponentDeviceDefault() {
-}
-
-void MediaComponentDeviceDefault::SetClient(Client* client) {
- DCHECK(thread_checker_.CalledOnValidThread());
- client_.reset(client);
-}
-
-MediaComponentDevice::State MediaComponentDeviceDefault::GetState() const {
- DCHECK(thread_checker_.CalledOnValidThread());
- return state_;
-}
-
-bool MediaComponentDeviceDefault::SetState(State new_state) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(IsValidStateTransition(state_, new_state));
- state_ = new_state;
-
- if (state_ == kStateIdle) {
- // Back to the idle state: reset a bunch of parameters.
- is_eos_ = false;
- rendering_time_ = ::media::kNoTimestamp();
- decoded_frame_count_ = 0;
- decoded_byte_count_ = 0;
- frames_.clear();
- pending_buffer_ = scoped_ptr<CastDecoderBuffer>();
- frame_pushed_cb_.reset();
- return true;
- }
-
- if (state_ == kStateRunning) {
- if (!scheduled_rendering_task_) {
- scheduled_rendering_task_ = true;
- task_runner_->PostTask(
- new ClosureTask(
- base::Bind(&MediaComponentDeviceDefault::RenderTask, weak_this_)),
- 0);
- }
- return true;
- }
-
- return true;
-}
-
-bool MediaComponentDeviceDefault::SetStartPts(int64_t time_microseconds) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK_EQ(state_, kStateIdle);
- rendering_time_ = base::TimeDelta::FromMicroseconds(time_microseconds);
- return true;
-}
-
-MediaComponentDevice::FrameStatus MediaComponentDeviceDefault::PushFrame(
- DecryptContext* decrypt_context_in,
- CastDecoderBuffer* buffer_in,
- FrameStatusCB* completion_cb_in) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(state_ == kStatePaused || state_ == kStateRunning);
- DCHECK(!is_eos_);
- DCHECK(!pending_buffer_.get());
- DCHECK(buffer_in);
-
- scoped_ptr<DecryptContext> decrypt_context(decrypt_context_in);
- scoped_ptr<FrameStatusCB> completion_cb(completion_cb_in);
-
- scoped_ptr<CastDecoderBuffer> buffer(buffer_in);
- if (buffer->end_of_stream()) {
- is_eos_ = true;
- return kFrameSuccess;
- }
-
- if (frames_.size() > kMaxFrameCount) {
- pending_buffer_ = buffer.Pass();
- frame_pushed_cb_ = completion_cb.Pass();
- return kFramePending;
- }
-
- DefaultDecoderBuffer fake_buffer;
- fake_buffer.size = buffer->data_size();
- fake_buffer.pts = base::TimeDelta::FromMicroseconds(buffer->timestamp());
- frames_.push_back(fake_buffer);
- return kFrameSuccess;
-}
-
-MediaComponentDeviceDefault::RenderingDelay
-MediaComponentDeviceDefault::GetRenderingDelay() const {
- NOTIMPLEMENTED();
- return RenderingDelay();
-}
-
-void MediaComponentDeviceDefault::RenderTask() {
- scheduled_rendering_task_ = false;
-
- if (state_ != kStateRunning)
- return;
-
- base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
- media_clock_device_->GetTimeMicroseconds());
- if (media_time == ::media::kNoTimestamp()) {
- scheduled_rendering_task_ = true;
- task_runner_->PostTask(
- new ClosureTask(
- base::Bind(&MediaComponentDeviceDefault::RenderTask, weak_this_)),
- 50);
- return;
- }
-
- while (!frames_.empty() && frames_.front().pts <= media_time) {
- rendering_time_ = frames_.front().pts;
- decoded_frame_count_++;
- decoded_byte_count_ += frames_.front().size;
- frames_.pop_front();
- if (pending_buffer_.get()) {
- DefaultDecoderBuffer fake_buffer;
- fake_buffer.size = pending_buffer_->data_size();
- fake_buffer.pts =
- base::TimeDelta::FromMicroseconds(pending_buffer_->timestamp());
- frames_.push_back(fake_buffer);
- pending_buffer_ = scoped_ptr<CastDecoderBuffer>();
- frame_pushed_cb_->Run(kFrameSuccess);
- frame_pushed_cb_.reset();
- }
- }
-
- if (frames_.empty() && is_eos_) {
- if (client_) {
- client_->OnEndOfStream();
- }
- return;
- }
-
- scheduled_rendering_task_ = true;
- task_runner_->PostTask(
- new ClosureTask(
- base::Bind(&MediaComponentDeviceDefault::RenderTask, weak_this_)),
- 50);
-}
-
-bool MediaComponentDeviceDefault::GetStatistics(Statistics* stats) const {
- if (state_ != kStateRunning)
- return false;
-
- // Note: what is returned here is not the number of samples but the number of
- // frames. The value is different for audio.
- stats->decoded_bytes = decoded_byte_count_;
- stats->decoded_samples = decoded_frame_count_;
- stats->dropped_samples = 0;
- return true;
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/backend/media_component_device_default.h b/chromecast/media/cma/backend/media_component_device_default.h
deleted file mode 100644
index d6dc7ea..0000000
--- a/chromecast/media/cma/backend/media_component_device_default.h
+++ /dev/null
@@ -1,91 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_COMPONENT_DEVICE_DEFAULT_H_
-#define CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_COMPONENT_DEVICE_DEFAULT_H_
-
-#include <list>
-
-#include "base/macros.h"
-#include "base/memory/weak_ptr.h"
-#include "base/threading/thread_checker.h"
-
-#include "chromecast/public/media/media_clock_device.h"
-#include "chromecast/public/media/media_component_device.h"
-
-namespace chromecast {
-class TaskRunner;
-
-namespace media {
-struct MediaPipelineDeviceParams;
-
-class MediaComponentDeviceDefault : public MediaComponentDevice {
- public:
- MediaComponentDeviceDefault(const MediaPipelineDeviceParams& params,
- MediaClockDevice* media_clock_device);
- ~MediaComponentDeviceDefault() override;
-
- // MediaComponentDevice implementation.
- void SetClient(Client* client) override;
- State GetState() const override;
- bool SetState(State new_state) override;
- bool SetStartPts(int64_t time_microseconds) override;
- FrameStatus PushFrame(DecryptContext* decrypt_context,
- CastDecoderBuffer* buffer,
- FrameStatusCB* completion_cb) override;
- RenderingDelay GetRenderingDelay() const override;
- bool GetStatistics(Statistics* stats) const override;
-
- private:
- struct DefaultDecoderBuffer {
- DefaultDecoderBuffer();
- ~DefaultDecoderBuffer();
-
- // Buffer size.
- size_t size;
-
- // Presentation timestamp.
- base::TimeDelta pts;
- };
-
- void RenderTask();
-
- TaskRunner* task_runner_;
- MediaClockDevice* const media_clock_device_;
- scoped_ptr<Client> client_;
-
- State state_;
-
- // Indicate whether the end of stream has been received.
- bool is_eos_;
-
- // Media time of the last rendered audio sample.
- base::TimeDelta rendering_time_;
-
- // Frame decoded/rendered since the pipeline left the idle state.
- uint64 decoded_frame_count_;
- uint64 decoded_byte_count_;
-
- // List of frames not rendered yet.
- std::list<DefaultDecoderBuffer> frames_;
-
- // Indicate whether there is a scheduled rendering task.
- bool scheduled_rendering_task_;
-
- // Pending frame.
- scoped_ptr<CastDecoderBuffer> pending_buffer_;
- scoped_ptr<FrameStatusCB> frame_pushed_cb_;
-
- base::ThreadChecker thread_checker_;
-
- base::WeakPtr<MediaComponentDeviceDefault> weak_this_;
- base::WeakPtrFactory<MediaComponentDeviceDefault> weak_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaComponentDeviceDefault);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_COMPONENT_DEVICE_DEFAULT_H_
diff --git a/chromecast/media/cma/backend/media_pipeline_backend_default.cc b/chromecast/media/cma/backend/media_pipeline_backend_default.cc
index 498c5be..bb0cc1e 100644
--- a/chromecast/media/cma/backend/media_pipeline_backend_default.cc
+++ b/chromecast/media/cma/backend/media_pipeline_backend_default.cc
@@ -4,35 +4,145 @@
#include "chromecast/media/cma/backend/media_pipeline_backend_default.h"
-#include "chromecast/media/cma/backend/audio_pipeline_device_default.h"
-#include "chromecast/media/cma/backend/media_clock_device_default.h"
-#include "chromecast/media/cma/backend/video_pipeline_device_default.h"
+#include "chromecast/public/media/cast_decoder_buffer.h"
namespace chromecast {
namespace media {
-MediaPipelineBackendDefault::MediaPipelineBackendDefault(
- const MediaPipelineDeviceParams& params)
- : params_(params) {}
+class MediaPipelineBackendDefault::AudioDecoderDefault
+ : public MediaPipelineBackend::AudioDecoder {
+ public:
+ AudioDecoderDefault() : delegate_(nullptr) {}
+ ~AudioDecoderDefault() override {}
-MediaPipelineBackendDefault::~MediaPipelineBackendDefault() {}
+ void SetDelegate(MediaPipelineBackend::Delegate* delegate) {
+ delegate_ = delegate;
+ }
-MediaClockDevice* MediaPipelineBackendDefault::GetClock() {
- if (!clock_)
- clock_.reset(new MediaClockDeviceDefault());
- return clock_.get();
+ // MediaPipelineBackend::AudioDecoder implementation:
+ BufferStatus PushBuffer(DecryptContext* decrypt_context,
+ CastDecoderBuffer* buffer) override {
+ if (buffer->end_of_stream())
+ delegate_->OnEndOfStream(this);
+ return MediaPipelineBackend::kBufferSuccess;
+ }
+
+ void GetStatistics(Statistics* statistics) override {}
+
+ bool SetConfig(const AudioConfig& config) override { return true; }
+
+ bool SetVolume(float multiplier) override { return true; }
+
+ RenderingDelay GetRenderingDelay() override { return RenderingDelay(); }
+
+ private:
+ MediaPipelineBackend::Delegate* delegate_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioDecoderDefault);
+};
+
+class MediaPipelineBackendDefault::VideoDecoderDefault
+ : public MediaPipelineBackend::VideoDecoder {
+ public:
+ VideoDecoderDefault() : delegate_(nullptr) {}
+ ~VideoDecoderDefault() override {}
+
+ void SetDelegate(MediaPipelineBackend::Delegate* delegate) {
+ delegate_ = delegate;
+ }
+
+ // MediaPipelineBackend::VideoDecoder implementation:
+ BufferStatus PushBuffer(DecryptContext* decrypt_context,
+ CastDecoderBuffer* buffer) override {
+ if (buffer->end_of_stream())
+ delegate_->OnEndOfStream(this);
+ return MediaPipelineBackend::kBufferSuccess;
+ }
+
+ void GetStatistics(Statistics* statistics) override {}
+
+ bool SetConfig(const VideoConfig& config) override { return true; }
+
+ private:
+ MediaPipelineBackend::Delegate* delegate_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoDecoderDefault);
+};
+
+MediaPipelineBackendDefault::MediaPipelineBackendDefault()
+ : running_(false), rate_(1.0f) {
+}
+
+MediaPipelineBackendDefault::~MediaPipelineBackendDefault() {
}
-AudioPipelineDevice* MediaPipelineBackendDefault::GetAudio() {
- if (!audio_)
- audio_.reset(new AudioPipelineDeviceDefault(params_, GetClock()));
- return audio_.get();
+MediaPipelineBackend::AudioDecoder*
+MediaPipelineBackendDefault::CreateAudioDecoder() {
+ DCHECK(!audio_decoder_);
+ audio_decoder_.reset(new AudioDecoderDefault());
+ return audio_decoder_.get();
+}
+
+MediaPipelineBackend::VideoDecoder*
+MediaPipelineBackendDefault::CreateVideoDecoder() {
+ DCHECK(!video_decoder_);
+ video_decoder_.reset(new VideoDecoderDefault());
+ return video_decoder_.get();
+}
+
+bool MediaPipelineBackendDefault::Initialize(Delegate* delegate) {
+ DCHECK(delegate);
+ if (audio_decoder_)
+ audio_decoder_->SetDelegate(delegate);
+ if (video_decoder_)
+ video_decoder_->SetDelegate(delegate);
+ return true;
+}
+
+bool MediaPipelineBackendDefault::Start(int64_t start_pts) {
+ DCHECK(!running_);
+ start_pts_ = base::TimeDelta::FromMicroseconds(start_pts);
+ start_clock_ = base::TimeTicks::Now();
+ running_ = true;
+ return true;
+}
+
+bool MediaPipelineBackendDefault::Stop() {
+ start_pts_ = base::TimeDelta::FromMicroseconds(GetCurrentPts());
+ running_ = false;
+ return true;
+}
+
+bool MediaPipelineBackendDefault::Pause() {
+ DCHECK(running_);
+ start_pts_ = base::TimeDelta::FromMicroseconds(GetCurrentPts());
+ running_ = false;
+ return true;
+}
+
+bool MediaPipelineBackendDefault::Resume() {
+ DCHECK(!running_);
+ running_ = true;
+ start_clock_ = base::TimeTicks::Now();
+ return true;
+}
+
+int64_t MediaPipelineBackendDefault::GetCurrentPts() {
+ if (!running_)
+ return start_pts_.InMicroseconds();
+
+ base::TimeTicks now = base::TimeTicks::Now();
+ base::TimeDelta interpolated_media_time =
+ start_pts_ + (now - start_clock_) * rate_;
+ return interpolated_media_time.InMicroseconds();
}
-VideoPipelineDevice* MediaPipelineBackendDefault::GetVideo() {
- if (!video_)
- video_.reset(new VideoPipelineDeviceDefault(params_, GetClock()));
- return video_.get();
+bool MediaPipelineBackendDefault::SetPlaybackRate(float rate) {
+ DCHECK_GT(rate, 0.0f);
+ start_pts_ = base::TimeDelta::FromMicroseconds(GetCurrentPts());
+ start_clock_ = base::TimeTicks::Now();
+ rate_ = rate;
+ return true;
}
} // namespace media
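Read end to end, the default backend implies the following call order for a client: create the decoders, initialize with a delegate, configure and feed the decoders, then drive playback state. The sketch below only illustrates that order, under the same assumption the updated unittest makes (decoders created before Initialize()); RunBackendSketch and its elisions are not part of this change.

#include <cstdint>

#include "chromecast/media/cma/backend/media_pipeline_backend_default.h"

namespace chromecast {
namespace media {

// Sketch of the expected call order against the new interface.
void RunBackendSketch(MediaPipelineBackend::Delegate* delegate) {
  MediaPipelineBackendDefault backend;
  MediaPipelineBackend::AudioDecoder* audio = backend.CreateAudioDecoder();
  MediaPipelineBackend::VideoDecoder* video = backend.CreateVideoDecoder();
  backend.Initialize(delegate);  // Delegate is attached to existing decoders.
  // ... SetConfig() each decoder, then feed buffers via PushBuffer() ...
  backend.Start(0 /* start_pts, in microseconds */);
  backend.SetPlaybackRate(1.0f);
  int64_t pts = backend.GetCurrentPts();  // Interpolated from Start().
  backend.Pause();
  backend.Resume();
  backend.Stop();
  (void)audio;
  (void)video;
  (void)pts;
}

}  // namespace media
}  // namespace chromecast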
diff --git a/chromecast/media/cma/backend/media_pipeline_backend_default.h b/chromecast/media/cma/backend/media_pipeline_backend_default.h
index 7a9a602..8dc2b94 100644
--- a/chromecast/media/cma/backend/media_pipeline_backend_default.h
+++ b/chromecast/media/cma/backend/media_pipeline_backend_default.h
@@ -2,12 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_PIPELINE_DEVICE_FACTORY_DEFAULT_H_
-#define CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_PIPELINE_DEVICE_FACTORY_DEFAULT_H_
+#ifndef CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_PIPELINE_BACKEND_DEFAULT_H_
+#define CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_PIPELINE_BACKEND_DEFAULT_H_
+#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
+#include "base/time/time.h"
#include "chromecast/public/media/media_pipeline_backend.h"
-#include "chromecast/public/media/media_pipeline_device_params.h"
namespace chromecast {
namespace media {
@@ -15,19 +16,31 @@ namespace media {
// Factory that instantiates default (stub) media pipeline device elements.
class MediaPipelineBackendDefault : public MediaPipelineBackend {
public:
- MediaPipelineBackendDefault(const MediaPipelineDeviceParams& params);
+ MediaPipelineBackendDefault();
~MediaPipelineBackendDefault() override;
- // MediaPipelineBackend implementation
- MediaClockDevice* GetClock() override;
- AudioPipelineDevice* GetAudio() override;
- VideoPipelineDevice* GetVideo() override;
+ // MediaPipelineBackend implementation:
+ AudioDecoder* CreateAudioDecoder() override;
+ VideoDecoder* CreateVideoDecoder() override;
+ bool Initialize(Delegate* delegate) override;
+ bool Start(int64_t start_pts) override;
+ bool Stop() override;
+ bool Pause() override;
+ bool Resume() override;
+ int64_t GetCurrentPts() override;
+ bool SetPlaybackRate(float rate) override;
private:
- MediaPipelineDeviceParams params_;
- scoped_ptr<MediaClockDevice> clock_;
- scoped_ptr<AudioPipelineDevice> audio_;
- scoped_ptr<VideoPipelineDevice> video_;
+ class AudioDecoderDefault;
+ class VideoDecoderDefault;
+
+ base::TimeDelta start_pts_;
+ base::TimeTicks start_clock_;
+ bool running_;
+ float rate_;
+
+ scoped_ptr<AudioDecoderDefault> audio_decoder_;
+ scoped_ptr<VideoDecoderDefault> video_decoder_;
DISALLOW_COPY_AND_ASSIGN(MediaPipelineBackendDefault);
};
@@ -35,4 +48,4 @@ class MediaPipelineBackendDefault : public MediaPipelineBackend {
} // namespace media
} // namespace chromecast
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_PIPELINE_DEVICE_FACTORY_DEFAULT_H_
+#endif // CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_PIPELINE_BACKEND_DEFAULT_H_
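The same header is the contract a non-default (vendor) backend now implements. As a purely hypothetical skeleton showing the required surface, with stubbed bodies in place of real hardware decoders:

#include <cstdint>

#include "chromecast/public/media/media_pipeline_backend.h"

namespace chromecast {
namespace media {

// Hypothetical vendor backend skeleton; a real implementation would return
// hardware-backed decoder objects and track actual playback state.
class MyVendorBackend : public MediaPipelineBackend {
 public:
  MyVendorBackend() {}
  ~MyVendorBackend() override {}

  AudioDecoder* CreateAudioDecoder() override { return nullptr; }
  VideoDecoder* CreateVideoDecoder() override { return nullptr; }
  bool Initialize(Delegate* delegate) override { return delegate != nullptr; }
  bool Start(int64_t start_pts) override { return true; }
  bool Stop() override { return true; }
  bool Pause() override { return true; }
  bool Resume() override { return true; }
  int64_t GetCurrentPts() override { return 0; }
  bool SetPlaybackRate(float rate) override { return rate > 0.0f; }
};

}  // namespace media
}  // namespace chromecast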
diff --git a/chromecast/media/cma/backend/video_pipeline_device_default.cc b/chromecast/media/cma/backend/video_pipeline_device_default.cc
deleted file mode 100644
index dd30517..0000000
--- a/chromecast/media/cma/backend/video_pipeline_device_default.cc
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/backend/video_pipeline_device_default.h"
-
-#include "chromecast/media/cma/backend/media_component_device_default.h"
-
-namespace chromecast {
-namespace media {
-
-VideoPipelineDeviceDefault::VideoPipelineDeviceDefault(
- const MediaPipelineDeviceParams& params,
- MediaClockDevice* media_clock_device)
- : pipeline_(new MediaComponentDeviceDefault(params, media_clock_device)) {
- thread_checker_.DetachFromThread();
-}
-
-VideoPipelineDeviceDefault::~VideoPipelineDeviceDefault() {
-}
-
-void VideoPipelineDeviceDefault::SetClient(Client* client) {
- pipeline_->SetClient(client);
-}
-
-MediaComponentDevice::State VideoPipelineDeviceDefault::GetState() const {
- return pipeline_->GetState();
-}
-
-bool VideoPipelineDeviceDefault::SetState(State new_state) {
- bool success = pipeline_->SetState(new_state);
- if (!success)
- return false;
-
- if (new_state == kStateIdle) {
- DCHECK(IsValidConfig(config_));
- }
- if (new_state == kStateUninitialized) {
- config_ = VideoConfig();
- }
- return true;
-}
-
-bool VideoPipelineDeviceDefault::SetStartPts(int64_t time_microseconds) {
- return pipeline_->SetStartPts(time_microseconds);
-}
-
-MediaComponentDevice::FrameStatus VideoPipelineDeviceDefault::PushFrame(
- DecryptContext* decrypt_context,
- CastDecoderBuffer* buffer,
- FrameStatusCB* completion_cb) {
- return pipeline_->PushFrame(decrypt_context, buffer, completion_cb);
-}
-
-VideoPipelineDeviceDefault::RenderingDelay
-VideoPipelineDeviceDefault::GetRenderingDelay() const {
- return pipeline_->GetRenderingDelay();
-}
-
-void VideoPipelineDeviceDefault::SetVideoClient(VideoClient* client) {
- delete client;
-}
-
-bool VideoPipelineDeviceDefault::SetConfig(const VideoConfig& config) {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!IsValidConfig(config))
- return false;
- config_ = config;
- if (config.extra_data_size > 0)
- config_extra_data_.assign(config.extra_data,
- config.extra_data + config.extra_data_size);
- else
- config_extra_data_.clear();
- return true;
-}
-
-bool VideoPipelineDeviceDefault::GetStatistics(Statistics* stats) const {
- return pipeline_->GetStatistics(stats);
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/backend/video_pipeline_device_default.h b/chromecast/media/cma/backend/video_pipeline_device_default.h
deleted file mode 100644
index 8dd61d8..0000000
--- a/chromecast/media/cma/backend/video_pipeline_device_default.h
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_BACKEND_VIDEO_PIPELINE_DEVICE_DEFAULT_H_
-#define CHROMECAST_MEDIA_CMA_BACKEND_VIDEO_PIPELINE_DEVICE_DEFAULT_H_
-
-#include <vector>
-
-#include "base/macros.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/threading/thread_checker.h"
-#include "chromecast/public/media/decoder_config.h"
-#include "chromecast/public/media/video_pipeline_device.h"
-
-namespace chromecast {
-namespace media {
-
-class MediaClockDevice;
-class MediaComponentDeviceDefault;
-struct MediaPipelineDeviceParams;
-
-class VideoPipelineDeviceDefault : public VideoPipelineDevice {
- public:
- VideoPipelineDeviceDefault(const MediaPipelineDeviceParams& params,
- MediaClockDevice* media_clock_device);
- ~VideoPipelineDeviceDefault() override;
-
- // VideoPipelineDevice implementation.
- void SetClient(Client* client) override;
- State GetState() const override;
- bool SetState(State new_state) override;
- bool SetStartPts(int64_t time_microseconds) override;
- FrameStatus PushFrame(DecryptContext* decrypt_context,
- CastDecoderBuffer* buffer,
- FrameStatusCB* completion_cb) override;
- RenderingDelay GetRenderingDelay() const override;
- void SetVideoClient(VideoClient* client) override;
- bool SetConfig(const VideoConfig& config) override;
- bool GetStatistics(Statistics* stats) const override;
-
- private:
- scoped_ptr<MediaComponentDeviceDefault> pipeline_;
-
- VideoConfig config_;
- std::vector<uint8_t> config_extra_data_;
-
- base::ThreadChecker thread_checker_;
- DISALLOW_COPY_AND_ASSIGN(VideoPipelineDeviceDefault);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_VIDEO_PIPELINE_DEVICE_DEFAULT_H_
diff --git a/chromecast/media/cma/base/cast_decoder_buffer_impl.cc b/chromecast/media/cma/base/cast_decoder_buffer_impl.cc
index e29b620..e958048 100644
--- a/chromecast/media/cma/base/cast_decoder_buffer_impl.cc
+++ b/chromecast/media/cma/base/cast_decoder_buffer_impl.cc
@@ -41,5 +41,10 @@ const scoped_refptr<DecoderBufferBase>& CastDecoderBufferImpl::buffer() const {
return buffer_;
}
+void CastDecoderBufferImpl::set_buffer(
+ const scoped_refptr<DecoderBufferBase>& buffer) {
+ buffer_ = buffer;
+}
+
} // namespace media
} // namespace chromecast
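The new set_buffer() accessor is what lets the updated unittest reuse a single CastDecoderBufferImpl (backend_audio_buffer_ / backend_video_buffer_) across pushes instead of allocating a wrapper per frame. A small sketch of that pattern; PushOneFrame is illustrative, not code from this change.

#include "chromecast/media/cma/base/cast_decoder_buffer_impl.h"
#include "chromecast/public/media/media_pipeline_backend.h"

namespace chromecast {
namespace media {

// Repoint a reusable wrapper at the next frame before each push; the
// wrapper holds a scoped_refptr, so the frame stays alive while pushed.
MediaPipelineBackend::BufferStatus PushOneFrame(
    MediaPipelineBackend::Decoder* decoder,
    CastDecoderBufferImpl* wrapper,
    const scoped_refptr<DecoderBufferBase>& frame) {
  wrapper->set_buffer(frame);
  return decoder->PushBuffer(nullptr /* decrypt_context */, wrapper);
}

}  // namespace media
}  // namespace chromecast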
diff --git a/chromecast/media/cma/base/cast_decoder_buffer_impl.h b/chromecast/media/cma/base/cast_decoder_buffer_impl.h
index 444ccf4..adc663c 100644
--- a/chromecast/media/cma/base/cast_decoder_buffer_impl.h
+++ b/chromecast/media/cma/base/cast_decoder_buffer_impl.h
@@ -42,6 +42,7 @@ class CastDecoderBufferImpl : public CastDecoderBuffer {
bool end_of_stream() const override;
const scoped_refptr<DecoderBufferBase>& buffer() const;
+ void set_buffer(const scoped_refptr<DecoderBufferBase>& buffer);
private:
scoped_refptr<DecoderBufferBase> buffer_;
diff --git a/chromecast/media/cma/filters/BUILD.gn b/chromecast/media/cma/filters/BUILD.gn
deleted file mode 100644
index 003b5b1..0000000
--- a/chromecast/media/cma/filters/BUILD.gn
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-source_set("filters") {
- sources = [
- "cma_renderer.cc",
- "cma_renderer.h",
- "demuxer_stream_adapter.cc",
- "demuxer_stream_adapter.h",
- "hole_frame_factory.cc",
- "hole_frame_factory.h",
- ]
-
- configs += [ "//chromecast:config" ]
-
- deps = [
- "//base",
- "//chromecast/media/cma/base",
- "//chromecast/media/cma/pipeline",
- "//gpu/command_buffer/client:gles2_interface",
- "//gpu/command_buffer/common",
- "//media",
- "//ui/gfx/geometry",
- ]
-}
diff --git a/chromecast/media/cma/pipeline/BUILD.gn b/chromecast/media/cma/pipeline/BUILD.gn
index 8f2054f..1bfbd84 100644
--- a/chromecast/media/cma/pipeline/BUILD.gn
+++ b/chromecast/media/cma/pipeline/BUILD.gn
@@ -4,8 +4,6 @@
source_set("pipeline") {
sources = [
- "audio_pipeline.cc",
- "audio_pipeline.h",
"audio_pipeline_impl.cc",
"audio_pipeline_impl.h",
"av_pipeline_client.cc",
@@ -14,22 +12,13 @@ source_set("pipeline") {
"av_pipeline_impl.h",
"decrypt_util.cc",
"decrypt_util.h",
- "frame_status_cb_impl.cc",
- "frame_status_cb_impl.h",
"load_type.h",
- "media_component_device_client_impl.cc",
- "media_component_device_client_impl.h",
- "media_pipeline.h",
"media_pipeline_client.cc",
"media_pipeline_client.h",
"media_pipeline_impl.cc",
"media_pipeline_impl.h",
- "video_pipeline.cc",
- "video_pipeline.h",
"video_pipeline_client.cc",
"video_pipeline_client.h",
- "video_pipeline_device_client_impl.cc",
- "video_pipeline_device_client_impl.h",
"video_pipeline_impl.cc",
"video_pipeline_impl.h",
]
diff --git a/chromecast/media/cma/pipeline/audio_pipeline.cc b/chromecast/media/cma/pipeline/audio_pipeline.cc
deleted file mode 100644
index 7d98e09..0000000
--- a/chromecast/media/cma/pipeline/audio_pipeline.cc
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/pipeline/audio_pipeline.h"
-
-namespace chromecast {
-namespace media {
-
-AudioPipeline::AudioPipeline() {
-}
-
-AudioPipeline::~AudioPipeline() {
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/pipeline/audio_pipeline.h b/chromecast/media/cma/pipeline/audio_pipeline.h
deleted file mode 100644
index d7284a1..0000000
--- a/chromecast/media/cma/pipeline/audio_pipeline.h
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_PIPELINE_AUDIO_PIPELINE_H_
-#define CHROMECAST_MEDIA_CMA_PIPELINE_AUDIO_PIPELINE_H_
-
-#include "base/macros.h"
-#include "chromecast/media/cma/pipeline/av_pipeline_client.h"
-
-namespace chromecast {
-namespace media {
-
-class AudioPipeline {
- public:
- AudioPipeline();
- virtual ~AudioPipeline();
-
- // Set the audio client.
- virtual void SetClient(const AvPipelineClient& client) = 0;
-
- // Set the stream volume.
- // - A value of 1.0 is the neutral value.
- // - |volume|=0.0 mutes the stream.
- virtual void SetVolume(float volume) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AudioPipeline);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_PIPELINE_AUDIO_PIPELINE_H_
diff --git a/chromecast/media/cma/pipeline/audio_pipeline_impl.cc b/chromecast/media/cma/pipeline/audio_pipeline_impl.cc
index c7bdfda..b88f686 100644
--- a/chromecast/media/cma/pipeline/audio_pipeline_impl.cc
+++ b/chromecast/media/cma/pipeline/audio_pipeline_impl.cc
@@ -10,7 +10,6 @@
#include "chromecast/media/cma/base/coded_frame_provider.h"
#include "chromecast/media/cma/base/decoder_config_adapter.h"
#include "chromecast/media/cma/pipeline/av_pipeline_impl.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
#include "chromecast/public/media/decoder_config.h"
#include "media/base/audio_decoder_config.h"
@@ -21,11 +20,12 @@ namespace {
const size_t kMaxAudioFrameSize = 32 * 1024;
}
-AudioPipelineImpl::AudioPipelineImpl(AudioPipelineDevice* audio_device)
- : audio_device_(audio_device),
- weak_factory_(this) {
+AudioPipelineImpl::AudioPipelineImpl(
+ MediaPipelineBackend::AudioDecoder* decoder,
+ const AvPipelineClient& client)
+ : decoder_(decoder), audio_client_(client), weak_factory_(this) {
av_pipeline_impl_.reset(new AvPipelineImpl(
- audio_device_,
+ decoder_,
base::Bind(&AudioPipelineImpl::OnUpdateConfig, base::Unretained(this))));
weak_this_ = weak_factory_.GetWeakPtr();
}
@@ -39,16 +39,10 @@ void AudioPipelineImpl::SetCodedFrameProvider(
frame_provider.Pass(), kAppAudioBufferSize, kMaxAudioFrameSize);
}
-void AudioPipelineImpl::SetClient(const AvPipelineClient& client) {
- audio_client_ = client;
- av_pipeline_impl_->SetClient(client);
-}
-
bool AudioPipelineImpl::StartPlayingFrom(
base::TimeDelta time,
const scoped_refptr<BufferingState>& buffering_state) {
- CMALOG(kLogControl) << "AudioPipelineImpl::StartPlayingFrom t0="
- << time.InMilliseconds();
+ CMALOG(kLogControl) << __FUNCTION__ << " t0=" << time.InMilliseconds();
// Reset the pipeline statistics.
previous_stats_ = ::media::PipelineStatistics();
@@ -68,7 +62,7 @@ bool AudioPipelineImpl::StartPlayingFrom(
}
void AudioPipelineImpl::Flush(const ::media::PipelineStatusCB& status_cb) {
- CMALOG(kLogControl) << "AudioPipelineImpl::Flush";
+ CMALOG(kLogControl) << __FUNCTION__;
if (av_pipeline_impl_->GetState() == AvPipelineImpl::kError) {
status_cb.Run(::media::PIPELINE_ERROR_ABORT);
return;
@@ -81,7 +75,7 @@ void AudioPipelineImpl::Flush(const ::media::PipelineStatusCB& status_cb) {
void AudioPipelineImpl::OnFlushDone(
const ::media::PipelineStatusCB& status_cb) {
- CMALOG(kLogControl) << "AudioPipelineImpl::OnFlushDone";
+ CMALOG(kLogControl) << __FUNCTION__;
if (av_pipeline_impl_->GetState() == AvPipelineImpl::kError) {
status_cb.Run(::media::PIPELINE_ERROR_ABORT);
return;
@@ -91,7 +85,7 @@ void AudioPipelineImpl::OnFlushDone(
}
void AudioPipelineImpl::Stop() {
- CMALOG(kLogControl) << "AudioPipelineImpl::Stop";
+ CMALOG(kLogControl) << __FUNCTION__;
av_pipeline_impl_->Stop();
av_pipeline_impl_->TransitionToState(AvPipelineImpl::kStopped);
}
@@ -104,15 +98,14 @@ void AudioPipelineImpl::Initialize(
const ::media::AudioDecoderConfig& audio_config,
scoped_ptr<CodedFrameProvider> frame_provider,
const ::media::PipelineStatusCB& status_cb) {
- CMALOG(kLogControl) << "AudioPipelineImpl::Initialize "
+ CMALOG(kLogControl) << __FUNCTION__ << " "
<< audio_config.AsHumanReadableString();
if (frame_provider)
SetCodedFrameProvider(frame_provider.Pass());
DCHECK(audio_config.IsValidConfig());
- if (!audio_device_->SetConfig(
- DecoderConfigAdapter::ToCastAudioConfig(kPrimary, audio_config)) ||
- !av_pipeline_impl_->Initialize()) {
+ if (!decoder_->SetConfig(
+ DecoderConfigAdapter::ToCastAudioConfig(kPrimary, audio_config))) {
status_cb.Run(::media::PIPELINE_ERROR_INITIALIZATION_FAILED);
return;
}
@@ -121,7 +114,24 @@ void AudioPipelineImpl::Initialize(
}
void AudioPipelineImpl::SetVolume(float volume) {
- audio_device_->SetStreamVolumeMultiplier(volume);
+ decoder_->SetVolume(volume);
+}
+
+void AudioPipelineImpl::OnBufferPushed(
+ MediaPipelineBackend::BufferStatus status) {
+ av_pipeline_impl_->OnBufferPushed(status);
+}
+
+void AudioPipelineImpl::OnEndOfStream() {
+ if (!audio_client_.eos_cb.is_null())
+ audio_client_.eos_cb.Run();
+}
+
+void AudioPipelineImpl::OnError() {
+ if (!audio_client_.playback_error_cb.is_null()) {
+ audio_client_.playback_error_cb.Run(
+ ::media::PIPELINE_ERROR_COULD_NOT_RENDER);
+ }
}
void AudioPipelineImpl::OnUpdateConfig(
@@ -129,10 +139,10 @@ void AudioPipelineImpl::OnUpdateConfig(
const ::media::AudioDecoderConfig& audio_config,
const ::media::VideoDecoderConfig& video_config) {
if (audio_config.IsValidConfig()) {
- CMALOG(kLogControl) << "AudioPipelineImpl::OnUpdateConfig id:" << id << " "
+ CMALOG(kLogControl) << __FUNCTION__ << " id:" << id << " "
<< audio_config.AsHumanReadableString();
- bool success = audio_device_->SetConfig(
+ bool success = decoder_->SetConfig(
DecoderConfigAdapter::ToCastAudioConfig(id, audio_config));
if (!success && !audio_client_.playback_error_cb.is_null())
audio_client_.playback_error_cb.Run(::media::PIPELINE_ERROR_DECODE);
@@ -143,9 +153,8 @@ void AudioPipelineImpl::UpdateStatistics() {
if (audio_client_.statistics_cb.is_null())
return;
- MediaComponentDevice::Statistics device_stats;
- if (!audio_device_->GetStatistics(&device_stats))
- return;
+ MediaPipelineBackend::Decoder::Statistics device_stats;
+ decoder_->GetStatistics(&device_stats);
::media::PipelineStatistics current_stats;
current_stats.audio_bytes_decoded = device_stats.decoded_bytes;
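With SetClient() removed from the audio pipeline, end-of-stream and error notifications now reach AudioPipelineImpl through its new public OnEndOfStream()/OnError() methods; the code that invokes them sits outside the hunks shown here. A hypothetical forwarding shim, modeled on the decoder-pointer comparison the updated unittest uses, might look like this (RouteEndOfStream and its parameters are illustrative):

#include "chromecast/media/cma/pipeline/audio_pipeline_impl.h"
#include "chromecast/public/media/media_pipeline_backend.h"

namespace chromecast {
namespace media {

// Illustrative glue: forward a backend end-of-stream event to the audio
// pipeline only when it came from the audio decoder, which in turn runs
// the client's eos_cb.
void RouteEndOfStream(MediaPipelineBackend::Decoder* decoder,
                      MediaPipelineBackend::AudioDecoder* audio_decoder,
                      AudioPipelineImpl* audio_pipeline) {
  if (decoder == audio_decoder)
    audio_pipeline->OnEndOfStream();
}

}  // namespace media
}  // namespace chromecast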
diff --git a/chromecast/media/cma/pipeline/audio_pipeline_impl.h b/chromecast/media/cma/pipeline/audio_pipeline_impl.h
index 31cadf9..9afe19b 100644
--- a/chromecast/media/cma/pipeline/audio_pipeline_impl.h
+++ b/chromecast/media/cma/pipeline/audio_pipeline_impl.h
@@ -10,8 +10,8 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
-#include "chromecast/media/cma/pipeline/audio_pipeline.h"
#include "chromecast/media/cma/pipeline/av_pipeline_client.h"
+#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/stream_id.h"
namespace media {
@@ -21,17 +21,16 @@ class VideoDecoderConfig;
namespace chromecast {
namespace media {
-class AudioPipelineDevice;
class AvPipelineImpl;
class BrowserCdmCast;
class BufferingState;
class CodedFrameProvider;
-class AudioPipelineImpl : public AudioPipeline {
+class AudioPipelineImpl {
public:
- // |buffering_controller| can be NULL.
- explicit AudioPipelineImpl(AudioPipelineDevice* audio_device);
- ~AudioPipelineImpl() override;
+ AudioPipelineImpl(MediaPipelineBackend::AudioDecoder* decoder,
+ const AvPipelineClient& client);
+ ~AudioPipelineImpl();
// Input port of the pipeline.
void SetCodedFrameProvider(scoped_ptr<CodedFrameProvider> frame_provider);
@@ -52,9 +51,11 @@ class AudioPipelineImpl : public AudioPipeline {
// Update the playback statistics for this audio stream.
void UpdateStatistics();
- // AudioPipeline implementation.
- void SetClient(const AvPipelineClient& client) override;
- void SetVolume(float volume) override;
+ void SetVolume(float volume);
+
+ void OnBufferPushed(MediaPipelineBackend::BufferStatus status);
+ void OnEndOfStream();
+ void OnError();
private:
void OnFlushDone(const ::media::PipelineStatusCB& status_cb);
@@ -62,7 +63,7 @@ class AudioPipelineImpl : public AudioPipeline {
const ::media::AudioDecoderConfig& audio_config,
const ::media::VideoDecoderConfig& video_config);
- AudioPipelineDevice* audio_device_;
+ MediaPipelineBackend::AudioDecoder* decoder_;
scoped_ptr<AvPipelineImpl> av_pipeline_impl_;
AvPipelineClient audio_client_;
diff --git a/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc b/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
index 4d71652..040f96e 100644
--- a/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
+++ b/chromecast/media/cma/pipeline/audio_video_pipeline_impl_unittest.cc
@@ -23,8 +23,6 @@
#include "chromecast/media/cma/pipeline/video_pipeline_impl.h"
#include "chromecast/media/cma/test/frame_generator_for_test.h"
#include "chromecast/media/cma/test/mock_frame_provider.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
-#include "chromecast/public/media/media_clock_device.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
@@ -63,9 +61,9 @@ AudioVideoPipelineImplTest::AudioVideoPipelineImplTest()
: message_loop_(new base::MessageLoop()),
task_runner_(new TaskRunnerImpl()),
media_pipeline_(new MediaPipelineImpl()) {
- MediaPipelineDeviceParams params(task_runner_.get());
scoped_ptr<MediaPipelineBackend> backend =
- make_scoped_ptr(new MediaPipelineBackendDefault(params));
+ make_scoped_ptr(new MediaPipelineBackendDefault());
+
media_pipeline_->Initialize(kLoadTypeURL, backend.Pass());
media_pipeline_->SetPlaybackRate(1.0);
}
@@ -77,18 +75,6 @@ void AudioVideoPipelineImplTest::Initialize(
const base::Closure& done_cb,
::media::PipelineStatus status,
bool is_audio) {
- if (is_audio) {
- AvPipelineClient client;
- client.eos_cb =
- base::Bind(&AudioVideoPipelineImplTest::OnEos, base::Unretained(this));
- media_pipeline_->GetAudioPipeline()->SetClient(client);
- } else {
- VideoPipelineClient client;
- client.av_pipeline_client.eos_cb =
- base::Bind(&AudioVideoPipelineImplTest::OnEos, base::Unretained(this));
- media_pipeline_->GetVideoPipeline()->SetClient(client);
- }
-
::media::AudioDecoderConfig audio_config(
::media::kCodecMP3,
::media::kSampleFormatS16,
@@ -129,19 +115,32 @@ void AudioVideoPipelineImplTest::Initialize(
done_cb);
scoped_ptr<CodedFrameProvider> frame_provider_base(frame_provider.release());
- base::Closure task = is_audio ?
- base::Bind(&MediaPipeline::InitializeAudio,
- base::Unretained(media_pipeline_.get()),
- audio_config,
- base::Passed(&frame_provider_base),
- next_task) :
- base::Bind(&MediaPipeline::InitializeVideo,
- base::Unretained(media_pipeline_.get()),
- video_configs,
- base::Passed(&frame_provider_base),
- next_task);
- base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, task);
+ if (is_audio) {
+ AvPipelineClient client;
+ client.eos_cb =
+ base::Bind(&AudioVideoPipelineImplTest::OnEos, base::Unretained(this));
+
+ base::Closure task = base::Bind(&MediaPipelineImpl::InitializeAudio,
+ base::Unretained(media_pipeline_.get()),
+ audio_config,
+ client,
+ base::Passed(&frame_provider_base),
+ next_task);
+ base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, task);
+ } else {
+ VideoPipelineClient client;
+ client.av_pipeline_client.eos_cb =
+ base::Bind(&AudioVideoPipelineImplTest::OnEos, base::Unretained(this));
+
+ base::Closure task = base::Bind(&MediaPipelineImpl::InitializeVideo,
+ base::Unretained(media_pipeline_.get()),
+ video_configs,
+ client,
+ base::Passed(&frame_provider_base),
+ next_task);
+ base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, task);
+ }
}
void AudioVideoPipelineImplTest::StartPlaying(
@@ -164,7 +163,8 @@ void AudioVideoPipelineImplTest::Flush(
done_cb);
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
- base::Bind(&MediaPipeline::Flush, base::Unretained(media_pipeline_.get()),
+ base::Bind(&MediaPipelineImpl::Flush,
+ base::Unretained(media_pipeline_.get()),
next_task));
}
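
With the new backend API the test no longer builds MediaPipelineDeviceParams, and the EOS client is passed to the pipeline at initialization time instead of through SetClient(). A condensed sketch of the resulting setup, using only constructors and methods that appear in this patch (audio_config, frame_provider and status_cb stand for the values the test already prepares):

// Sketch: default backend without device params, client handed to InitializeAudio().
scoped_ptr<MediaPipelineBackend> backend =
    make_scoped_ptr(new MediaPipelineBackendDefault());
media_pipeline_->Initialize(kLoadTypeURL, backend.Pass());
media_pipeline_->SetPlaybackRate(1.0);

AvPipelineClient client;
client.eos_cb =
    base::Bind(&AudioVideoPipelineImplTest::OnEos, base::Unretained(this));
media_pipeline_->InitializeAudio(audio_config, client,
                                 frame_provider.Pass(), status_cb);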
diff --git a/chromecast/media/cma/pipeline/av_pipeline_impl.cc b/chromecast/media/cma/pipeline/av_pipeline_impl.cc
index ae31b3a..a8eff13 100644
--- a/chromecast/media/cma/pipeline/av_pipeline_impl.cc
+++ b/chromecast/media/cma/pipeline/av_pipeline_impl.cc
@@ -13,16 +13,11 @@
#include "chromecast/media/cdm/browser_cdm_cast.h"
#include "chromecast/media/cma/base/buffering_frame_provider.h"
#include "chromecast/media/cma/base/buffering_state.h"
-#include "chromecast/media/cma/base/cast_decoder_buffer_impl.h"
#include "chromecast/media/cma/base/cma_logging.h"
#include "chromecast/media/cma/base/coded_frame_provider.h"
#include "chromecast/media/cma/base/decoder_buffer_base.h"
#include "chromecast/media/cma/pipeline/decrypt_util.h"
-#include "chromecast/media/cma/pipeline/frame_status_cb_impl.h"
-#include "chromecast/media/cma/pipeline/media_component_device_client_impl.h"
#include "chromecast/public/media/cast_decrypt_config.h"
-#include "chromecast/public/media/media_clock_device.h"
-#include "chromecast/public/media/media_component_device.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decrypt_config.h"
@@ -37,29 +32,28 @@ const int kNoCallbackId = -1;
} // namespace
-AvPipelineImpl::AvPipelineImpl(MediaComponentDevice* media_component_device,
+AvPipelineImpl::AvPipelineImpl(MediaPipelineBackend::Decoder* decoder,
const UpdateConfigCB& update_config_cb)
: update_config_cb_(update_config_cb),
- media_component_device_(media_component_device),
+ decoder_(decoder),
state_(kUninitialized),
buffered_time_(::media::kNoTimestamp()),
playable_buffered_time_(::media::kNoTimestamp()),
enable_feeding_(false),
pending_read_(false),
- pending_push_(false),
+ pushed_buffer_(nullptr),
enable_time_update_(false),
pending_time_update_task_(false),
media_keys_(NULL),
media_keys_callback_id_(kNoCallbackId),
weak_factory_(this) {
- DCHECK(media_component_device);
+ DCHECK(decoder_);
weak_this_ = weak_factory_.GetWeakPtr();
thread_checker_.DetachFromThread();
}
AvPipelineImpl::~AvPipelineImpl() {
DCHECK(thread_checker_.CalledOnValidThread());
- media_component_device_->SetClient(nullptr);
if (media_keys_ && media_keys_callback_id_ != kNoCallbackId)
media_keys_->UnregisterPlayer(media_keys_callback_id_);
@@ -78,30 +72,11 @@ void AvPipelineImpl::SetCodedFrameProvider(
DCHECK(frame_provider);
// Wrap the incoming frame provider to add some buffering capabilities.
- frame_provider_.reset(
- new BufferingFrameProvider(
- frame_provider.Pass(),
- max_buffer_size,
- max_frame_size,
- base::Bind(&AvPipelineImpl::OnFrameBuffered, weak_this_)));
-}
-
-void AvPipelineImpl::SetClient(const AvPipelineClient& client) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK_EQ(state_, kUninitialized);
- client_ = client;
-}
-
-bool AvPipelineImpl::Initialize() {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK_EQ(state_, kUninitialized);
-
- media_component_device_->SetClient(new MediaComponentDeviceClientImpl(
- base::Bind(&AvPipelineImpl::OnEos, weak_this_)));
- if (!media_component_device_->SetState(MediaComponentDevice::kStateIdle))
- return false;
-
- return true;
+ frame_provider_.reset(new BufferingFrameProvider(
+ frame_provider.Pass(),
+ max_buffer_size,
+ max_frame_size,
+ base::Bind(&AvPipelineImpl::OnDataBuffered, weak_this_)));
}
bool AvPipelineImpl::StartPlayingFrom(
@@ -110,22 +85,12 @@ bool AvPipelineImpl::StartPlayingFrom(
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK_EQ(state_, kFlushed);
- // Media time where rendering should start
- // and switch to a state where the audio device accepts incoming buffers.
- if (!media_component_device_->SetStartPts(time.InMicroseconds()) ||
- !media_component_device_->SetState(MediaComponentDevice::kStatePaused)) {
- return false;
- }
-
// Buffering related initialization.
DCHECK(frame_provider_);
buffering_state_ = buffering_state;
if (buffering_state_.get())
buffering_state_->SetMediaTime(time);
- if (!media_component_device_->SetState(MediaComponentDevice::kStateRunning))
- return false;
-
// Start feeding the pipeline.
enable_feeding_ = true;
base::ThreadTaskRunnerHandle::Get()->PostTask(
@@ -137,11 +102,9 @@ bool AvPipelineImpl::StartPlayingFrom(
void AvPipelineImpl::Flush(const base::Closure& done_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK_EQ(state_, kFlushing);
- DCHECK_EQ(
- media_component_device_->GetState(), MediaComponentDevice::kStateRunning);
+
// Note: returning to idle state aborts any pending frame push.
- media_component_device_->SetState(MediaComponentDevice::kStateIdle);
- pending_push_ = false;
+ pushed_buffer_.set_buffer(nullptr);
// Break the feeding loop.
enable_feeding_ = false;
@@ -166,18 +129,6 @@ void AvPipelineImpl::Stop() {
// Stop feeding the pipeline.
enable_feeding_ = false;
-
- // Release hardware resources on Stop.
- if (media_component_device_->GetState() ==
- MediaComponentDevice::kStatePaused ||
- media_component_device_->GetState() ==
- MediaComponentDevice::kStateRunning) {
- media_component_device_->SetState(MediaComponentDevice::kStateIdle);
- }
- if (media_component_device_->GetState() == MediaComponentDevice::kStateIdle) {
- media_component_device_->SetState(
- MediaComponentDevice::kStateUninitialized);
- }
}
void AvPipelineImpl::SetCdm(BrowserCdmCast* media_keys) {
@@ -193,16 +144,6 @@ void AvPipelineImpl::SetCdm(BrowserCdmCast* media_keys) {
base::Bind(&AvPipelineImpl::OnCdmDestroyed, weak_this_));
}
-void AvPipelineImpl::OnEos() {
- DCHECK(thread_checker_.CalledOnValidThread());
- CMALOG(kLogControl) << __FUNCTION__;
- if (state_ != kPlaying)
- return;
-
- if (!client_.eos_cb.is_null())
- client_.eos_cb.Run();
-}
-
void AvPipelineImpl::FetchBufferIfNeeded() {
DCHECK(thread_checker_.CalledOnValidThread());
if (!enable_feeding_)
@@ -245,7 +186,7 @@ void AvPipelineImpl::ProcessPendingBuffer() {
return;
}
- if (!pending_buffer_.get() || pending_push_)
+ if (!pending_buffer_.get() || pushed_buffer_.buffer())
return;
// Break the feeding loop when the end of stream is reached.
@@ -289,21 +230,20 @@ void AvPipelineImpl::ProcessPendingBuffer() {
buffering_state_->SetMaxRenderingTime(timestamp);
}
- MediaComponentDevice::FrameStatus status = media_component_device_->PushFrame(
- decrypt_context.release(), new CastDecoderBufferImpl(pending_buffer_),
- new FrameStatusCBImpl(
- base::Bind(&AvPipelineImpl::OnFramePushed, weak_this_)));
+ DCHECK(!pushed_buffer_.buffer());
+ pushed_buffer_.set_buffer(pending_buffer_);
+ MediaPipelineBackend::BufferStatus status =
+ decoder_->PushBuffer(decrypt_context.release(), &pushed_buffer_);
pending_buffer_ = scoped_refptr<DecoderBufferBase>();
- pending_push_ = (status == MediaComponentDevice::kFramePending);
- if (!pending_push_)
- OnFramePushed(status);
+ if (status != MediaPipelineBackend::kBufferPending)
+ OnBufferPushed(status);
}
-void AvPipelineImpl::OnFramePushed(MediaComponentDevice::FrameStatus status) {
+void AvPipelineImpl::OnBufferPushed(MediaPipelineBackend::BufferStatus status) {
DCHECK(thread_checker_.CalledOnValidThread());
- pending_push_ = false;
- if (status == MediaComponentDevice::kFrameFailed) {
+ pushed_buffer_.set_buffer(nullptr);
+ if (status == MediaPipelineBackend::kBufferFailed) {
LOG(WARNING) << "AvPipelineImpl: PushFrame failed";
enable_feeding_ = false;
state_ = kError;
@@ -329,7 +269,7 @@ void AvPipelineImpl::OnCdmDestroyed() {
media_keys_ = NULL;
}
-void AvPipelineImpl::OnFrameBuffered(
+void AvPipelineImpl::OnDataBuffered(
const scoped_refptr<DecoderBufferBase>& buffer,
bool is_at_max_capacity) {
DCHECK(thread_checker_.CalledOnValidThread());
diff --git a/chromecast/media/cma/pipeline/av_pipeline_impl.h b/chromecast/media/cma/pipeline/av_pipeline_impl.h
index 2aa9d51..fe1a828 100644
--- a/chromecast/media/cma/pipeline/av_pipeline_impl.h
+++ b/chromecast/media/cma/pipeline/av_pipeline_impl.h
@@ -12,8 +12,8 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
-#include "chromecast/media/cma/pipeline/av_pipeline_client.h"
-#include "chromecast/public/media/media_component_device.h"
+#include "chromecast/media/cma/base/cast_decoder_buffer_impl.h"
+#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/stream_id.h"
namespace media {
@@ -28,7 +28,6 @@ class BufferingFrameProvider;
class BufferingState;
class CodedFrameProvider;
class DecoderBufferBase;
-class MediaComponentDevice;
class AvPipelineImpl {
public:
@@ -47,9 +46,8 @@ class AvPipelineImpl {
const ::media::AudioDecoderConfig&,
const ::media::VideoDecoderConfig&)> UpdateConfigCB;
- AvPipelineImpl(
- MediaComponentDevice* media_component_device,
- const UpdateConfigCB& update_config_cb);
+ AvPipelineImpl(MediaPipelineBackend::Decoder* decoder,
+ const UpdateConfigCB& update_config_cb);
~AvPipelineImpl();
// Setting the frame provider or the client must be done in the
@@ -57,10 +55,6 @@ class AvPipelineImpl {
void SetCodedFrameProvider(scoped_ptr<CodedFrameProvider> frame_provider,
size_t max_buffer_size,
size_t max_frame_size);
- void SetClient(const AvPipelineClient& client);
-
- // Initialize the pipeline.
- bool Initialize();
// Setup the pipeline and ensure samples are available for the given media
// time, then start rendering samples.
@@ -79,14 +73,13 @@ class AvPipelineImpl {
void SetCdm(BrowserCdmCast* media_keys);
+ void OnBufferPushed(MediaPipelineBackend::BufferStatus status);
+
private:
// Callback invoked when the CDM state has changed in a way that might
// impact media playback.
void OnCdmStateChange();
- // Callback invoked when playback has reached the end of stream.
- void OnEos();
-
// Feed the pipeline, getting the frames from |frame_provider_|.
void FetchBufferIfNeeded();
@@ -98,28 +91,24 @@ class AvPipelineImpl {
// Process a pending buffer.
void ProcessPendingBuffer();
- void OnFramePushed(MediaComponentDevice::FrameStatus status);
-
// Callbacks:
// - when BrowserCdm updated its state.
// - when BrowserCdm has been destroyed.
void OnCdmStateChanged();
void OnCdmDestroyed();
- // Callback invoked when a frame has been buffered by |frame_provider_|
+ // Callback invoked when a media buffer has been buffered by |frame_provider_|
// which is a BufferingFrameProvider.
- void OnFrameBuffered(const scoped_refptr<DecoderBufferBase>& buffer,
- bool is_at_max_capacity);
+ void OnDataBuffered(const scoped_refptr<DecoderBufferBase>& buffer,
+ bool is_at_max_capacity);
void UpdatePlayableFrames();
base::ThreadChecker thread_checker_;
UpdateConfigCB update_config_cb_;
- AvPipelineClient client_;
-
// Backends.
- MediaComponentDevice* media_component_device_;
+ MediaPipelineBackend::Decoder* decoder_;
// AV pipeline state.
State state_;
@@ -147,11 +136,11 @@ class AvPipelineImpl {
// Indicate whether there is a pending buffer read.
bool pending_read_;
- // Pending buffer.
+ // Pending buffer (not yet pushed to the device).
scoped_refptr<DecoderBufferBase> pending_buffer_;
- // Indicate if there is a frame being pushed to the audio device.
- bool pending_push_;
+ // Buffer that has been pushed to the device but not processed yet.
+ CastDecoderBufferImpl pushed_buffer_;
// The media time is retrieved at regular intervals.
// Indicate whether time update is enabled.
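
The net effect of the AvPipelineImpl changes is that the MediaComponentDevice state machine, FrameStatusCBImpl and MediaComponentDeviceClientImpl plumbing collapse into a single in-flight CastDecoderBufferImpl plus one completion callback. A rough sketch of the new push cycle, limited to the MediaPipelineBackend::Decoder calls used in this patch (PushPendingBuffer is an illustrative name for the tail of ProcessPendingBuffer):

// Push one buffer and remember it until the backend reports completion.
void AvPipelineImpl::PushPendingBuffer(DecryptContext* decrypt_context) {
  DCHECK(!pushed_buffer_.buffer());            // at most one push in flight
  pushed_buffer_.set_buffer(pending_buffer_);  // backend reads through this wrapper
  MediaPipelineBackend::BufferStatus status =
      decoder_->PushBuffer(decrypt_context, &pushed_buffer_);
  pending_buffer_ = scoped_refptr<DecoderBufferBase>();
  if (status != MediaPipelineBackend::kBufferPending)
    OnBufferPushed(status);  // completed synchronously
  // Otherwise MediaPipelineImpl::OnPushBufferComplete() forwards the final
  // status to OnBufferPushed() once the backend is done with the buffer.
}

void AvPipelineImpl::OnBufferPushed(MediaPipelineBackend::BufferStatus status) {
  pushed_buffer_.set_buffer(nullptr);          // ready for the next push
  if (status == MediaPipelineBackend::kBufferFailed) {
    enable_feeding_ = false;
    state_ = kError;
    return;
  }
  // ...otherwise continue the feeding loop (FetchBufferIfNeeded, ProcessPendingBuffer).
}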
diff --git a/chromecast/media/cma/pipeline/frame_status_cb_impl.cc b/chromecast/media/cma/pipeline/frame_status_cb_impl.cc
deleted file mode 100644
index 06769f7..0000000
--- a/chromecast/media/cma/pipeline/frame_status_cb_impl.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/pipeline/frame_status_cb_impl.h"
-
-#include "base/bind.h"
-#include "base/location.h"
-#include "base/single_thread_task_runner.h"
-#include "base/thread_task_runner_handle.h"
-
-namespace chromecast {
-namespace media {
-
-FrameStatusCBImpl::FrameStatusCBImpl(const CallbackType& cb)
- : cb_(cb), task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
-
-FrameStatusCBImpl::~FrameStatusCBImpl() {}
-
-void FrameStatusCBImpl::Run(MediaComponentDevice::FrameStatus status) {
- if (task_runner_->BelongsToCurrentThread()) {
- if (!cb_.is_null())
- cb_.Run(status);
- } else {
- task_runner_->PostTask(FROM_HERE, base::Bind(cb_, status));
- }
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/pipeline/frame_status_cb_impl.h b/chromecast/media/cma/pipeline/frame_status_cb_impl.h
deleted file mode 100644
index 9cd3de7..0000000
--- a/chromecast/media/cma/pipeline/frame_status_cb_impl.h
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_PIPELINE_FRAME_STATUS_CB_IMPL_H_
-#define CHROMECAST_MEDIA_CMA_PIPELINE_FRAME_STATUS_CB_IMPL_H_
-
-#include "base/callback.h"
-#include "base/memory/ref_counted.h"
-#include "chromecast/public/media/media_component_device.h"
-
-namespace base {
-class SingleThreadTaskRunner;
-}
-
-namespace chromecast {
-namespace media {
-
-// Helper for implementing MediaComponentDevice::FrameStatusCB with
-// a base::Callback.
-class FrameStatusCBImpl : public MediaComponentDevice::FrameStatusCB {
- public:
- typedef base::Callback<void(MediaComponentDevice::FrameStatus)> CallbackType;
-
- FrameStatusCBImpl(const CallbackType& cb);
- ~FrameStatusCBImpl() override;
-
- void Run(MediaComponentDevice::FrameStatus status) override;
-
- private:
- CallbackType cb_;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_PIPELINE_FRAME_STATUS_CB_IMPL_H_
diff --git a/chromecast/media/cma/pipeline/media_component_device_client_impl.cc b/chromecast/media/cma/pipeline/media_component_device_client_impl.cc
deleted file mode 100644
index e81ec0a..0000000
--- a/chromecast/media/cma/pipeline/media_component_device_client_impl.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/pipeline/media_component_device_client_impl.h"
-
-#include "base/bind.h"
-#include "base/location.h"
-#include "base/single_thread_task_runner.h"
-#include "base/thread_task_runner_handle.h"
-
-namespace chromecast {
-namespace media {
-
-MediaComponentDeviceClientImpl::MediaComponentDeviceClientImpl(
- const base::Closure& eos_cb)
- : eos_cb_(eos_cb), task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
-
-MediaComponentDeviceClientImpl::~MediaComponentDeviceClientImpl() {}
-
-void MediaComponentDeviceClientImpl::OnEndOfStream() {
- if (task_runner_->BelongsToCurrentThread()) {
- if (!eos_cb_.is_null())
- eos_cb_.Run();
- } else {
- task_runner_->PostTask(FROM_HERE, eos_cb_);
- }
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/pipeline/media_component_device_client_impl.h b/chromecast/media/cma/pipeline/media_component_device_client_impl.h
deleted file mode 100644
index 8f47d30..0000000
--- a/chromecast/media/cma/pipeline/media_component_device_client_impl.h
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_PIPELINE_MEDIA_COMPONENT_DEVICE_CLIENT_IMPL_H_
-#define CHROMECAST_MEDIA_CMA_PIPELINE_MEDIA_COMPONENT_DEVICE_CLIENT_IMPL_H_
-
-#include "base/callback.h"
-#include "base/memory/ref_counted.h"
-#include "chromecast/public/media/media_component_device.h"
-#include "chromecast/public/media/video_pipeline_device.h"
-
-namespace base {
-class SingleThreadTaskRunner;
-}
-
-namespace chromecast {
-namespace media {
-
-// Helper for implementing MediaComponentDevice::Client with
-// a base::Callback.
-class MediaComponentDeviceClientImpl : public MediaComponentDevice::Client {
- public:
- MediaComponentDeviceClientImpl(const base::Closure& eos_cb);
- ~MediaComponentDeviceClientImpl() override;
-
- void OnEndOfStream() override;
-
- private:
- base::Closure eos_cb_;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_PIPELINE_MEDIA_COMPONENT_DEVICE_CLIENT_IMPL_H_
diff --git a/chromecast/media/cma/pipeline/media_pipeline.h b/chromecast/media/cma/pipeline/media_pipeline.h
deleted file mode 100644
index 625917c..0000000
--- a/chromecast/media/cma/pipeline/media_pipeline.h
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_PIPELINE_MEDIA_PIPELINE_H_
-#define CHROMECAST_MEDIA_CMA_PIPELINE_MEDIA_PIPELINE_H_
-
-#include <vector>
-
-#include "base/basictypes.h"
-#include "base/macros.h"
-#include "base/time/time.h"
-#include "media/base/pipeline_status.h"
-
-namespace media {
-class AudioDecoderConfig;
-class BrowserCdm;
-class VideoDecoderConfig;
-}
-
-namespace chromecast {
-namespace media {
-class AudioPipeline;
-class CodedFrameProvider;
-struct MediaPipelineClient;
-class VideoPipeline;
-
-class MediaPipeline {
- public:
- MediaPipeline() {}
- virtual ~MediaPipeline() {}
-
- // Set the media pipeline client.
- virtual void SetClient(const MediaPipelineClient& client) = 0;
-
- // Set the CDM to use for decryption.
- // The CDM is refered by its id.
- virtual void SetCdm(int cdm_id) = 0;
-
- // Return the audio/video pipeline owned by the MediaPipeline.
- virtual AudioPipeline* GetAudioPipeline() const = 0;
- virtual VideoPipeline* GetVideoPipeline() const = 0;
-
- // Create an audio/video pipeline.
- // MediaPipeline owns the resulting audio/video pipeline.
- // Only one audio and one video pipeline can be created.
- virtual void InitializeAudio(
- const ::media::AudioDecoderConfig& config,
- scoped_ptr<CodedFrameProvider> frame_provider,
- const ::media::PipelineStatusCB& status_cb) = 0;
- virtual void InitializeVideo(
- const std::vector<::media::VideoDecoderConfig>& configs,
- scoped_ptr<CodedFrameProvider> frame_provider,
- const ::media::PipelineStatusCB& status_cb) = 0;
-
- // Control the media pipeline state machine.
- virtual void StartPlayingFrom(base::TimeDelta time) = 0;
- virtual void Flush(const ::media::PipelineStatusCB& status_cb) = 0;
- virtual void Stop() = 0;
-
- // Set the playback rate.
- virtual void SetPlaybackRate(double playback_rate) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MediaPipeline);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_PIPELINE_MEDIA_PIPELINE_H_
diff --git a/chromecast/media/cma/pipeline/media_pipeline_client.h b/chromecast/media/cma/pipeline/media_pipeline_client.h
index 1918ef1..340a3e5 100644
--- a/chromecast/media/cma/pipeline/media_pipeline_client.h
+++ b/chromecast/media/cma/pipeline/media_pipeline_client.h
@@ -14,8 +14,8 @@ namespace chromecast {
namespace media {
struct MediaPipelineClient {
- typedef base::Callback<void(
- base::TimeDelta, base::TimeDelta, base::TimeTicks)> TimeUpdateCB;
+ typedef base::Callback<
+ void(base::TimeDelta, base::TimeDelta, base::TimeTicks)> TimeUpdateCB;
MediaPipelineClient();
~MediaPipelineClient();
diff --git a/chromecast/media/cma/pipeline/media_pipeline_impl.cc b/chromecast/media/cma/pipeline/media_pipeline_impl.cc
index 1b39870..85ead1e 100644
--- a/chromecast/media/cma/pipeline/media_pipeline_impl.cc
+++ b/chromecast/media/cma/pipeline/media_pipeline_impl.cc
@@ -19,7 +19,6 @@
#include "chromecast/media/cma/base/coded_frame_provider.h"
#include "chromecast/media/cma/pipeline/audio_pipeline_impl.h"
#include "chromecast/media/cma/pipeline/video_pipeline_impl.h"
-#include "chromecast/public/media/media_clock_device.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "media/base/timestamp_constants.h"
@@ -51,9 +50,13 @@ const int kStatisticsUpdatePeriod = 4;
} // namespace
MediaPipelineImpl::MediaPipelineImpl()
- : has_audio_(false),
+ : audio_decoder_(nullptr),
+ video_decoder_(nullptr),
+ backend_initialized_(false),
+ has_audio_(false),
has_video_(false),
- target_playback_rate_(0.0),
+ paused_(false),
+ target_playback_rate_(1.0f),
enable_time_update_(false),
pending_time_update_task_(false),
statistics_rolling_counter_(0),
@@ -84,7 +87,6 @@ void MediaPipelineImpl::Initialize(
CMALOG(kLogControl) << __FUNCTION__;
DCHECK(thread_checker_.CalledOnValidThread());
media_pipeline_backend_.reset(media_pipeline_backend.release());
- clock_device_ = media_pipeline_backend_->GetClock();
if (!client_.pipeline_backend_created_cb.is_null())
client_.pipeline_backend_created_cb.Run();
@@ -101,12 +103,6 @@ void MediaPipelineImpl::Initialize(
buffering_config,
base::Bind(&MediaPipelineImpl::OnBufferingNotification, weak_this_)));
}
-
- audio_pipeline_.reset(
- new AudioPipelineImpl(media_pipeline_backend_->GetAudio()));
-
- video_pipeline_.reset(
- new VideoPipelineImpl(media_pipeline_backend_->GetVideo()));
}
void MediaPipelineImpl::SetClient(const MediaPipelineClient& client) {
@@ -127,6 +123,36 @@ void MediaPipelineImpl::SetCdm(int cdm_id) {
// One possibility would be a GetCdmByIdCB that's passed in.
}
+void MediaPipelineImpl::OnVideoResolutionChanged(
+ MediaPipelineBackend::VideoDecoder* decoder,
+ const Size& size) {
+ DCHECK(decoder == video_decoder_);
+ video_pipeline_->OnNaturalSizeChanged(size);
+}
+
+void MediaPipelineImpl::OnPushBufferComplete(
+ MediaPipelineBackend::Decoder* decoder,
+ MediaPipelineBackend::BufferStatus status) {
+ if (decoder == audio_decoder_)
+ audio_pipeline_->OnBufferPushed(status);
+ else if (decoder == video_decoder_)
+ video_pipeline_->OnBufferPushed(status);
+}
+
+void MediaPipelineImpl::OnEndOfStream(MediaPipelineBackend::Decoder* decoder) {
+ if (decoder == audio_decoder_)
+ audio_pipeline_->OnEndOfStream();
+ else if (decoder == video_decoder_)
+ video_pipeline_->OnEndOfStream();
+}
+
+void MediaPipelineImpl::OnDecoderError(MediaPipelineBackend::Decoder* decoder) {
+ if (decoder == audio_decoder_)
+ audio_pipeline_->OnError();
+ else if (decoder == video_decoder_)
+ video_pipeline_->OnError();
+}
+
void MediaPipelineImpl::SetCdm(BrowserCdmCast* cdm) {
CMALOG(kLogControl) << __FUNCTION__;
DCHECK(thread_checker_.CalledOnValidThread());
@@ -134,41 +160,41 @@ void MediaPipelineImpl::SetCdm(BrowserCdmCast* cdm) {
video_pipeline_->SetCdm(cdm);
}
-AudioPipeline* MediaPipelineImpl::GetAudioPipeline() const {
- return audio_pipeline_.get();
-}
-
-VideoPipeline* MediaPipelineImpl::GetVideoPipeline() const {
- return video_pipeline_.get();
-}
-
void MediaPipelineImpl::InitializeAudio(
const ::media::AudioDecoderConfig& config,
+ const AvPipelineClient& client,
scoped_ptr<CodedFrameProvider> frame_provider,
const ::media::PipelineStatusCB& status_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!has_audio_);
- if (clock_device_->GetState() == MediaClockDevice::kStateUninitialized &&
- !clock_device_->SetState(MediaClockDevice::kStateIdle)) {
- status_cb.Run(::media::PIPELINE_ERROR_INITIALIZATION_FAILED);
+
+ has_audio_ = true;
+
+ audio_decoder_ = media_pipeline_backend_->CreateAudioDecoder();
+ if (!audio_decoder_) {
+ status_cb.Run(::media::PIPELINE_ERROR_ABORT);
return;
}
- has_audio_ = true;
+ audio_pipeline_.reset(new AudioPipelineImpl(audio_decoder_, client));
audio_pipeline_->Initialize(config, frame_provider.Pass(), status_cb);
}
void MediaPipelineImpl::InitializeVideo(
- const std::vector<::media::VideoDecoderConfig>& configs,
+ const std::vector< ::media::VideoDecoderConfig>& configs,
+ const VideoPipelineClient& client,
scoped_ptr<CodedFrameProvider> frame_provider,
const ::media::PipelineStatusCB& status_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!has_video_);
- if (clock_device_->GetState() == MediaClockDevice::kStateUninitialized &&
- !clock_device_->SetState(MediaClockDevice::kStateIdle)) {
- status_cb.Run(::media::PIPELINE_ERROR_INITIALIZATION_FAILED);
+
+ has_video_ = true;
+ video_decoder_ = media_pipeline_backend_->CreateVideoDecoder();
+ if (!video_decoder_) {
+ status_cb.Run(::media::PIPELINE_ERROR_ABORT);
return;
}
- has_video_ = true;
+ video_pipeline_.reset(new VideoPipelineImpl(video_decoder_, client));
+
video_pipeline_->Initialize(configs, frame_provider.Pass(), status_cb);
}
@@ -177,14 +203,20 @@ void MediaPipelineImpl::StartPlayingFrom(base::TimeDelta time) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(has_audio_ || has_video_);
DCHECK(!pending_flush_callbacks_);
+ // When starting, we always enter the "playing" state (not paused).
+ paused_ = false;
- // Reset the start of the timeline.
- DCHECK_EQ(clock_device_->GetState(), MediaClockDevice::kStateIdle);
- clock_device_->ResetTimeline(time.InMicroseconds());
+ // Lazily initialize the backend on first playback.
+ if (!backend_initialized_) {
+ backend_initialized_ = media_pipeline_backend_->Initialize(this);
+ if (!backend_initialized_) {
+ OnError(::media::PIPELINE_ERROR_ABORT);
+ return;
+ }
+ }
- // Start the clock. If the playback rate is 0, then the clock is started
- // but does not increase.
- if (!clock_device_->SetState(MediaClockDevice::kStateRunning)) {
+ // Start the backend.
+ if (!media_pipeline_backend_->Start(time.InMicroseconds())) {
OnError(::media::PIPELINE_ERROR_ABORT);
return;
}
@@ -224,16 +256,14 @@ void MediaPipelineImpl::Flush(const ::media::PipelineStatusCB& status_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(has_audio_ || has_video_);
DCHECK(!pending_flush_callbacks_);
- DCHECK(clock_device_->GetState() == MediaClockDevice::kStateUninitialized ||
- clock_device_->GetState() == MediaClockDevice::kStateRunning);
// No need to update media time anymore.
enable_time_update_ = false;
buffering_controller_->Reset();
- // The clock should return to idle.
- if (!clock_device_->SetState(MediaClockDevice::kStateIdle)) {
+ // Stop the backend.
+ if (!media_pipeline_backend_->Stop()) {
status_cb.Run(::media::PIPELINE_ERROR_ABORT);
return;
}
@@ -269,34 +299,42 @@ void MediaPipelineImpl::Stop() {
// No need to update media time anymore.
enable_time_update_ = false;
- // Release hardware resources on Stop.
- // Note: Stop can be called from any state.
- if (clock_device_->GetState() == MediaClockDevice::kStateRunning)
- clock_device_->SetState(MediaClockDevice::kStateIdle);
- if (clock_device_->GetState() == MediaClockDevice::kStateIdle)
- clock_device_->SetState(MediaClockDevice::kStateUninitialized);
-
// Stop both the audio and video pipeline.
if (has_audio_)
audio_pipeline_->Stop();
if (has_video_)
video_pipeline_->Stop();
+
+ // Release hardware resources on Stop.
+ audio_pipeline_ = nullptr;
+ video_pipeline_ = nullptr;
+ media_pipeline_backend_.reset();
}
void MediaPipelineImpl::SetPlaybackRate(double rate) {
CMALOG(kLogControl) << __FUNCTION__ << " rate=" << rate;
DCHECK(thread_checker_.CalledOnValidThread());
+ if (!buffering_controller_ || !buffering_controller_->IsBuffering()) {
+ if (paused_ && rate != 0.0f) {
+ if (rate != target_playback_rate_)
+ media_pipeline_backend_->SetPlaybackRate(rate);
+ paused_ = false;
+ media_pipeline_backend_->Resume();
+ } else if (!paused_ && rate == 0.0f) {
+ paused_ = true;
+ media_pipeline_backend_->Pause();
+ } else {
+ media_pipeline_backend_->SetPlaybackRate(rate);
+ }
+ }
target_playback_rate_ = rate;
- if (!buffering_controller_ || !buffering_controller_->IsBuffering())
- media_pipeline_backend_->GetClock()->SetRate(rate);
-}
-
-AudioPipelineImpl* MediaPipelineImpl::GetAudioPipelineImpl() const {
- return audio_pipeline_.get();
}
-VideoPipelineImpl* MediaPipelineImpl::GetVideoPipelineImpl() const {
- return video_pipeline_.get();
+void MediaPipelineImpl::SetVolume(float volume) {
+ CMALOG(kLogControl) << __FUNCTION__ << " vol=" << volume;
+ DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(audio_pipeline_);
+ audio_pipeline_->SetVolume(volume);
}
void MediaPipelineImpl::StateTransition(
@@ -317,16 +355,15 @@ void MediaPipelineImpl::OnBufferingNotification(bool is_buffering) {
client_.buffering_state_cb.Run(buffering_state);
}
- if (media_pipeline_backend_->GetClock()->GetState() ==
- MediaClockDevice::kStateUninitialized) {
- return;
- }
-
if (is_buffering) {
// Do not consume data in a rebuffering phase.
- media_pipeline_backend_->GetClock()->SetRate(0.0);
- } else {
- media_pipeline_backend_->GetClock()->SetRate(target_playback_rate_);
+ if (!paused_) {
+ paused_ = true;
+ media_pipeline_backend_->Pause();
+ }
+ } else if (paused_) {
+ paused_ = false;
+ media_pipeline_backend_->Resume();
}
}
@@ -336,14 +373,16 @@ void MediaPipelineImpl::UpdateMediaTime() {
return;
if (statistics_rolling_counter_ == 0) {
- audio_pipeline_->UpdateStatistics();
- video_pipeline_->UpdateStatistics();
+ if (audio_pipeline_)
+ audio_pipeline_->UpdateStatistics();
+ if (video_pipeline_)
+ video_pipeline_->UpdateStatistics();
}
statistics_rolling_counter_ =
(statistics_rolling_counter_ + 1) % kStatisticsUpdatePeriod;
- base::TimeDelta media_time =
- base::TimeDelta::FromMicroseconds(clock_device_->GetTimeMicroseconds());
+ base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
+ media_pipeline_backend_->GetCurrentPts());
if (media_time == ::media::kNoTimestamp()) {
pending_time_update_task_ = true;
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
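
MediaPipelineImpl now owns the backend lifecycle that the MediaClockDevice state machine used to express. A condensed sketch of the calls this file makes, assuming only the MediaPipelineBackend methods exercised in this patch (Initialize, Start, Stop, Pause, Resume, SetPlaybackRate, GetCurrentPts) and eliding error handling:

// StartPlayingFrom(): lazily initialize the backend with |this| as delegate,
// then start playback at the requested PTS.
if (!backend_initialized_)
  backend_initialized_ = media_pipeline_backend_->Initialize(this);
media_pipeline_backend_->Start(time.InMicroseconds());

// SetPlaybackRate(): a zero rate pauses, a non-zero rate resumes and/or
// updates the backend rate; rebuffering also toggles Pause()/Resume().
if (rate == 0.0f)
  media_pipeline_backend_->Pause();
else
  media_pipeline_backend_->SetPlaybackRate(rate);  // plus Resume() if paused

// Flush() stops the backend; Stop() additionally destroys the pipelines and
// releases the backend itself.
media_pipeline_backend_->Stop();

// Media time is read straight from the backend instead of the clock device.
base::TimeDelta media_time = base::TimeDelta::FromMicroseconds(
    media_pipeline_backend_->GetCurrentPts());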
diff --git a/chromecast/media/cma/pipeline/media_pipeline_impl.h b/chromecast/media/cma/pipeline/media_pipeline_impl.h
index e2dba4f..92453cd 100644
--- a/chromecast/media/cma/pipeline/media_pipeline_impl.h
+++ b/chromecast/media/cma/pipeline/media_pipeline_impl.h
@@ -12,20 +12,26 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
#include "chromecast/media/cma/pipeline/load_type.h"
-#include "chromecast/media/cma/pipeline/media_pipeline.h"
#include "chromecast/media/cma/pipeline/media_pipeline_client.h"
+#include "chromecast/public/media/media_pipeline_backend.h"
#include "media/base/serial_runner.h"
+namespace media {
+class AudioDecoderConfig;
+class VideoDecoderConfig;
+} // namespace media
+
namespace chromecast {
namespace media {
class AudioPipelineImpl;
+struct AvPipelineClient;
+struct VideoPipelineClient;
class BrowserCdmCast;
class BufferingController;
-class MediaClockDevice;
-class MediaPipelineBackend;
+class CodedFrameProvider;
class VideoPipelineImpl;
-class MediaPipelineImpl : public MediaPipeline {
+class MediaPipelineImpl : public MediaPipelineBackend::Delegate {
public:
MediaPipelineImpl();
~MediaPipelineImpl() override;
@@ -35,26 +41,30 @@ class MediaPipelineImpl : public MediaPipeline {
void Initialize(LoadType load_type,
scoped_ptr<MediaPipelineBackend> media_pipeline_backend);
- // MediaPipeline implementation.
- void SetClient(const MediaPipelineClient& client) override;
- void SetCdm(int cdm_id) override;
- AudioPipeline* GetAudioPipeline() const override;
- VideoPipeline* GetVideoPipeline() const override;
- void InitializeAudio(
- const ::media::AudioDecoderConfig& config,
- scoped_ptr<CodedFrameProvider> frame_provider,
- const ::media::PipelineStatusCB& status_cb) override;
- void InitializeVideo(
- const std::vector<::media::VideoDecoderConfig>& configs,
- scoped_ptr<CodedFrameProvider> frame_provider,
- const ::media::PipelineStatusCB& status_cb) override;
- void StartPlayingFrom(base::TimeDelta time) override;
- void Flush(const ::media::PipelineStatusCB& status_cb) override;
- void Stop() override;
- void SetPlaybackRate(double playback_rate) override;
-
- AudioPipelineImpl* GetAudioPipelineImpl() const;
- VideoPipelineImpl* GetVideoPipelineImpl() const;
+ void SetClient(const MediaPipelineClient& client);
+ void SetCdm(int cdm_id);
+
+ // MediaPipelineBackend::Delegate implementation:
+ void OnVideoResolutionChanged(MediaPipelineBackend::VideoDecoder* decoder,
+ const Size& size) override;
+ void OnPushBufferComplete(MediaPipelineBackend::Decoder* decoder,
+ MediaPipelineBackend::BufferStatus status) override;
+ void OnEndOfStream(MediaPipelineBackend::Decoder* decoder) override;
+ void OnDecoderError(MediaPipelineBackend::Decoder* decoder) override;
+
+ void InitializeAudio(const ::media::AudioDecoderConfig& config,
+ const AvPipelineClient& client,
+ scoped_ptr<CodedFrameProvider> frame_provider,
+ const ::media::PipelineStatusCB& status_cb);
+ void InitializeVideo(const std::vector< ::media::VideoDecoderConfig>& configs,
+ const VideoPipelineClient& client,
+ scoped_ptr<CodedFrameProvider> frame_provider,
+ const ::media::PipelineStatusCB& status_cb);
+ void StartPlayingFrom(base::TimeDelta time);
+ void Flush(const ::media::PipelineStatusCB& status_cb);
+ void Stop();
+ void SetPlaybackRate(double playback_rate);
+ void SetVolume(float volume);
void SetCdm(BrowserCdmCast* cdm);
@@ -77,20 +87,21 @@ class MediaPipelineImpl : public MediaPipeline {
// Interface with the underlying hardware media pipeline.
scoped_ptr<MediaPipelineBackend> media_pipeline_backend_;
- MediaClockDevice* clock_device_;
+ MediaPipelineBackend::AudioDecoder* audio_decoder_;
+ MediaPipelineBackend::VideoDecoder* video_decoder_;
+ bool backend_initialized_;
bool has_audio_;
bool has_video_;
scoped_ptr<AudioPipelineImpl> audio_pipeline_;
scoped_ptr<VideoPipelineImpl> video_pipeline_;
scoped_ptr< ::media::SerialRunner> pending_flush_callbacks_;
+ // Whether or not the backend is currently paused.
+ bool paused_;
// Playback rate set by the upper layer.
float target_playback_rate_;
- // Indicate a possible re-buffering phase.
- bool is_buffering_;
-
// The media time is retrieved at regular intervals.
// Indicate whether time update is enabled.
bool enable_time_update_;
diff --git a/chromecast/media/cma/pipeline/video_pipeline.cc b/chromecast/media/cma/pipeline/video_pipeline.cc
deleted file mode 100644
index 6b428b5..0000000
--- a/chromecast/media/cma/pipeline/video_pipeline.cc
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/pipeline/video_pipeline.h"
-
-namespace chromecast {
-namespace media {
-
-VideoPipeline::VideoPipeline() {
-}
-
-VideoPipeline::~VideoPipeline() {
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/pipeline/video_pipeline.h b/chromecast/media/cma/pipeline/video_pipeline.h
deleted file mode 100644
index 68a9b8c..0000000
--- a/chromecast/media/cma/pipeline/video_pipeline.h
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_PIPELINE_VIDEO_PIPELINE_H_
-#define CHROMECAST_MEDIA_CMA_PIPELINE_VIDEO_PIPELINE_H_
-
-#include "base/macros.h"
-
-namespace chromecast {
-namespace media {
-struct VideoPipelineClient;
-
-class VideoPipeline {
- public:
- VideoPipeline();
- virtual ~VideoPipeline();
-
- virtual void SetClient(const VideoPipelineClient& client) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoPipeline);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_PIPELINE_VIDEO_PIPELINE_H_
diff --git a/chromecast/media/cma/pipeline/video_pipeline_device_client_impl.cc b/chromecast/media/cma/pipeline/video_pipeline_device_client_impl.cc
deleted file mode 100644
index a604be3..0000000
--- a/chromecast/media/cma/pipeline/video_pipeline_device_client_impl.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/pipeline/video_pipeline_device_client_impl.h"
-
-#include "base/bind.h"
-#include "base/location.h"
-#include "base/single_thread_task_runner.h"
-#include "base/thread_task_runner_handle.h"
-#include "chromecast/public/graphics_types.h"
-
-namespace chromecast {
-namespace media {
-
-VideoPipelineDeviceClientImpl::VideoPipelineDeviceClientImpl(
- const SizeChangeCB& size_change_cb)
- : size_change_cb_(size_change_cb),
- task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
-
-VideoPipelineDeviceClientImpl::~VideoPipelineDeviceClientImpl() {}
-
-void VideoPipelineDeviceClientImpl::OnNaturalSizeChanged(const Size& size) {
- if (task_runner_->BelongsToCurrentThread()) {
- if (!size_change_cb_.is_null())
- size_change_cb_.Run(size);
- } else {
- task_runner_->PostTask(FROM_HERE, base::Bind(size_change_cb_, size));
- }
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/pipeline/video_pipeline_device_client_impl.h b/chromecast/media/cma/pipeline/video_pipeline_device_client_impl.h
deleted file mode 100644
index e39fdc9..0000000
--- a/chromecast/media/cma/pipeline/video_pipeline_device_client_impl.h
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_PIPELINE_VIDEO_PIPELINE_DEVICE_CLIENT_IMPL_H_
-#define CHROMECAST_MEDIA_CMA_PIPELINE_VIDEO_PIPELINE_DEVICE_CLIENT_IMPL_H_
-
-#include "base/callback.h"
-#include "base/memory/ref_counted.h"
-#include "chromecast/public/media/media_component_device.h"
-#include "chromecast/public/media/video_pipeline_device.h"
-
-namespace base {
-class SingleThreadTaskRunner;
-}
-
-namespace chromecast {
-namespace media {
-
-// Helper for implementing VideoPipelineDevice::VideoClient with
-// a base::Callback.
-class VideoPipelineDeviceClientImpl : public VideoPipelineDevice::VideoClient {
- public:
- typedef base::Callback<void(const Size&)> SizeChangeCB;
-
- VideoPipelineDeviceClientImpl(const SizeChangeCB& size_change_cb);
- ~VideoPipelineDeviceClientImpl() override;
-
- void OnNaturalSizeChanged(const Size& size) override;
-
- private:
- SizeChangeCB size_change_cb_;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_PIPELINE_VIDEO_PIPELINE_DEVICE_CLIENT_IMPL_H_
diff --git a/chromecast/media/cma/pipeline/video_pipeline_impl.cc b/chromecast/media/cma/pipeline/video_pipeline_impl.cc
index 89d82cf..e24a7f7 100644
--- a/chromecast/media/cma/pipeline/video_pipeline_impl.cc
+++ b/chromecast/media/cma/pipeline/video_pipeline_impl.cc
@@ -10,10 +10,8 @@
#include "chromecast/media/cma/base/coded_frame_provider.h"
#include "chromecast/media/cma/base/decoder_config_adapter.h"
#include "chromecast/media/cma/pipeline/av_pipeline_impl.h"
-#include "chromecast/media/cma/pipeline/video_pipeline_device_client_impl.h"
#include "chromecast/public/graphics_types.h"
#include "chromecast/public/media/decoder_config.h"
-#include "chromecast/public/media/video_pipeline_device.h"
#include "media/base/video_decoder_config.h"
namespace chromecast {
@@ -23,12 +21,15 @@ namespace {
const size_t kMaxVideoFrameSize = 1024 * 1024;
}
-VideoPipelineImpl::VideoPipelineImpl(VideoPipelineDevice* video_device)
- : video_device_(video_device),
+VideoPipelineImpl::VideoPipelineImpl(
+ MediaPipelineBackend::VideoDecoder* video_decoder,
+ const VideoPipelineClient& client)
+ : video_decoder_(video_decoder),
+ video_client_(client),
weak_factory_(this) {
weak_this_ = weak_factory_.GetWeakPtr();
av_pipeline_impl_.reset(new AvPipelineImpl(
- video_device_,
+ video_decoder_,
base::Bind(&VideoPipelineImpl::OnUpdateConfig, base::Unretained(this))));
}
@@ -44,8 +45,7 @@ void VideoPipelineImpl::SetCodedFrameProvider(
bool VideoPipelineImpl::StartPlayingFrom(
base::TimeDelta time,
const scoped_refptr<BufferingState>& buffering_state) {
- CMALOG(kLogControl) << "VideoPipelineImpl::StartPlayingFrom t0="
- << time.InMilliseconds();
+ CMALOG(kLogControl) << __FUNCTION__ << " t0=" << time.InMilliseconds();
// Reset the pipeline statistics.
previous_stats_ = ::media::PipelineStatistics();
@@ -65,7 +65,7 @@ bool VideoPipelineImpl::StartPlayingFrom(
}
void VideoPipelineImpl::Flush(const ::media::PipelineStatusCB& status_cb) {
- CMALOG(kLogControl) << "VideoPipelineImpl::Flush";
+ CMALOG(kLogControl) << __FUNCTION__;
if (av_pipeline_impl_->GetState() == AvPipelineImpl::kError) {
status_cb.Run(::media::PIPELINE_ERROR_ABORT);
return;
@@ -78,7 +78,7 @@ void VideoPipelineImpl::Flush(const ::media::PipelineStatusCB& status_cb) {
void VideoPipelineImpl::OnFlushDone(
const ::media::PipelineStatusCB& status_cb) {
- CMALOG(kLogControl) << "VideoPipelineImpl::OnFlushDone";
+ CMALOG(kLogControl) << __FUNCTION__;
if (av_pipeline_impl_->GetState() == AvPipelineImpl::kError) {
status_cb.Run(::media::PIPELINE_ERROR_ABORT);
return;
@@ -88,7 +88,7 @@ void VideoPipelineImpl::OnFlushDone(
}
void VideoPipelineImpl::Stop() {
- CMALOG(kLogControl) << "VideoPipelineImpl::Stop";
+ CMALOG(kLogControl) << __FUNCTION__;
av_pipeline_impl_->Stop();
av_pipeline_impl_->TransitionToState(AvPipelineImpl::kStopped);
}
@@ -97,9 +97,21 @@ void VideoPipelineImpl::SetCdm(BrowserCdmCast* media_keys) {
av_pipeline_impl_->SetCdm(media_keys);
}
-void VideoPipelineImpl::SetClient(const VideoPipelineClient& client) {
- video_client_ = client;
- av_pipeline_impl_->SetClient(client.av_pipeline_client);
+void VideoPipelineImpl::OnBufferPushed(
+ MediaPipelineBackend::BufferStatus status) {
+ av_pipeline_impl_->OnBufferPushed(status);
+}
+
+void VideoPipelineImpl::OnEndOfStream() {
+ if (!video_client_.av_pipeline_client.eos_cb.is_null())
+ video_client_.av_pipeline_client.eos_cb.Run();
+}
+
+void VideoPipelineImpl::OnError() {
+ if (!video_client_.av_pipeline_client.playback_error_cb.is_null()) {
+ video_client_.av_pipeline_client.playback_error_cb.Run(
+ ::media::PIPELINE_ERROR_COULD_NOT_RENDER);
+ }
}
void VideoPipelineImpl::Initialize(
@@ -111,8 +123,7 @@ void VideoPipelineImpl::Initialize(
CMALOG(kLogControl) << __FUNCTION__ << " "
<< config.AsHumanReadableString();
}
- video_device_->SetVideoClient(new VideoPipelineDeviceClientImpl(
- base::Bind(&VideoPipelineImpl::OnNaturalSizeChanged, weak_this_)));
+
if (frame_provider)
SetCodedFrameProvider(frame_provider.Pass());
@@ -132,8 +143,7 @@ void VideoPipelineImpl::Initialize(
video_config.additional_config = &secondary_config;
}
- if (!video_device_->SetConfig(video_config) ||
- !av_pipeline_impl_->Initialize()) {
+ if (!video_decoder_->SetConfig(video_config)) {
status_cb.Run(::media::PIPELINE_ERROR_INITIALIZATION_FAILED);
return;
}
@@ -146,10 +156,10 @@ void VideoPipelineImpl::OnUpdateConfig(
const ::media::AudioDecoderConfig& audio_config,
const ::media::VideoDecoderConfig& video_config) {
if (video_config.IsValidConfig()) {
- CMALOG(kLogControl) << "VideoPipelineImpl::OnUpdateConfig id:" << id << " "
+ CMALOG(kLogControl) << __FUNCTION__ << " id:" << id << " "
<< video_config.AsHumanReadableString();
- bool success = video_device_->SetConfig(
+ bool success = video_decoder_->SetConfig(
DecoderConfigAdapter::ToCastVideoConfig(id, video_config));
if (!success &&
!video_client_.av_pipeline_client.playback_error_cb.is_null()) {
@@ -173,9 +183,8 @@ void VideoPipelineImpl::UpdateStatistics() {
if (video_client_.av_pipeline_client.statistics_cb.is_null())
return;
- MediaComponentDevice::Statistics device_stats;
- if (!video_device_->GetStatistics(&device_stats))
- return;
+ MediaPipelineBackend::Decoder::Statistics device_stats;
+ video_decoder_->GetStatistics(&device_stats);
::media::PipelineStatistics current_stats;
current_stats.video_bytes_decoded = device_stats.decoded_bytes;
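
VideoPipelineImpl now receives its VideoPipelineClient in the constructor and talks to a MediaPipelineBackend::VideoDecoder directly; backend events are routed to it by MediaPipelineImpl rather than by a VideoPipelineDeviceClientImpl. A short sketch of the decoder-facing calls used above, with the surrounding function bodies elided:

// Configuration goes straight to the decoder; failure aborts initialization.
if (!video_decoder_->SetConfig(video_config)) {
  status_cb.Run(::media::PIPELINE_ERROR_INITIALIZATION_FAILED);
  return;
}

// Statistics are polled from the decoder (no failure path in the new API).
MediaPipelineBackend::Decoder::Statistics device_stats;
video_decoder_->GetStatistics(&device_stats);
::media::PipelineStatistics current_stats;
current_stats.video_bytes_decoded = device_stats.decoded_bytes;

// Events fanned out from MediaPipelineImpl end up in the client callbacks:
//   OnEndOfStream() -> video_client_.av_pipeline_client.eos_cb
//   OnError()       -> playback_error_cb(::media::PIPELINE_ERROR_COULD_NOT_RENDER)
//   OnVideoResolutionChanged() -> OnNaturalSizeChanged(size)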
diff --git a/chromecast/media/cma/pipeline/video_pipeline_impl.h b/chromecast/media/cma/pipeline/video_pipeline_impl.h
index e488c26..9fd3071 100644
--- a/chromecast/media/cma/pipeline/video_pipeline_impl.h
+++ b/chromecast/media/cma/pipeline/video_pipeline_impl.h
@@ -11,10 +11,9 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
-#include "chromecast/media/cma/pipeline/video_pipeline.h"
#include "chromecast/media/cma/pipeline/video_pipeline_client.h"
+#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/stream_id.h"
-#include "chromecast/public/media/video_pipeline_device.h"
namespace media {
class AudioDecoderConfig;
@@ -28,13 +27,12 @@ class AvPipelineImpl;
class BrowserCdmCast;
class BufferingState;
class CodedFrameProvider;
-class VideoPipelineDevice;
-class VideoPipelineImpl : public VideoPipeline {
+class VideoPipelineImpl {
public:
- // |buffering_controller| can be NULL.
- explicit VideoPipelineImpl(VideoPipelineDevice* video_device);
- ~VideoPipelineImpl() override;
+ VideoPipelineImpl(MediaPipelineBackend::VideoDecoder* decoder,
+ const VideoPipelineClient& client);
+ ~VideoPipelineImpl();
// Input port of the pipeline.
void SetCodedFrameProvider(scoped_ptr<CodedFrameProvider> frame_provider);
@@ -55,8 +53,10 @@ class VideoPipelineImpl : public VideoPipeline {
// Update the playback statistics for this video stream.
void UpdateStatistics();
- // VideoPipeline implementation.
- void SetClient(const VideoPipelineClient& client) override;
+ void OnBufferPushed(MediaPipelineBackend::BufferStatus status);
+ void OnEndOfStream();
+ void OnError();
+ void OnNaturalSizeChanged(const Size& size);
private:
class DeviceClientImpl;
@@ -66,9 +66,8 @@ class VideoPipelineImpl : public VideoPipeline {
void OnUpdateConfig(StreamId id,
const ::media::AudioDecoderConfig& audio_config,
const ::media::VideoDecoderConfig& video_config);
- void OnNaturalSizeChanged(const Size& size);
- VideoPipelineDevice* video_device_;
+ MediaPipelineBackend::VideoDecoder* video_decoder_;
scoped_ptr<AvPipelineImpl> av_pipeline_impl_;
VideoPipelineClient video_client_;
diff --git a/chromecast/media/cma/test/cma_end_to_end_test.cc b/chromecast/media/cma/test/cma_end_to_end_test.cc
deleted file mode 100644
index 70483e6..0000000
--- a/chromecast/media/cma/test/cma_end_to_end_test.cc
+++ /dev/null
@@ -1,106 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "base/bind.h"
-#include "base/macros.h"
-#include "base/memory/ref_counted.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/message_loop/message_loop.h"
-#include "base/time/time.h"
-#include "chromecast/base/task_runner_impl.h"
-#include "chromecast/media/cma/backend/media_pipeline_backend_default.h"
-#include "chromecast/media/cma/base/buffering_defs.h"
-#include "chromecast/media/cma/filters/cma_renderer.h"
-#include "chromecast/media/cma/pipeline/media_pipeline_impl.h"
-#include "gpu/command_buffer/client/gles2_interface_stub.h"
-#include "media/base/demuxer_stream_provider.h"
-#include "media/base/fake_demuxer_stream.h"
-#include "media/base/null_video_sink.h"
-#include "media/renderers/mock_gpu_video_accelerator_factories.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace chromecast {
-namespace media {
-
-namespace {
-
-class CmaEndToEndTest : public testing::Test {
- public:
- CmaEndToEndTest() {}
-
- void SetUp() override {
- demuxer_stream_provider_.reset(
- new ::media::FakeDemuxerStreamProvider(1, 1, false));
- null_sink_.reset(new ::media::NullVideoSink(
- false, base::TimeDelta::FromSecondsD(1.0 / 60),
- base::Bind(&MockCB::OnFrameReceived, base::Unretained(&mock_)),
- message_loop_.task_runner()));
-
- scoped_ptr<MediaPipelineImpl> media_pipeline(new MediaPipelineImpl());
- task_runner_.reset(new TaskRunnerImpl());
- MediaPipelineDeviceParams params(task_runner_.get());
- scoped_ptr<MediaPipelineBackend> backend =
- make_scoped_ptr(new MediaPipelineBackendDefault(params));
-
- gles2_.reset(new gpu::gles2::GLES2InterfaceStub());
- mock_gpu_factories_ = new ::media::MockGpuVideoAcceleratorFactories();
-
- EXPECT_CALL(*mock_gpu_factories_.get(), GetGLES2Interface())
- .WillRepeatedly(testing::Return(gles2_.get()));
-
- media_pipeline->Initialize(kLoadTypeMediaSource, backend.Pass());
-
- renderer_.reset(new CmaRenderer(media_pipeline.Pass(), null_sink_.get(),
- mock_gpu_factories_));
- }
- void TearDown() override { message_loop_.RunUntilIdle(); }
-
- ~CmaEndToEndTest() override {}
-
- protected:
- base::MessageLoop message_loop_;
- scoped_ptr<TaskRunnerImpl> task_runner_;
- scoped_ptr<::media::FakeDemuxerStreamProvider> demuxer_stream_provider_;
- scoped_ptr<CmaRenderer> renderer_;
- scoped_refptr<::media::MockGpuVideoAcceleratorFactories> mock_gpu_factories_;
- scoped_ptr<gpu::gles2::GLES2Interface> gles2_;
-
- class MockCB {
- public:
- MOCK_METHOD1(OnInitialized, void(::media::PipelineStatus));
- MOCK_METHOD1(OnFrameReceived,
- void(const scoped_refptr<::media::VideoFrame>&));
- MOCK_METHOD1(OnStatistics, void(const ::media::PipelineStatistics&));
- MOCK_METHOD1(OnBufferingState, void(::media::BufferingState));
- MOCK_METHOD0(OnEnded, void());
- MOCK_METHOD1(OnError, void(::media::PipelineStatus));
- MOCK_METHOD0(OnWaitingForDecryptionKey, void());
- };
- MockCB mock_;
-
- private:
- scoped_ptr<::media::NullVideoSink> null_sink_;
-
- DISALLOW_COPY_AND_ASSIGN(CmaEndToEndTest);
-};
-
-} // namespace
-
-TEST_F(CmaEndToEndTest, TestInitialization) {
- renderer_->Initialize(
- demuxer_stream_provider_.get(),
- base::Bind(&MockCB::OnInitialized, base::Unretained(&mock_)),
- base::Bind(&MockCB::OnStatistics, base::Unretained(&mock_)),
- base::Bind(&MockCB::OnBufferingState, base::Unretained(&mock_)),
- base::Bind(&MockCB::OnEnded, base::Unretained(&mock_)),
- base::Bind(&MockCB::OnError, base::Unretained(&mock_)),
- base::Bind(&MockCB::OnWaitingForDecryptionKey, base::Unretained(&mock_)));
-
- EXPECT_CALL(mock_, OnInitialized(::media::PIPELINE_OK));
-}
-
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/test/media_component_device_feeder_for_test.cc b/chromecast/media/cma/test/media_component_device_feeder_for_test.cc
deleted file mode 100644
index 96ec02c..0000000
--- a/chromecast/media/cma/test/media_component_device_feeder_for_test.cc
+++ /dev/null
@@ -1,130 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "chromecast/media/cma/test/media_component_device_feeder_for_test.h"
-
-#include <list>
-#include <vector>
-
-#include "base/basictypes.h"
-#include "base/bind.h"
-#include "base/logging.h"
-#include "base/memory/ref_counted.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/message_loop/message_loop.h"
-#include "base/single_thread_task_runner.h"
-#include "base/thread_task_runner_handle.h"
-#include "base/time/time.h"
-#include "chromecast/media/cma/base/cast_decoder_buffer_impl.h"
-#include "chromecast/media/cma/base/decoder_buffer_adapter.h"
-#include "chromecast/media/cma/pipeline/frame_status_cb_impl.h"
-#include "chromecast/media/cma/pipeline/media_component_device_client_impl.h"
-#include "chromecast/media/cma/test/frame_segmenter_for_test.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
-#include "chromecast/public/media/cast_decoder_buffer.h"
-#include "chromecast/public/media/decrypt_context.h"
-#include "chromecast/public/media/media_clock_device.h"
-#include "chromecast/public/media/video_pipeline_device.h"
-#include "media/base/audio_decoder_config.h"
-#include "media/base/decoder_buffer.h"
-#include "media/base/video_decoder_config.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace chromecast {
-namespace media {
-
-MediaComponentDeviceFeederForTest::MediaComponentDeviceFeederForTest(
- MediaComponentDevice *device,
- const BufferList& frames)
- : media_component_device_(device),
- rendering_frame_idx_(1),
- clock_frame_idx_(1),
- feeding_completed_(false) {
- frames_ = frames;
-}
-
-MediaComponentDeviceFeederForTest::~MediaComponentDeviceFeederForTest() {
-}
-
-void MediaComponentDeviceFeederForTest::Initialize(
- const base::Closure& eos_cb) {
- eos_cb_ = eos_cb;
-
- media_component_device_->SetClient(
- new MediaComponentDeviceClientImpl(base::Bind(
- &MediaComponentDeviceFeederForTest::OnEos, base::Unretained(this))));
-
- bool success =
- media_component_device_->SetState(MediaComponentDevice::kStateIdle);
- ASSERT_TRUE(success);
- success = media_component_device_->SetStartPts(0);
- ASSERT_TRUE(success);
- success =
- media_component_device_->SetState(MediaComponentDevice::kStatePaused);
- ASSERT_TRUE(success);
-}
-
-void MediaComponentDeviceFeederForTest::Feed() {
- // Start rendering if needed.
- if (rendering_frame_idx_ == 0) {
- media_component_device_->SetState(MediaComponentDevice::kStateRunning);
- } else {
- rendering_frame_idx_--;
- }
-
- // Possibly feed one frame
- DCHECK(!frames_.empty());
- scoped_refptr<DecoderBufferBase> buffer = frames_.front();
-
- MediaComponentDevice::FrameStatus status = media_component_device_->PushFrame(
- nullptr, // decrypt_context
- new CastDecoderBufferImpl(buffer),
- new FrameStatusCBImpl(
- base::Bind(&MediaComponentDeviceFeederForTest::OnFramePushed,
- base::Unretained(this))));
- EXPECT_NE(status, MediaComponentDevice::kFrameFailed);
- frames_.pop_front();
-
- // Feeding is done, just wait for the end of stream callback.
- if (buffer->end_of_stream() || frames_.empty()) {
- if (frames_.empty() && !buffer->end_of_stream()) {
- LOG(WARNING) << "Stream emptied without feeding EOS frame";
- }
-
- feeding_completed_ = true;
- return;
- }
-
- if (status == MediaComponentDevice::kFramePending)
- return;
-
- OnFramePushed(MediaComponentDevice::kFrameSuccess);
-}
-
-void MediaComponentDeviceFeederForTest::OnFramePushed(
- MediaComponentDevice::FrameStatus status) {
- EXPECT_NE(status, MediaComponentDevice::kFrameFailed);
- if (feeding_completed_)
- return;
-
- base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::Bind(&MediaComponentDeviceFeederForTest::Feed,
- base::Unretained(this)));
-}
-
-void MediaComponentDeviceFeederForTest::OnEos() {
- bool success = media_component_device_->SetState(
- MediaComponentDevice::kStateIdle);
- ASSERT_TRUE(success);
- success = media_component_device_->SetState(
- MediaComponentDevice::kStateUninitialized);
- ASSERT_TRUE(success);
-
- if (!eos_cb_.is_null()) {
- eos_cb_.Run();
- }
-}
-
-} // namespace media
-} // namespace chromecast
diff --git a/chromecast/media/cma/test/media_component_device_feeder_for_test.h b/chromecast/media/cma/test/media_component_device_feeder_for_test.h
deleted file mode 100644
index cd28eb4..0000000
--- a/chromecast/media/cma/test/media_component_device_feeder_for_test.h
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_MEDIA_CMA_TEST_MEDIA_COMPONENT_DEVICE_FEEDER_FOR_TEST_H_
-#define CHROMECAST_MEDIA_CMA_TEST_MEDIA_COMPONENT_DEVICE_FEEDER_FOR_TEST_H_
-
-#include <list>
-#include <vector>
-
-#include "base/basictypes.h"
-#include "base/callback.h"
-#include "base/memory/ref_counted.h"
-#include "base/memory/scoped_ptr.h"
-#include "base/time/time.h"
-#include "chromecast/public/media/audio_pipeline_device.h"
-#include "chromecast/public/media/cast_decoder_buffer.h"
-#include "chromecast/public/media/media_clock_device.h"
-#include "chromecast/public/media/video_pipeline_device.h"
-
-namespace chromecast {
-namespace media {
-class DecoderBufferBase;
-
-typedef std::list<scoped_refptr<DecoderBufferBase> > BufferList;
-
-class MediaComponentDeviceFeederForTest {
- public:
- MediaComponentDeviceFeederForTest(
- MediaComponentDevice *device,
- const BufferList& frames);
-
- virtual ~MediaComponentDeviceFeederForTest();
-
- void Initialize(const base::Closure& eos_cb);
-
- // Feeds one frame into the pipeline.
- void Feed();
-
- private:
- void OnFramePushed(MediaComponentDevice::FrameStatus status);
-
- void OnEos();
-
- MediaComponentDevice *media_component_device_;
- BufferList frames_;
-
- // Frame index where the audio device is switching to the kStateRunning.
- int rendering_frame_idx_;
-
- // Frame index where the clock device is switching to the kStateRunning.
- int clock_frame_idx_;
-
- // Timing pattern to feed the pipeline.
- std::vector<base::TimeDelta> delayed_feed_pattern_;
- size_t delayed_feed_pattern_idx_;
-
- base::Closure eos_cb_;
-
- bool feeding_completed_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaComponentDeviceFeederForTest);
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_TEST_MEDIA_COMPONENT_DEVICE_FEEDER_FOR_TEST_H_
diff --git a/chromecast/media/media.gyp b/chromecast/media/media.gyp
index ad08911c..fe0fd7c 100644
--- a/chromecast/media/media.gyp
+++ b/chromecast/media/media.gyp
@@ -147,16 +147,8 @@
'../..',
],
'sources': [
- 'cma/backend/audio_pipeline_device_default.cc',
- 'cma/backend/audio_pipeline_device_default.h',
- 'cma/backend/media_clock_device_default.cc',
- 'cma/backend/media_clock_device_default.h',
- 'cma/backend/media_component_device_default.cc',
- 'cma/backend/media_component_device_default.h',
'cma/backend/media_pipeline_backend_default.cc',
'cma/backend/media_pipeline_backend_default.h',
- 'cma/backend/video_pipeline_device_default.cc',
- 'cma/backend/video_pipeline_device_default.h',
],
},
{
@@ -210,8 +202,6 @@
'../../third_party/boringssl/boringssl.gyp:boringssl',
],
'sources': [
- 'cma/pipeline/audio_pipeline.cc',
- 'cma/pipeline/audio_pipeline.h',
'cma/pipeline/audio_pipeline_impl.cc',
'cma/pipeline/audio_pipeline_impl.h',
'cma/pipeline/av_pipeline_client.cc',
@@ -220,49 +210,22 @@
'cma/pipeline/av_pipeline_impl.h',
'cma/pipeline/decrypt_util.cc',
'cma/pipeline/decrypt_util.h',
- 'cma/pipeline/frame_status_cb_impl.cc',
- 'cma/pipeline/frame_status_cb_impl.h',
'cma/pipeline/load_type.h',
- 'cma/pipeline/media_component_device_client_impl.cc',
- 'cma/pipeline/media_component_device_client_impl.h',
- 'cma/pipeline/media_pipeline.h',
'cma/pipeline/media_pipeline_client.cc',
'cma/pipeline/media_pipeline_client.h',
'cma/pipeline/media_pipeline_impl.cc',
'cma/pipeline/media_pipeline_impl.h',
- 'cma/pipeline/video_pipeline.cc',
- 'cma/pipeline/video_pipeline.h',
'cma/pipeline/video_pipeline_client.cc',
'cma/pipeline/video_pipeline_client.h',
- 'cma/pipeline/video_pipeline_device_client_impl.cc',
- 'cma/pipeline/video_pipeline_device_client_impl.h',
'cma/pipeline/video_pipeline_impl.cc',
'cma/pipeline/video_pipeline_impl.h',
],
},
{
- 'target_name': 'cma_filters',
- 'type': '<(component)',
- 'dependencies': [
- '../../base/base.gyp:base',
- '../../media/media.gyp:media',
- 'cma_base',
- ],
- 'sources': [
- 'cma/filters/cma_renderer.cc',
- 'cma/filters/cma_renderer.h',
- 'cma/filters/demuxer_stream_adapter.cc',
- 'cma/filters/demuxer_stream_adapter.h',
- 'cma/filters/hole_frame_factory.cc',
- 'cma/filters/hole_frame_factory.h',
- ],
- },
- {
'target_name': 'cast_media',
'type': 'none',
'dependencies': [
'cma_base',
- 'cma_filters',
'cma_ipc',
'cma_ipc_streamer',
'cma_pipeline',
@@ -294,21 +257,14 @@
'cma/base/balanced_media_task_runner_unittest.cc',
'cma/base/buffering_controller_unittest.cc',
'cma/base/buffering_frame_provider_unittest.cc',
- 'cma/filters/demuxer_stream_adapter_unittest.cc',
- 'cma/filters/multi_demuxer_stream_adapter_unittest.cc',
'cma/ipc/media_message_fifo_unittest.cc',
'cma/ipc/media_message_unittest.cc',
'cma/ipc_streamer/av_streamer_unittest.cc',
'cma/pipeline/audio_video_pipeline_impl_unittest.cc',
- 'cma/test/cma_end_to_end_test.cc',
- 'cma/test/demuxer_stream_for_test.cc',
- 'cma/test/demuxer_stream_for_test.h',
'cma/test/frame_generator_for_test.cc',
'cma/test/frame_generator_for_test.h',
'cma/test/frame_segmenter_for_test.cc',
'cma/test/frame_segmenter_for_test.h',
- 'cma/test/media_component_device_feeder_for_test.cc',
- 'cma/test/media_component_device_feeder_for_test.h',
'cma/test/mock_frame_consumer.cc',
'cma/test/mock_frame_consumer.h',
'cma/test/mock_frame_provider.cc',
diff --git a/chromecast/public/cast_media_shlib.h b/chromecast/public/cast_media_shlib.h
index d523db88..6360aab 100644
--- a/chromecast/public/cast_media_shlib.h
+++ b/chromecast/public/cast_media_shlib.h
@@ -42,9 +42,9 @@ class CHROMECAST_EXPORT CastMediaShlib {
// called at any time. The VideoPlane object must be destroyed in Finalize.
static VideoPlane* GetVideoPlane();
- // Creates a factory object for a media pipeline backend. Called in the
- // browser process, any number of times (once per media pipeline). Each call
- // must instantiate a new factory object
+ // Creates a media pipeline backend. Called in the browser process for each
+ // media pipeline and raw audio stream. The caller owns the returned
+ // MediaPipelineBackend instance.
static MediaPipelineBackend* CreateMediaPipelineBackend(
const MediaPipelineDeviceParams& params);
};
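
For illustration, a minimal call-site sketch of the ownership rule described in the new comment (hypothetical code, not part of this change; the helper name CreateOwnedBackend and the assumption that |params| is populated elsewhere are invented for the example):

#include "base/memory/scoped_ptr.h"
#include "chromecast/public/cast_media_shlib.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/media/media_pipeline_device_params.h"

namespace chromecast {
namespace media {

// One backend per media pipeline (or raw audio stream). The caller keeps
// ownership and destroys the backend when the pipeline goes away.
scoped_ptr<MediaPipelineBackend> CreateOwnedBackend(
    const MediaPipelineDeviceParams& params) {
  return make_scoped_ptr(CastMediaShlib::CreateMediaPipelineBackend(params));
}

} // namespace media
} // namespace chromecast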
diff --git a/chromecast/public/media/BUILD.gn b/chromecast/public/media/BUILD.gn
index b6f699f..3db81e0 100644
--- a/chromecast/public/media/BUILD.gn
+++ b/chromecast/public/media/BUILD.gn
@@ -4,17 +4,13 @@
source_set("media") {
sources = [
- "audio_pipeline_device.h",
"cast_decoder_buffer.h",
"cast_decrypt_config.h",
"cast_key_system.h",
"decoder_config.h",
- "media_clock_device.h",
- "media_component_device.h",
"media_pipeline_backend.h",
"media_pipeline_device_params.h",
"stream_id.h",
- "video_pipeline_device.h",
]
public_configs = [ ":public_headers" ]
diff --git a/chromecast/public/media/audio_pipeline_device.h b/chromecast/public/media/audio_pipeline_device.h
deleted file mode 100644
index 02fadeb..0000000
--- a/chromecast/public/media/audio_pipeline_device.h
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_PUBLIC_MEDIA_AUDIO_PIPELINE_DEVICE_H_
-#define CHROMECAST_PUBLIC_MEDIA_AUDIO_PIPELINE_DEVICE_H_
-
-#include "media_component_device.h"
-
-namespace chromecast {
-namespace media {
-class AudioPipelineDeviceClient;
-struct AudioConfig;
-
-// Interface for platform-specific audio pipeline backend.
-// See comments on MediaComponentDevice.
-class AudioPipelineDevice : public MediaComponentDevice {
- public:
- ~AudioPipelineDevice() override {}
-
- // Provides the audio configuration.
- // Will be called before switching from |kStateUninitialized| to |kStateIdle|.
- // Afterwards, this can be invoked any time the configuration changes.
- // Returns true if the configuration is a supported configuration.
- virtual bool SetConfig(const AudioConfig& config) = 0;
-
- // Sets the volume multiplier.
- // The multiplier is in the range [0.0, 1.0].
- virtual void SetStreamVolumeMultiplier(float multiplier) = 0;
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_AUDIO_PIPELINE_DEVICE_H_
diff --git a/chromecast/public/media/media_clock_device.h b/chromecast/public/media/media_clock_device.h
deleted file mode 100644
index 7bb945e..0000000
--- a/chromecast/public/media/media_clock_device.h
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_PUBLIC_MEDIA_MEDIA_CLOCK_DEVICE_H_
-#define CHROMECAST_PUBLIC_MEDIA_MEDIA_CLOCK_DEVICE_H_
-
-#include <string>
-
-namespace chromecast {
-namespace media {
-
-// Interface for platform-specific pipeline clock.
-// Pipeline clocks follow this state machine:
-// -------------------
-// | |
-// v |
-// kUninitialized --> kIdle --------- kRunning
-//
-// {any state} --> kError
-//
-// Notes:
-// - Hardware resources should be acquired when transitioning from the
-// |kUninitialized| state to the |kIdle| state.
-// - The initial value of the timeline will only be set in the kIdle state.
-class MediaClockDevice {
- public:
- enum State {
- kStateUninitialized,
- kStateIdle,
- kStateRunning,
- kStateError,
- };
-
- virtual ~MediaClockDevice() {}
-
- // Returns the current state of the media clock.
- virtual State GetState() const = 0;
-
- // Changes the state and performs any necessary transitions.
- // Returns true when successful.
- virtual bool SetState(State new_state) = 0;
-
- // Sets the initial value of the timeline in microseconds.
- // Will only be invoked in state kStateIdle.
- // Returns true when successful.
- virtual bool ResetTimeline(int64_t time_microseconds) = 0;
-
- // Sets the clock rate.
- // |rate| == 0 means the clock is not progressing and that the renderer
- // tied to this media clock should pause rendering.
- // Will only be invoked in states kStateIdle or kStateRunning.
- virtual bool SetRate(float rate) = 0;
-
- // Retrieves the media clock time in microseconds.
- // Will only be invoked in states kStateIdle or kStateRunning.
- virtual int64_t GetTimeMicroseconds() = 0;
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_CLOCK_DEVICE_H_
diff --git a/chromecast/public/media/media_component_device.h b/chromecast/public/media/media_component_device.h
deleted file mode 100644
index b6f6255..0000000
--- a/chromecast/public/media/media_component_device.h
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_PUBLIC_MEDIA_MEDIA_COMPONENT_DEVICE_H_
-#define CHROMECAST_PUBLIC_MEDIA_MEDIA_COMPONENT_DEVICE_H_
-
-#include <stdint.h>
-#include <string>
-
-#include "cast_key_system.h"
-
-namespace chromecast {
-namespace media {
-class CastDecoderBuffer;
-class DecryptContext;
-
-// Common base interface for both platform-specific audio and video pipeline
-// backends. Both follow this state machine:
-// +------------- kRunning <--+
-// | ^ |
-// v | |
-// kUninitialized <--> kIdle -------------+ |
-// ^ | |
-// | v |
-// +------------- kPaused <---+
-// {any state} --> kError
-// kError --> kUninitialized
-//
-// Notes:
-// - Hardware resources should be acquired when transitioning from the
-// |kUninitialized| state to the |kIdle| state.
-// - Buffers will be pushed only in the kRunning or kPaused states.
-// - The end of stream is signaled through a special buffer.
-// Once the end of stream buffer is fed, no other buffer
-// will be fed until the FSM goes through the kIdle state again.
-// - In both kPaused and kRunning states, frames can be fed.
-// However, frames are possibly rendered only in the kRunning state.
-// - In the kRunning state, frames are rendered according to the clock rate.
-// - All the hardware resources must be released in the |kError| state.
-class MediaComponentDevice {
- public:
- enum State {
- kStateUninitialized,
- kStateIdle,
- kStateRunning,
- kStatePaused,
- kStateError,
- };
-
- enum FrameStatus {
- kFrameSuccess,
- kFrameFailed,
- kFramePending,
- };
-
- // Interface for receiving status when PushFrame has completed.
- class FrameStatusCB {
- public:
- virtual ~FrameStatusCB() {}
- virtual void Run(FrameStatus status) = 0;
- };
-
- // Client callbacks interface
- class Client {
- public:
- virtual ~Client() {}
- virtual void OnEndOfStream() = 0;
- };
-
- // The statistics are computed since the media component left the idle state.
- // For video, a sample is defined as a frame.
- struct Statistics {
- uint64_t decoded_bytes;
- uint64_t decoded_samples;
- uint64_t dropped_samples;
- };
-
- // Info on pipeline latency: amount of data in pipeline not rendered yet,
- // and timestamp of system clock (must be CLOCK_MONOTONIC) at which delay
- // measurement was taken. Both times in microseconds.
- struct RenderingDelay {
- RenderingDelay()
- : delay_microseconds(INT64_MIN), timestamp_microseconds(INT64_MIN) {}
- RenderingDelay(int64_t delay_microseconds_in,
- int64_t timestamp_microseconds_in)
- : delay_microseconds(delay_microseconds_in),
- timestamp_microseconds(timestamp_microseconds_in) {}
- int64_t delay_microseconds;
- int64_t timestamp_microseconds;
- };
-
- virtual ~MediaComponentDevice() {}
-
- // Registers |client| as the media event handler. Implementation
- // takes ownership of |client| and call OnEndOfStream when an end-of-stream
- // buffer is processed.
- virtual void SetClient(Client* client) = 0;
-
- // Changes the state and performs any necessary transitions.
- // Returns true when successful.
- virtual bool SetState(State new_state) = 0;
-
- // Returns the current state of the media component.
- virtual State GetState() const = 0;
-
- // Sets the time where rendering should start.
- // Return true when successful.
- // Will only be invoked in state kStateIdle.
- virtual bool SetStartPts(int64_t microseconds) = 0;
-
- // Pushes a frame. If the implementation cannot push the buffer
- // now, it must store the buffer, return |kFramePending| and execute the push
- // at a later time when it becomes possible to do so. The implementation must
- // then invoke |completion_cb|. Pushing a pending frame should be aborted if
- // the state returns to kStateIdle, and |completion_cb| need not be invoked.
- // If |kFramePending| is returned, the pipeline will stop pushing any further
- // buffers until the |completion_cb| is invoked.
- // Ownership: |decrypt_context|, |buffer|, |completion_cb| are all owned by
- // the implementation and must be deleted once no longer needed (even in the
- // case where |completion_cb| is not called).
- // |completion_cb| should be only be invoked to indicate completion of a
- // pending buffer push - not for the immediate |kFrameSuccess| return case.
- virtual FrameStatus PushFrame(DecryptContext* decrypt_context,
- CastDecoderBuffer* buffer,
- FrameStatusCB* completion_cb) = 0;
-
- // Returns the pipeline latency: i.e. the amount of data
- // in the pipeline that have not been rendered yet, in microseconds.
- // Returns delay = INT64_MIN if the latency is not available.
- virtual RenderingDelay GetRenderingDelay() const = 0;
-
- // Returns the playback statistics since the last transition from idle state.
- // Returns true when successful.
- // Is only invoked in state kStateRunning.
- virtual bool GetStatistics(Statistics* stats) const = 0;
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_COMPONENT_DEVICE_H_
diff --git a/chromecast/public/media/media_pipeline_backend.h b/chromecast/public/media/media_pipeline_backend.h
index cf64acc..741c2b7 100644
--- a/chromecast/public/media/media_pipeline_backend.h
+++ b/chromecast/public/media/media_pipeline_backend.h
@@ -5,35 +5,195 @@
#ifndef CHROMECAST_PUBLIC_MEDIA_MEDIA_PIPELINE_BACKEND_H_
#define CHROMECAST_PUBLIC_MEDIA_MEDIA_PIPELINE_BACKEND_H_
+#include "decoder_config.h"
+
namespace chromecast {
-namespace media {
+struct Size;
-class AudioPipelineDevice;
-class MediaClockDevice;
-struct MediaPipelineDeviceParams;
-class VideoPipelineDevice;
+namespace media {
+class CastDecoderBuffer;
+class DecryptContext;
-// Interface for creating and managing ownership of platform-specific clock,
-// audio and video devices. cast_shell owns the MediaPipelineBackend for
-// as long as it is needed; the implementation is responsible for
-// tearing down the individual components correctly when it is destroyed.
+// Interface for platform-specific output of media.
// A new MediaPipelineBackend will be instantiated for each media player
-// instance.
+// instance and raw audio stream. If a backend has both video and audio
+// decoders, they must be synchronized.
+// If more backends are requested than the platform supports, the unsupported
+// extra backends may return nullptr for CreateAudioDecoder/CreateVideoDecoder.
+// The basic usage pattern is:
+// * Decoder objects created, then Initialize called
+// * Start/Stop/Pause/Resume used to manage playback state
+// * Decoder objects are used to pass actual stream data buffers
+// * Backend must make appropriate callbacks on the provided Delegate
+// All functions will be called on the media thread. Delegate callbacks
+// must be made on this thread also (using provided TaskRunner if necessary).
class MediaPipelineBackend {
public:
+ // Return code for PushBuffer
+ enum BufferStatus {
+ kBufferSuccess,
+ kBufferFailed,
+ kBufferPending,
+ };
+
+ class Decoder {
+ public:
+ typedef MediaPipelineBackend::BufferStatus BufferStatus;
+
+ // Statistics (computed since pipeline last started playing).
+ // For video, a sample is defined as a frame.
+ struct Statistics {
+ uint64_t decoded_bytes;
+ uint64_t decoded_samples;
+ uint64_t dropped_samples;
+ };
+
+ // Pushes a buffer of data for decoding and output. If the implementation
+ // cannot push the buffer now, it must store the buffer, return
+ // |kBufferPending| and execute the push at a later time when it becomes
+ // possible to do so. The implementation must then invoke
+ // Client::OnPushComplete. Pushing a pending buffer should be aborted if
+ // Stop is called; OnPushAudioComplete need not be invoked in this case.
+ // If |kBufferPending| is returned, the pipeline will stop pushing any
+ // further buffers until OnPushComplete is invoked.
+ // OnPushComplete should be only be invoked to indicate completion of a
+ // pending buffer push - not for the immediate |kBufferSuccess| return case.
+ // The decrypt_context and buffer's lifetimes are managed by the caller code
+ // - they MUST NOT be deleted by the MediaPipelineBackend implementation,
+ // and MUST NOT be dereferenced after completion of buffer push (i.e.
+ // kBufferSuccess/kBufferFailure for synchronous completion, OnPushComplete
+ // for kBufferPending case).
+ virtual BufferStatus PushBuffer(DecryptContext* decrypt_context,
+ CastDecoderBuffer* buffer) = 0;
+
+ // Returns the playback statistics since this decoder's creation. Only
+ // called when playing or paused.
+ virtual void GetStatistics(Statistics* statistics) = 0;
+
+ protected:
+ virtual ~Decoder() {}
+ };
+
+ class AudioDecoder : public Decoder {
+ public:
+ // Info on pipeline latency: amount of data in pipeline not rendered yet,
+ // and timestamp of system clock (must be CLOCK_MONOTONIC) at which delay
+ // measurement was taken. Both times in microseconds.
+ struct RenderingDelay {
+ RenderingDelay()
+ : delay_microseconds(INT64_MIN), timestamp_microseconds(INT64_MIN) {}
+ RenderingDelay(int64_t delay_microseconds_in,
+ int64_t timestamp_microseconds_in)
+ : delay_microseconds(delay_microseconds_in),
+ timestamp_microseconds(timestamp_microseconds_in) {}
+ int64_t delay_microseconds;
+ int64_t timestamp_microseconds;
+ };
+
+ // Provides the audio configuration. Called once before the backend is
+ // initialized, and again any time the configuration changes (in any state).
+ // Returns true if the configuration is a supported configuration.
+ virtual bool SetConfig(const AudioConfig& config) = 0;
+
+ // Sets the volume multiplier for this audio stream.
+ // The multiplier is in the range [0.0, 1.0]. If not called, a default
+ // multiplier of 1.0 is assumed. Returns true if successful.
+ // Only called after the backend has been initialized.
+ virtual bool SetVolume(float multiplier) = 0;
+
+    // Returns the pipeline latency, i.e. the amount of data in the pipeline
+    // that has not been rendered yet, in microseconds.
+ // Returns delay = INT64_MIN if the latency is not available.
+ // Only called when the backend is playing.
+ virtual RenderingDelay GetRenderingDelay() = 0;
+
+ protected:
+ ~AudioDecoder() override {}
+ };
+
+ class VideoDecoder : public Decoder {
+ public:
+ // Provides the video configuration. Called once before the backend is
+ // initialized, and again any time the configuration changes (in any state).
+ // Returns true if the configuration is a supported configuration.
+ virtual bool SetConfig(const VideoConfig& config) = 0;
+
+ protected:
+ ~VideoDecoder() override {}
+ };
+
+ // Delegate methods must be called on the main CMA thread.
+ class Delegate {
+ public:
+    // Must be called when the decoder detects a video resolution change.
+ virtual void OnVideoResolutionChanged(VideoDecoder* decoder,
+ const Size& size) = 0;
+
+ // See comments on PushBuffer. Must not be called with kBufferPending.
+ virtual void OnPushBufferComplete(Decoder* decoder,
+ BufferStatus status) = 0;
+
+    // Must be called after an end-of-stream buffer has been rendered (i.e. the
+ // last real buffer has been sent to the output hardware).
+ virtual void OnEndOfStream(Decoder* decoder) = 0;
+
+ // May be called if a decoder error occurs. No more calls to PushBuffer()
+ // will be made after this is called.
+ virtual void OnDecoderError(Decoder* decoder) = 0;
+
+ protected:
+ virtual ~Delegate() {}
+ };
+
virtual ~MediaPipelineBackend() {}
- // Returns the platform-specific pipeline clock.
- virtual MediaClockDevice* GetClock() = 0;
+ // Creates a new AudioDecoder attached to this pipeline. MediaPipelineBackend
+  // maintains ownership of the decoder object (and must not delete it before
+  // the backend itself is destroyed). Will be called zero or more times, all
+  // calls made before Initialize. May return nullptr if the platform
+  // implementation cannot support any additional simultaneous playback at
+  // this time.
+
+ // Creates a new VideoDecoder attached to this pipeline. MediaPipelineBackend
+  // maintains ownership of the decoder object (and must not delete it before
+  // the backend itself is destroyed). Will be called zero or more times, all
+  // calls made before Initialize. Note: Even if your backend only supports
+  // audio, you must provide a default implementation of VideoDecoder; one way
+  // to do this is to inherit from MediaPipelineBackendDefault. May return
+  // nullptr if the platform implementation cannot support any additional
+  // simultaneous playback at this time.
+ virtual VideoDecoder* CreateVideoDecoder() = 0;
+
+ // Initializes the backend. This will be called once, after Decoder creation
+ // but before all other functions. Hardware resources for all decoders should
+ // be acquired here. Backend is then considered in Initialized state.
+ // Returns false for failure.
+ virtual bool Initialize(Delegate* delegate) = 0;
+
+  // Places pipeline into playing state. Playback will start at the given time
+  // once buffers are pushed. Called only when in Initialized state.
+  // |start_pts| is the start playback timestamp in microseconds.
+ virtual bool Start(int64_t start_pts) = 0;
+
+ // Returns pipeline to 'Initialized' state. May be called while playing or
+ // paused. Buffers cannot be pushed in Initialized state.
+ virtual bool Stop() = 0;
+
+ // Pauses media playback. Called only when in playing state.
+ virtual bool Pause() = 0;
+
+ // Resumes media playback. Called only when in paused state.
+ virtual bool Resume() = 0;
- // Returns the platform-specific audio backend.
- virtual AudioPipelineDevice* GetAudio() = 0;
+ // Gets the current playback timestamp in microseconds.
+ virtual int64_t GetCurrentPts() = 0;
- // Returns the platform-specific video backend.
- virtual VideoPipelineDevice* GetVideo() = 0;
+ // Sets the playback rate. |rate| > 0. If this is not called, a default rate
+ // of 1.0 is assumed. Returns true if successful.
+ virtual bool SetPlaybackRate(float rate) = 0;
};
} // namespace media
} // namespace chromecast
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_MEDIA_PIPELINE_DEVICE_FACTORY_H_
+#endif // CHROMECAST_PUBLIC_MEDIA_MEDIA_PIPELINE_BACKEND_H_
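
To make the new contract above easier to follow, here is a skeleton backend against this API. This is a hypothetical sketch, not part of the change: FakeBackend, FakeAudioDecoder and FakeVideoDecoder are invented names, every call completes synchronously, and the Delegate stored in Initialize would be used for OnEndOfStream/OnPushBufferComplete in a real implementation.

#include <stdint.h>

#include "chromecast/public/media/decoder_config.h"
#include "chromecast/public/media/media_pipeline_backend.h"

namespace chromecast {
namespace media {

class FakeBackend : public MediaPipelineBackend {
 public:
  FakeBackend() : delegate_(nullptr), start_pts_(0) {}
  ~FakeBackend() override {}

  // Decoders are owned by the backend; callers never delete them.
  AudioDecoder* CreateAudioDecoder() override { return &audio_; }
  VideoDecoder* CreateVideoDecoder() override { return &video_; }

  bool Initialize(Delegate* delegate) override {
    delegate_ = delegate; // Acquire hardware resources here.
    return true;
  }
  bool Start(int64_t start_pts) override {
    start_pts_ = start_pts;
    return true;
  }
  bool Stop() override { return true; }
  bool Pause() override { return true; }
  bool Resume() override { return true; }
  int64_t GetCurrentPts() override { return start_pts_; }
  bool SetPlaybackRate(float rate) override { return rate > 0; }

 private:
  class FakeAudioDecoder : public AudioDecoder {
   public:
    BufferStatus PushBuffer(DecryptContext* decrypt_context,
                            CastDecoderBuffer* buffer) override {
      // |decrypt_context| and |buffer| stay owned by the caller; they are
      // not touched after this synchronous completion.
      return MediaPipelineBackend::kBufferSuccess;
    }
    void GetStatistics(Statistics* statistics) override {}
    bool SetConfig(const AudioConfig& config) override { return true; }
    bool SetVolume(float multiplier) override { return true; }
    RenderingDelay GetRenderingDelay() override { return RenderingDelay(); }
  };

  class FakeVideoDecoder : public VideoDecoder {
   public:
    BufferStatus PushBuffer(DecryptContext* decrypt_context,
                            CastDecoderBuffer* buffer) override {
      return MediaPipelineBackend::kBufferSuccess;
    }
    void GetStatistics(Statistics* statistics) override {}
    bool SetConfig(const VideoConfig& config) override { return true; }
  };

  Delegate* delegate_;
  int64_t start_pts_;
  FakeAudioDecoder audio_;
  FakeVideoDecoder video_;
};

} // namespace media
} // namespace chromecast

A real backend would return kBufferPending from PushBuffer when its hardware queue is full and later call Delegate::OnPushBufferComplete on the media thread.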
diff --git a/chromecast/public/media/media_pipeline_device_params.h b/chromecast/public/media/media_pipeline_device_params.h
index fa7779b..b307dc4 100644
--- a/chromecast/public/media/media_pipeline_device_params.h
+++ b/chromecast/public/media/media_pipeline_device_params.h
@@ -5,9 +5,9 @@
#ifndef CHROMECAST_PUBLIC_MEDIA_MEDIA_PIPELINE_DEVICE_PARAMS_H_
#define CHROMECAST_PUBLIC_MEDIA_MEDIA_PIPELINE_DEVICE_PARAMS_H_
-#include "task_runner.h"
-
namespace chromecast {
+class TaskRunner;
+
namespace media {
// Supplies creation parameters to platform-specific pipeline backend.
diff --git a/chromecast/public/media/video_pipeline_device.h b/chromecast/public/media/video_pipeline_device.h
deleted file mode 100644
index 1019ec3..0000000
--- a/chromecast/public/media/video_pipeline_device.h
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef CHROMECAST_PUBLIC_MEDIA_VIDEO_PIPELINE_DEVICE_H_
-#define CHROMECAST_PUBLIC_MEDIA_VIDEO_PIPELINE_DEVICE_H_
-
-#include "media_component_device.h"
-
-namespace chromecast {
-struct Size;
-
-namespace media {
-struct VideoConfig;
-
-// Interface for platform-specific video pipeline backend.
-// See comments on MediaComponentDevice.
-//
-// Notes:
-// - Like a regular MediaComponentDevice, frames are possibly rendered only
-// in the kRunning state.
-// However, the first frame must be rendered regardless of the clock state:
-// - no synchronization needed to display the first frame,
-// - the clock rate has no impact on the presentation of the first frame.
-class VideoPipelineDevice : public MediaComponentDevice {
- public:
- // Callback interface for natural size of video changing.
- class VideoClient {
- public:
- virtual ~VideoClient() {}
- virtual void OnNaturalSizeChanged(const Size& size) = 0;
- };
-
- ~VideoPipelineDevice() override {}
-
- // Registers |client| as the video specific event handler.
- // Implementation takes ownership of |client|.
- virtual void SetVideoClient(VideoClient* client) = 0;
-
- // Provides the video configuration.
- // Called before switching from |kStateUninitialized| to |kStateIdle|.
- // Afterwards, this can be invoked any time the configuration changes.
- // Returns true if the configuration is a supported configuration.
- virtual bool SetConfig(const VideoConfig& config) = 0;
-};
-
-} // namespace media
-} // namespace chromecast
-
-#endif // CHROMECAST_MEDIA_CMA_BACKEND_VIDEO_PIPELINE_DEVICE_H_
diff --git a/chromecast/renderer/media/BUILD.gn b/chromecast/renderer/media/BUILD.gn
index 6bbba52..38efe6a 100644
--- a/chromecast/renderer/media/BUILD.gn
+++ b/chromecast/renderer/media/BUILD.gn
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//testing/test.gni")
+
source_set("media") {
sources = [
"audio_pipeline_proxy.cc",
@@ -12,6 +14,12 @@ source_set("media") {
"chromecast_media_renderer_factory.h",
"cma_message_filter_proxy.cc",
"cma_message_filter_proxy.h",
+ "cma_renderer.cc",
+ "cma_renderer.h",
+ "demuxer_stream_adapter.cc",
+ "demuxer_stream_adapter.h",
+ "hole_frame_factory.cc",
+ "hole_frame_factory.h",
"media_channel_proxy.cc",
"media_channel_proxy.h",
"media_pipeline_proxy.cc",
@@ -29,3 +37,24 @@ source_set("media") {
"//media",
]
}
+
+# GYP target: chromecast_tests.gypi:cast_renderer_media_unittests
+test("cast_renderer_media_unittests") {
+ sources = [
+ "demuxer_stream_adapter_unittest.cc",
+ "demuxer_stream_for_test.cc",
+ "demuxer_stream_for_test.h",
+ "multi_demuxer_stream_adapter_unittest.cc",
+ ]
+
+ deps = [
+ ":media",
+ "//base",
+ "//base/test:run_all_unittests",
+ "//base/test:test_support",
+ "//chromecast/media/cma/base",
+ "//chromecast/public/media",
+ "//media",
+ "//testing/gtest:gtest",
+ ]
+}
diff --git a/chromecast/renderer/media/DEPS b/chromecast/renderer/media/DEPS
new file mode 100644
index 0000000..9be0bc0
--- /dev/null
+++ b/chromecast/renderer/media/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+gpu",
+]
diff --git a/chromecast/renderer/media/audio_pipeline_proxy.cc b/chromecast/renderer/media/audio_pipeline_proxy.cc
index 0193232..ed76855 100644
--- a/chromecast/renderer/media/audio_pipeline_proxy.cc
+++ b/chromecast/renderer/media/audio_pipeline_proxy.cc
@@ -118,9 +118,8 @@ void AudioPipelineProxyInternal::NotifyPipeWrite() {
VLOG_IF(4, !success) << "Sending msg failed";
}
-void AudioPipelineProxyInternal::SetClient(
- const base::Closure& pipe_read_cb,
- const AvPipelineClient& client) {
+void AudioPipelineProxyInternal::SetClient(const base::Closure& pipe_read_cb,
+ const AvPipelineClient& client) {
DCHECK(thread_checker_.CalledOnValidThread());
CmaMessageFilterProxy::AudioDelegate delegate;
@@ -221,8 +220,7 @@ AudioPipelineProxy::~AudioPipelineProxy() {
base::Bind(&AudioPipelineProxyInternal::Release, base::Passed(&proxy_)));
}
-void AudioPipelineProxy::SetClient(
- const AvPipelineClient& client) {
+void AudioPipelineProxy::SetClient(const AvPipelineClient& client) {
DCHECK(thread_checker_.CalledOnValidThread());
base::Closure pipe_read_cb = ::media::BindToCurrentLoop(
base::Bind(&AudioPipelineProxy::OnPipeRead, weak_this_));
@@ -306,4 +304,4 @@ void AudioPipelineProxy::OnPipeRead() {
}
} // namespace cma
-} // namespace chromecast
\ No newline at end of file
+} // namespace chromecast
diff --git a/chromecast/renderer/media/audio_pipeline_proxy.h b/chromecast/renderer/media/audio_pipeline_proxy.h
index 7bcc685..87e0d18 100644
--- a/chromecast/renderer/media/audio_pipeline_proxy.h
+++ b/chromecast/renderer/media/audio_pipeline_proxy.h
@@ -11,7 +11,6 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
#include "chromecast/common/media/cma_ipc_common.h"
-#include "chromecast/media/cma/pipeline/audio_pipeline.h"
#include "media/base/pipeline_status.h"
namespace base {
@@ -26,16 +25,16 @@ class AudioDecoderConfig;
namespace chromecast {
namespace media {
class AudioPipelineProxyInternal;
-struct AvPipelineClient;
class AvStreamerProxy;
class CodedFrameProvider;
class MediaChannelProxy;
+struct AvPipelineClient;
-class AudioPipelineProxy : public AudioPipeline {
+class AudioPipelineProxy {
public:
AudioPipelineProxy(scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
scoped_refptr<MediaChannelProxy> media_channel_proxy);
- ~AudioPipelineProxy() override;
+ ~AudioPipelineProxy();
void Initialize(
const ::media::AudioDecoderConfig& config,
@@ -45,9 +44,8 @@ class AudioPipelineProxy : public AudioPipeline {
void Flush(const base::Closure& done_cb);
void Stop();
- // AudioPipeline implementation.
- void SetClient(const AvPipelineClient& client) override;
- void SetVolume(float volume) override;
+ void SetClient(const AvPipelineClient& client);
+ void SetVolume(float volume);
private:
base::ThreadChecker thread_checker_;
@@ -75,4 +73,4 @@ class AudioPipelineProxy : public AudioPipeline {
} // namespace media
} // namespace chromecast
-#endif // CHROMECAST_RENDERER_MEDIA_AUDIO_PIPELINE_PROXY_H_
\ No newline at end of file
+#endif // CHROMECAST_RENDERER_MEDIA_AUDIO_PIPELINE_PROXY_H_
diff --git a/chromecast/renderer/media/chromecast_media_renderer_factory.cc b/chromecast/renderer/media/chromecast_media_renderer_factory.cc
index 3a43e91..5dc6b35 100644
--- a/chromecast/renderer/media/chromecast_media_renderer_factory.cc
+++ b/chromecast/renderer/media/chromecast_media_renderer_factory.cc
@@ -6,7 +6,7 @@
#include "base/command_line.h"
#include "chromecast/media/base/switching_media_renderer.h"
-#include "chromecast/media/cma/filters/cma_renderer.h"
+#include "chromecast/renderer/media/cma_renderer.h"
#include "chromecast/renderer/media/media_pipeline_proxy.h"
#include "content/public/renderer/render_thread.h"
#include "media/base/audio_hardware_config.h"
@@ -34,7 +34,7 @@ scoped_ptr<::media::Renderer> ChromecastMediaRendererFactory::CreateRenderer(
const scoped_refptr<base::TaskRunner>& worker_task_runner,
::media::AudioRendererSink* audio_renderer_sink,
::media::VideoRendererSink* video_renderer_sink) {
- if (!default_render_factory_) {
+ if (!default_renderer_factory_) {
// Chromecast doesn't have input audio devices, so leave this uninitialized
::media::AudioParameters input_audio_params;
// TODO(servolk): Audio parameters are hardcoded for now, but in the future
@@ -52,26 +52,26 @@ scoped_ptr<::media::Renderer> ChromecastMediaRendererFactory::CreateRenderer(
audio_config_.reset(new ::media::AudioHardwareConfig(input_audio_params,
output_audio_params));
- default_render_factory_.reset(new ::media::DefaultRendererFactory(
+ default_renderer_factory_.reset(new ::media::DefaultRendererFactory(
media_log_, /*gpu_factories*/ nullptr, *audio_config_));
}
- DCHECK(default_render_factory_);
+ DCHECK(default_renderer_factory_);
// TODO(erickung): crbug.com/443956. Need to provide right LoadType.
LoadType cma_load_type = kLoadTypeMediaSource;
- scoped_ptr<MediaPipeline> cma_media_pipeline(
- new MediaPipelineProxy(
- render_frame_id_,
- content::RenderThread::Get()->GetIOMessageLoopProxy(),
- cma_load_type));
+ scoped_ptr<MediaPipelineProxy> cma_media_pipeline(new MediaPipelineProxy(
+ render_frame_id_,
+ content::RenderThread::Get()->GetIOMessageLoopProxy(),
+ cma_load_type));
scoped_ptr<CmaRenderer> cma_renderer(new CmaRenderer(
cma_media_pipeline.Pass(), video_renderer_sink, gpu_factories_));
- scoped_ptr<::media::Renderer> default_media_render(
- default_render_factory_->CreateRenderer(
- media_task_runner, media_task_runner, audio_renderer_sink,
- video_renderer_sink));
+ scoped_ptr<::media::Renderer> default_media_renderer(
+ default_renderer_factory_->CreateRenderer(media_task_runner,
+ media_task_runner,
+ audio_renderer_sink,
+ video_renderer_sink));
scoped_ptr<SwitchingMediaRenderer> media_renderer(new SwitchingMediaRenderer(
- default_media_render.Pass(), cma_renderer.Pass()));
+ default_media_renderer.Pass(), cma_renderer.Pass()));
return media_renderer.Pass();
}
diff --git a/chromecast/renderer/media/chromecast_media_renderer_factory.h b/chromecast/renderer/media/chromecast_media_renderer_factory.h
index d4da512..76c884f 100644
--- a/chromecast/renderer/media/chromecast_media_renderer_factory.h
+++ b/chromecast/renderer/media/chromecast_media_renderer_factory.h
@@ -38,7 +38,7 @@ class ChromecastMediaRendererFactory : public ::media::RendererFactory {
int render_frame_id_;
scoped_refptr<::media::GpuVideoAcceleratorFactories> gpu_factories_;
scoped_refptr<::media::MediaLog> media_log_;
- scoped_ptr<::media::DefaultRendererFactory> default_render_factory_;
+ scoped_ptr<::media::DefaultRendererFactory> default_renderer_factory_;
// Audio config for the default media renderer.
scoped_ptr<::media::AudioHardwareConfig> audio_config_;
diff --git a/chromecast/media/cma/filters/cma_renderer.cc b/chromecast/renderer/media/cma_renderer.cc
index 1d9dadd..922f5c9 100644
--- a/chromecast/media/cma/filters/cma_renderer.cc
+++ b/chromecast/renderer/media/cma_renderer.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "chromecast/media/cma/filters/cma_renderer.h"
+#include "chromecast/renderer/media/cma_renderer.h"
#include "base/bind.h"
#include "base/callback_helpers.h"
@@ -11,14 +11,14 @@
#include "base/thread_task_runner_handle.h"
#include "chromecast/media/cma/base/balanced_media_task_runner_factory.h"
#include "chromecast/media/cma/base/cma_logging.h"
-#include "chromecast/media/cma/filters/demuxer_stream_adapter.h"
-#include "chromecast/media/cma/filters/hole_frame_factory.h"
-#include "chromecast/media/cma/pipeline/audio_pipeline.h"
#include "chromecast/media/cma/pipeline/av_pipeline_client.h"
-#include "chromecast/media/cma/pipeline/media_pipeline.h"
#include "chromecast/media/cma/pipeline/media_pipeline_client.h"
-#include "chromecast/media/cma/pipeline/video_pipeline.h"
#include "chromecast/media/cma/pipeline/video_pipeline_client.h"
+#include "chromecast/renderer/media/audio_pipeline_proxy.h"
+#include "chromecast/renderer/media/demuxer_stream_adapter.h"
+#include "chromecast/renderer/media/hole_frame_factory.h"
+#include "chromecast/renderer/media/media_pipeline_proxy.h"
+#include "chromecast/renderer/media/video_pipeline_proxy.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/demuxer_stream_provider.h"
#include "media/base/pipeline_status.h"
@@ -34,15 +34,15 @@ namespace {
// Maximum difference between audio frame PTS and video frame PTS
// for frames read from the DemuxerStream.
-const base::TimeDelta kMaxDeltaFetcher(
- base::TimeDelta::FromMilliseconds(2000));
+const base::TimeDelta kMaxDeltaFetcher(base::TimeDelta::FromMilliseconds(2000));
-void MediaPipelineClientDummyCallback() {}
+void MediaPipelineClientDummyCallback() {
+}
} // namespace
CmaRenderer::CmaRenderer(
- scoped_ptr<MediaPipeline> media_pipeline,
+ scoped_ptr<MediaPipelineProxy> media_pipeline,
::media::VideoRendererSink* video_renderer_sink,
const scoped_refptr<::media::GpuVideoAcceleratorFactories>& gpu_factories)
: media_task_runner_factory_(
@@ -122,8 +122,8 @@ void CmaRenderer::Initialize(
base::Bind(&CmaRenderer::OnPlaybackTimeUpdated, weak_this_));
media_pipeline_client.pipeline_backend_created_cb =
base::Bind(&MediaPipelineClientDummyCallback);
- media_pipeline_client.pipeline_backend_destroyed_cb
- = base::Bind(&MediaPipelineClientDummyCallback);
+ media_pipeline_client.pipeline_backend_destroyed_cb =
+ base::Bind(&MediaPipelineClientDummyCallback);
media_pipeline_->SetClient(media_pipeline_client);
init_cb_ = init_cb;
@@ -144,9 +144,8 @@ void CmaRenderer::Flush(const base::Closure& flush_cb) {
}
DCHECK_EQ(state_, kPlaying) << state_;
- media_pipeline_->Flush(
- ::media::BindToCurrentLoop(
- base::Bind(&CmaRenderer::OnFlushDone, weak_this_)));
+ media_pipeline_->Flush(::media::BindToCurrentLoop(
+ base::Bind(&CmaRenderer::OnFlushDone, weak_this_)));
{
base::AutoLock auto_lock(time_interpolator_lock_);
@@ -248,7 +247,8 @@ void CmaRenderer::InitializeAudioPipeline() {
demuxer_stream_provider_->GetStream(::media::DemuxerStream::AUDIO);
::media::PipelineStatusCB audio_initialization_done_cb =
::media::BindToCurrentLoop(
- base::Bind(&CmaRenderer::OnAudioPipelineInitializeDone, weak_this_,
+ base::Bind(&CmaRenderer::OnAudioPipelineInitializeDone,
+ weak_this_,
stream != nullptr));
if (!stream) {
CMALOG(kLogControl) << __FUNCTION__ << ": no audio stream, skipping init.";
@@ -260,8 +260,8 @@ void CmaRenderer::InitializeAudioPipeline() {
AvPipelineClient av_pipeline_client;
av_pipeline_client.eos_cb = ::media::BindToCurrentLoop(
base::Bind(&CmaRenderer::OnEosReached, weak_this_, true));
- av_pipeline_client.playback_error_cb = ::media::BindToCurrentLoop(
- base::Bind(&CmaRenderer::OnError, weak_this_));
+ av_pipeline_client.playback_error_cb =
+ ::media::BindToCurrentLoop(base::Bind(&CmaRenderer::OnError, weak_this_));
av_pipeline_client.statistics_cb = ::media::BindToCurrentLoop(
base::Bind(&CmaRenderer::OnStatisticsUpdated, weak_this_));
audio_pipeline_->SetClient(av_pipeline_client);
@@ -307,7 +307,8 @@ void CmaRenderer::InitializeVideoPipeline() {
demuxer_stream_provider_->GetStream(::media::DemuxerStream::VIDEO);
::media::PipelineStatusCB video_initialization_done_cb =
::media::BindToCurrentLoop(
- base::Bind(&CmaRenderer::OnVideoPipelineInitializeDone, weak_this_,
+ base::Bind(&CmaRenderer::OnVideoPipelineInitializeDone,
+ weak_this_,
stream != nullptr));
if (!stream) {
CMALOG(kLogControl) << __FUNCTION__ << ": no video stream, skipping init.";
@@ -319,8 +320,8 @@ void CmaRenderer::InitializeVideoPipeline() {
VideoPipelineClient client;
client.av_pipeline_client.eos_cb = ::media::BindToCurrentLoop(
base::Bind(&CmaRenderer::OnEosReached, weak_this_, false));
- client.av_pipeline_client.playback_error_cb = ::media::BindToCurrentLoop(
- base::Bind(&CmaRenderer::OnError, weak_this_));
+ client.av_pipeline_client.playback_error_cb =
+ ::media::BindToCurrentLoop(base::Bind(&CmaRenderer::OnError, weak_this_));
client.av_pipeline_client.statistics_cb = ::media::BindToCurrentLoop(
base::Bind(&CmaRenderer::OnStatisticsUpdated, weak_this_));
client.natural_size_changed_cb = ::media::BindToCurrentLoop(
@@ -339,9 +340,7 @@ void CmaRenderer::InitializeVideoPipeline() {
std::vector<::media::VideoDecoderConfig> configs;
configs.push_back(config);
media_pipeline_->InitializeVideo(
- configs,
- frame_provider.Pass(),
- video_initialization_done_cb);
+ configs, frame_provider.Pass(), video_initialization_done_cb);
}
void CmaRenderer::OnVideoPipelineInitializeDone(
@@ -383,8 +382,7 @@ void CmaRenderer::OnEosReached(bool is_audio) {
bool audio_finished = !has_audio_ || received_audio_eos_;
bool video_finished = !has_video_ || received_video_eos_;
- CMALOG(kLogControl) << __FUNCTION__
- << " audio_finished=" << audio_finished
+ CMALOG(kLogControl) << __FUNCTION__ << " audio_finished=" << audio_finished
<< " video_finished=" << video_finished;
if (audio_finished && video_finished)
ended_cb_.Run();
@@ -402,10 +400,9 @@ void CmaRenderer::OnNaturalSizeChanged(const gfx::Size& size) {
hole_frame_factory_->CreateHoleFrame(size));
}
-void CmaRenderer::OnPlaybackTimeUpdated(
- base::TimeDelta time,
- base::TimeDelta max_time,
- base::TimeTicks capture_time) {
+void CmaRenderer::OnPlaybackTimeUpdated(base::TimeDelta time,
+ base::TimeDelta max_time,
+ base::TimeTicks capture_time) {
DCHECK(thread_checker_.CalledOnValidThread());
if (state_ != kPlaying) {
LOG(WARNING) << "Ignoring a late time update";
diff --git a/chromecast/media/cma/filters/cma_renderer.h b/chromecast/renderer/media/cma_renderer.h
index f76226b..85ad368 100644
--- a/chromecast/media/cma/filters/cma_renderer.h
+++ b/chromecast/renderer/media/cma_renderer.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef CHROMECAST_MEDIA_CMA_FILTERS_CMA_RENDERER_H_
-#define CHROMECAST_MEDIA_CMA_FILTERS_CMA_RENDERER_H_
+#ifndef CHROMECAST_RENDERER_MEDIA_CMA_RENDERER_H_
+#define CHROMECAST_RENDERER_MEDIA_CMA_RENDERER_H_
#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
@@ -29,29 +29,28 @@ class VideoRendererSink;
namespace chromecast {
namespace media {
-class AudioPipeline;
+class AudioPipelineProxy;
class BalancedMediaTaskRunnerFactory;
class HoleFrameFactory;
-class MediaPipeline;
-class VideoPipeline;
+class MediaPipelineProxy;
+class VideoPipelineProxy;
class CmaRenderer : public ::media::Renderer {
public:
- CmaRenderer(scoped_ptr<MediaPipeline> media_pipeline,
+ CmaRenderer(scoped_ptr<MediaPipelineProxy> media_pipeline,
::media::VideoRendererSink* video_renderer_sink,
const scoped_refptr<::media::GpuVideoAcceleratorFactories>&
gpu_factories);
~CmaRenderer() override;
// ::media::Renderer implementation:
- void Initialize(
- ::media::DemuxerStreamProvider* demuxer_stream_provider,
- const ::media::PipelineStatusCB& init_cb,
- const ::media::StatisticsCB& statistics_cb,
- const ::media::BufferingStateCB& buffering_state_cb,
- const base::Closure& ended_cb,
- const ::media::PipelineStatusCB& error_cb,
- const base::Closure& waiting_for_decryption_key_cb) override;
+ void Initialize(::media::DemuxerStreamProvider* demuxer_stream_provider,
+ const ::media::PipelineStatusCB& init_cb,
+ const ::media::StatisticsCB& statistics_cb,
+ const ::media::BufferingStateCB& buffering_state_cb,
+ const base::Closure& ended_cb,
+ const ::media::PipelineStatusCB& error_cb,
+ const base::Closure& waiting_for_decryption_key_cb) override;
void Flush(const base::Closure& flush_cb) override;
void StartPlayingFrom(base::TimeDelta time) override;
void SetPlaybackRate(double playback_rate) override;
@@ -102,9 +101,9 @@ class CmaRenderer : public ::media::Renderer {
base::ThreadChecker thread_checker_;
scoped_refptr<BalancedMediaTaskRunnerFactory> media_task_runner_factory_;
- scoped_ptr<MediaPipeline> media_pipeline_;
- AudioPipeline* audio_pipeline_;
- VideoPipeline* video_pipeline_;
+ scoped_ptr<MediaPipelineProxy> media_pipeline_;
+ AudioPipelineProxy* audio_pipeline_;
+ VideoPipelineProxy* video_pipeline_;
::media::VideoRendererSink* video_renderer_sink_;
::media::DemuxerStreamProvider* demuxer_stream_provider_;
@@ -143,7 +142,7 @@ class CmaRenderer : public ::media::Renderer {
// Tracks the most recent media time update and provides interpolated values
// as playback progresses.
- scoped_ptr< ::media::TimeDeltaInterpolator> time_interpolator_;
+ scoped_ptr<::media::TimeDeltaInterpolator> time_interpolator_;
double playback_rate_;
@@ -156,4 +155,4 @@ class CmaRenderer : public ::media::Renderer {
} // namespace media
} // namespace chromecast
-#endif // CHROMECAST_MEDIA_CMA_FILTERS_CMA_RENDERER_H_
+#endif // CHROMECAST_RENDERER_MEDIA_CMA_RENDERER_H_
diff --git a/chromecast/media/cma/filters/demuxer_stream_adapter.cc b/chromecast/renderer/media/demuxer_stream_adapter.cc
index 6c98da3..789827d 100644
--- a/chromecast/media/cma/filters/demuxer_stream_adapter.cc
+++ b/chromecast/renderer/media/demuxer_stream_adapter.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "chromecast/media/cma/filters/demuxer_stream_adapter.h"
+#include "chromecast/renderer/media/demuxer_stream_adapter.h"
#include "base/bind.h"
#include "base/callback_helpers.h"
diff --git a/chromecast/media/cma/filters/demuxer_stream_adapter.h b/chromecast/renderer/media/demuxer_stream_adapter.h
index 5e8d817..7fbc7cd 100644
--- a/chromecast/media/cma/filters/demuxer_stream_adapter.h
+++ b/chromecast/renderer/media/demuxer_stream_adapter.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef CHROMECAST_MEDIA_CMA_FILTERS_DEMUXER_STREAM_ADAPTER_H_
-#define CHROMECAST_MEDIA_CMA_FILTERS_DEMUXER_STREAM_ADAPTER_H_
+#ifndef CHROMECAST_RENDERER_MEDIA_DEMUXER_STREAM_ADAPTER_H_
+#define CHROMECAST_RENDERER_MEDIA_DEMUXER_STREAM_ADAPTER_H_
#include "base/callback.h"
#include "base/memory/scoped_ptr.h"
@@ -34,7 +34,7 @@ class DemuxerStreamAdapter : public CodedFrameProvider {
DemuxerStreamAdapter(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
const scoped_refptr<BalancedMediaTaskRunnerFactory>&
- media_task_runner_factory,
+ media_task_runner_factory,
::media::DemuxerStream* demuxer_stream);
~DemuxerStreamAdapter() override;
@@ -51,7 +51,7 @@ class DemuxerStreamAdapter : public CodedFrameProvider {
// Callback invoked from the demuxer stream to signal a buffer is ready.
void OnNewBuffer(const ReadCB& read_cb,
::media::DemuxerStream::Status status,
- const scoped_refptr< ::media::DecoderBuffer>& input);
+ const scoped_refptr<::media::DecoderBuffer>& input);
base::ThreadChecker thread_checker_;
@@ -90,4 +90,4 @@ class DemuxerStreamAdapter : public CodedFrameProvider {
} // namespace media
} // namespace chromecast
-#endif // CHROMECAST_MEDIA_CMA_FILTERS_DEMUXER_STREAM_ADAPTER_H_
+#endif // CHROMECAST_RENDERER_MEDIA_DEMUXER_STREAM_ADAPTER_H_
diff --git a/chromecast/media/cma/filters/demuxer_stream_adapter_unittest.cc b/chromecast/renderer/media/demuxer_stream_adapter_unittest.cc
index 4657244..cc74308 100644
--- a/chromecast/media/cma/filters/demuxer_stream_adapter_unittest.cc
+++ b/chromecast/renderer/media/demuxer_stream_adapter_unittest.cc
@@ -13,9 +13,9 @@
#include "base/time/time.h"
#include "chromecast/media/cma/base/balanced_media_task_runner_factory.h"
#include "chromecast/media/cma/base/decoder_buffer_base.h"
-#include "chromecast/media/cma/filters/demuxer_stream_adapter.h"
-#include "chromecast/media/cma/test/demuxer_stream_for_test.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
+#include "chromecast/renderer/media/demuxer_stream_adapter.h"
+#include "chromecast/renderer/media/demuxer_stream_for_test.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
@@ -72,9 +72,10 @@ DemuxerStreamAdapterTest::~DemuxerStreamAdapterTest() {
void DemuxerStreamAdapterTest::Initialize(
::media::DemuxerStream* demuxer_stream) {
- coded_frame_provider_.reset(new DemuxerStreamAdapter(
- base::ThreadTaskRunnerHandle::Get(),
- scoped_refptr<BalancedMediaTaskRunnerFactory>(), demuxer_stream));
+ coded_frame_provider_.reset(
+ new DemuxerStreamAdapter(base::ThreadTaskRunnerHandle::Get(),
+ scoped_refptr<BalancedMediaTaskRunnerFactory>(),
+ demuxer_stream));
}
void DemuxerStreamAdapterTest::Start() {
@@ -89,9 +90,8 @@ void DemuxerStreamAdapterTest::Start() {
base::Unretained(this)),
base::TimeDelta::FromSeconds(5));
- coded_frame_provider_->Read(
- base::Bind(&DemuxerStreamAdapterTest::OnNewFrame,
- base::Unretained(this)));
+ coded_frame_provider_->Read(base::Bind(&DemuxerStreamAdapterTest::OnNewFrame,
+ base::Unretained(this)));
}
void DemuxerStreamAdapterTest::OnTestTimeout() {
@@ -116,21 +116,18 @@ void DemuxerStreamAdapterTest::OnNewFrame(
frame_received_count_++;
if (frame_received_count_ >= total_frames_) {
- coded_frame_provider_->Flush(
- base::Bind(&DemuxerStreamAdapterTest::OnFlushCompleted,
- base::Unretained(this)));
+ coded_frame_provider_->Flush(base::Bind(
+ &DemuxerStreamAdapterTest::OnFlushCompleted, base::Unretained(this)));
return;
}
- coded_frame_provider_->Read(
- base::Bind(&DemuxerStreamAdapterTest::OnNewFrame,
- base::Unretained(this)));
+ coded_frame_provider_->Read(base::Bind(&DemuxerStreamAdapterTest::OnNewFrame,
+ base::Unretained(this)));
ASSERT_LE(frame_received_count_, early_flush_idx_);
if (frame_received_count_ == early_flush_idx_) {
- base::Closure flush_cb =
- base::Bind(&DemuxerStreamAdapterTest::OnFlushCompleted,
- base::Unretained(this));
+ base::Closure flush_cb = base::Bind(
+ &DemuxerStreamAdapterTest::OnFlushCompleted, base::Unretained(this));
if (use_post_task_for_flush_) {
base::MessageLoop::current()->PostTask(
FROM_HERE,
@@ -152,7 +149,7 @@ void DemuxerStreamAdapterTest::OnFlushCompleted() {
TEST_F(DemuxerStreamAdapterTest, NoDelay) {
total_frames_ = 10;
- early_flush_idx_ = total_frames_; // No early flush.
+ early_flush_idx_ = total_frames_; // No early flush.
total_expected_frames_ = 10;
config_idx_.push_back(0);
config_idx_.push_back(5);
@@ -172,7 +169,7 @@ TEST_F(DemuxerStreamAdapterTest, NoDelay) {
TEST_F(DemuxerStreamAdapterTest, AllDelayed) {
total_frames_ = 10;
- early_flush_idx_ = total_frames_; // No early flush.
+ early_flush_idx_ = total_frames_; // No early flush.
total_expected_frames_ = 10;
config_idx_.push_back(0);
config_idx_.push_back(5);
diff --git a/chromecast/media/cma/test/demuxer_stream_for_test.cc b/chromecast/renderer/media/demuxer_stream_for_test.cc
index 74e28ce..228f1de 100644
--- a/chromecast/media/cma/test/demuxer_stream_for_test.cc
+++ b/chromecast/renderer/media/demuxer_stream_for_test.cc
@@ -3,7 +3,7 @@
// found in the LICENSE file.
#include "base/threading/thread.h"
-#include "chromecast/media/cma/test/demuxer_stream_for_test.h"
+#include "chromecast/renderer/media/demuxer_stream_for_test.h"
namespace chromecast {
namespace media {
@@ -35,8 +35,9 @@ void DemuxerStreamForTest::Read(const ReadCB& read_cb) {
if ((frame_count_ % cycle_count_) < delayed_frame_count_) {
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
- FROM_HERE, base::Bind(&DemuxerStreamForTest::DoRead,
- base::Unretained(this), read_cb),
+ FROM_HERE,
+ base::Bind(
+ &DemuxerStreamForTest::DoRead, base::Unretained(this), read_cb),
base::TimeDelta::FromMilliseconds(20));
return;
}
@@ -52,10 +53,16 @@ void DemuxerStreamForTest::Read(const ReadCB& read_cb) {
gfx::Size coded_size(640, 480);
gfx::Rect visible_rect(640, 480);
gfx::Size natural_size(640, 480);
- return ::media::VideoDecoderConfig(
- ::media::kCodecH264, ::media::VIDEO_CODEC_PROFILE_UNKNOWN,
- ::media::PIXEL_FORMAT_YV12, ::media::COLOR_SPACE_UNSPECIFIED, coded_size,
- visible_rect, natural_size, NULL, 0, false);
+ return ::media::VideoDecoderConfig(::media::kCodecH264,
+ ::media::VIDEO_CODEC_PROFILE_UNKNOWN,
+ ::media::PIXEL_FORMAT_YV12,
+ ::media::COLOR_SPACE_UNSPECIFIED,
+ coded_size,
+ visible_rect,
+ natural_size,
+ NULL,
+ 0,
+ false);
}
::media::DemuxerStream::Type DemuxerStreamForTest::type() const {
diff --git a/chromecast/media/cma/test/demuxer_stream_for_test.h b/chromecast/renderer/media/demuxer_stream_for_test.h
index afa7aae..20d8d41 100644
--- a/chromecast/media/cma/test/demuxer_stream_for_test.h
+++ b/chromecast/renderer/media/demuxer_stream_for_test.h
@@ -2,14 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef CHROMECAST_MEDIA_CMA_TEST_DUMMY_DEMUXER_STREAM_H_
-#define CHROMECAST_MEDIA_CMA_TEST_DUMMY_DEMUXER_STREAM_H_
+#ifndef CHROMECAST_RENDERER_MEDIA_DEMUXER_STREAM_FOR_TEST_H_
+#define CHROMECAST_RENDERER_MEDIA_DEMUXER_STREAM_FOR_TEST_H_
#include <list>
#include "base/bind.h"
#include "base/thread_task_runner_handle.h"
-#include "chromecast/media/cma/filters/demuxer_stream_adapter.h"
+#include "chromecast/renderer/media/demuxer_stream_adapter.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
@@ -71,4 +71,5 @@ class DemuxerStreamForTest : public ::media::DemuxerStream {
} // namespace media
} // namespace chromecast
-#endif
+
+#endif // CHROMECAST_RENDERER_MEDIA_DEMUXER_STREAM_FOR_TEST_H_
diff --git a/chromecast/media/cma/filters/hole_frame_factory.cc b/chromecast/renderer/media/hole_frame_factory.cc
index 4d4b333..6012439 100644
--- a/chromecast/media/cma/filters/hole_frame_factory.cc
+++ b/chromecast/renderer/media/hole_frame_factory.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "chromecast/media/cma/filters/hole_frame_factory.h"
+#include "chromecast/renderer/media/hole_frame_factory.h"
#include "base/bind.h"
#include "base/location.h"
@@ -16,18 +16,15 @@ namespace media {
HoleFrameFactory::HoleFrameFactory(
const scoped_refptr<::media::GpuVideoAcceleratorFactories>& gpu_factories)
- : gpu_factories_(gpu_factories),
- texture_(0),
- image_id_(0),
- sync_point_(0) {
+ : gpu_factories_(gpu_factories), texture_(0), image_id_(0), sync_point_(0) {
if (gpu_factories_) {
gpu::gles2::GLES2Interface* gl = gpu_factories_->GetGLES2Interface();
CHECK(gl);
gl->GenTextures(1, &texture_);
gl->BindTexture(GL_TEXTURE_2D, texture_);
- image_id_ = gl->CreateGpuMemoryBufferImageCHROMIUM(1, 1, GL_RGBA,
- GL_SCANOUT_CHROMIUM);
+ image_id_ = gl->CreateGpuMemoryBufferImageCHROMIUM(
+ 1, 1, GL_RGBA, GL_SCANOUT_CHROMIUM);
gl->BindTexImage2DCHROMIUM(GL_TEXTURE_2D, image_id_);
gl->GenMailboxCHROMIUM(mailbox_.name);
diff --git a/chromecast/media/cma/filters/hole_frame_factory.h b/chromecast/renderer/media/hole_frame_factory.h
index c77fb6b..a937a58 100644
--- a/chromecast/media/cma/filters/hole_frame_factory.h
+++ b/chromecast/renderer/media/hole_frame_factory.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef CHROMECAST_MEDIA_CMA_FILTERS_HOLE_FRAME_FACTORY_H_
-#define CHROMECAST_MEDIA_CMA_FILTERS_HOLE_FRAME_FACTORY_H_
+#ifndef CHROMECAST_RENDERER_MEDIA_HOLE_FRAME_FACTORY_H_
+#define CHROMECAST_RENDERER_MEDIA_HOLE_FRAME_FACTORY_H_
#include <GLES2/gl2.h>
@@ -46,4 +46,4 @@ class HoleFrameFactory {
} // namespace media
} // namespace chromecast
-#endif // CHROMECAST_MEDIA_CMA_FILTERS_HOLE_FRAME_FACTORY_H_
+#endif // CHROMECAST_RENDERER_MEDIA_HOLE_FRAME_FACTORY_H_
diff --git a/chromecast/renderer/media/media_pipeline_proxy.cc b/chromecast/renderer/media/media_pipeline_proxy.cc
index 38a6998..681d9eb 100644
--- a/chromecast/renderer/media/media_pipeline_proxy.cc
+++ b/chromecast/renderer/media/media_pipeline_proxy.cc
@@ -80,8 +80,7 @@ void MediaPipelineProxyInternal::Shutdown() {
CmaMessageFilterProxy::MediaDelegate());
}
-void MediaPipelineProxyInternal::SetClient(
- const MediaPipelineClient& client) {
+void MediaPipelineProxyInternal::SetClient(const MediaPipelineClient& client) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!client.error_cb.is_null());
DCHECK(!client.buffering_state_cb.is_null());
@@ -185,8 +184,7 @@ MediaPipelineProxy::~MediaPipelineProxy() {
FROM_HERE, base::Bind(&MediaChannelProxy::Close, media_channel_proxy_));
}
-void MediaPipelineProxy::SetClient(
- const MediaPipelineClient& client) {
+void MediaPipelineProxy::SetClient(const MediaPipelineClient& client) {
DCHECK(thread_checker_.CalledOnValidThread());
FORWARD_ON_IO_THREAD(SetClient, client);
}
@@ -196,11 +194,11 @@ void MediaPipelineProxy::SetCdm(int cdm_id) {
FORWARD_ON_IO_THREAD(SetCdm, render_frame_id_, cdm_id);
}
-AudioPipeline* MediaPipelineProxy::GetAudioPipeline() const {
+AudioPipelineProxy* MediaPipelineProxy::GetAudioPipeline() const {
return audio_pipeline_.get();
}
-VideoPipeline* MediaPipelineProxy::GetVideoPipeline() const {
+VideoPipelineProxy* MediaPipelineProxy::GetVideoPipeline() const {
return video_pipeline_.get();
}
diff --git a/chromecast/renderer/media/media_pipeline_proxy.h b/chromecast/renderer/media/media_pipeline_proxy.h
index 1e8e9e6..71689d8 100644
--- a/chromecast/renderer/media/media_pipeline_proxy.h
+++ b/chromecast/renderer/media/media_pipeline_proxy.h
@@ -11,9 +11,10 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
#include "chromecast/media/cma/pipeline/load_type.h"
-#include "chromecast/media/cma/pipeline/media_pipeline.h"
#include "chromecast/media/cma/pipeline/media_pipeline_client.h"
+#include "media/base/audio_decoder_config.h"
#include "media/base/serial_runner.h"
+#include "media/base/video_decoder_config.h"
namespace base {
class SingleThreadTaskRunner;
@@ -22,34 +23,32 @@ class SingleThreadTaskRunner;
namespace chromecast {
namespace media {
class AudioPipelineProxy;
+class CodedFrameProvider;
class MediaChannelProxy;
class MediaPipelineProxyInternal;
class VideoPipelineProxy;
-class MediaPipelineProxy : public MediaPipeline {
+class MediaPipelineProxy {
public:
MediaPipelineProxy(int render_frame_id,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
LoadType load_type);
- ~MediaPipelineProxy() override;
-
- // MediaPipeline implementation.
- void SetClient(const MediaPipelineClient& client) override;
- void SetCdm(int cdm_id) override;
- AudioPipeline* GetAudioPipeline() const override;
- VideoPipeline* GetVideoPipeline() const override;
- void InitializeAudio(
- const ::media::AudioDecoderConfig& config,
- scoped_ptr<CodedFrameProvider> frame_provider,
- const ::media::PipelineStatusCB& status_cb) override;
- void InitializeVideo(
- const std::vector<::media::VideoDecoderConfig>& configs,
- scoped_ptr<CodedFrameProvider> frame_provider,
- const ::media::PipelineStatusCB& status_cb) override;
- void StartPlayingFrom(base::TimeDelta time) override;
- void Flush(const ::media::PipelineStatusCB& status_cb) override;
- void Stop() override;
- void SetPlaybackRate(double playback_rate) override;
+ ~MediaPipelineProxy();
+
+ void SetClient(const MediaPipelineClient& client);
+ void SetCdm(int cdm_id);
+ AudioPipelineProxy* GetAudioPipeline() const;
+ VideoPipelineProxy* GetVideoPipeline() const;
+ void InitializeAudio(const ::media::AudioDecoderConfig& config,
+ scoped_ptr<CodedFrameProvider> frame_provider,
+ const ::media::PipelineStatusCB& status_cb);
+ void InitializeVideo(const std::vector<::media::VideoDecoderConfig>& configs,
+ scoped_ptr<CodedFrameProvider> frame_provider,
+ const ::media::PipelineStatusCB& status_cb);
+ void StartPlayingFrom(base::TimeDelta time);
+ void Flush(const ::media::PipelineStatusCB& status_cb);
+ void Stop();
+ void SetPlaybackRate(double playback_rate);
private:
void OnProxyFlushDone(const ::media::PipelineStatusCB& status_cb,
diff --git a/chromecast/media/cma/filters/multi_demuxer_stream_adapter_unittest.cc b/chromecast/renderer/media/multi_demuxer_stream_adapter_unittest.cc
index ed5c519..4f878b4 100644
--- a/chromecast/media/cma/filters/multi_demuxer_stream_adapter_unittest.cc
+++ b/chromecast/renderer/media/multi_demuxer_stream_adapter_unittest.cc
@@ -12,9 +12,9 @@
#include "base/time/time.h"
#include "chromecast/media/cma/base/balanced_media_task_runner_factory.h"
#include "chromecast/media/cma/base/decoder_buffer_base.h"
-#include "chromecast/media/cma/filters/demuxer_stream_adapter.h"
-#include "chromecast/media/cma/test/demuxer_stream_for_test.h"
#include "chromecast/public/media/cast_decoder_buffer.h"
+#include "chromecast/renderer/media/demuxer_stream_adapter.h"
+#include "chromecast/renderer/media/demuxer_stream_for_test.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
@@ -76,8 +76,9 @@ MultiDemuxerStreamAdaptersTest::~MultiDemuxerStreamAdaptersTest() {
void MultiDemuxerStreamAdaptersTest::Start() {
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
- FROM_HERE, base::Bind(&MultiDemuxerStreamAdaptersTest::OnTestTimeout,
- base::Unretained(this)),
+ FROM_HERE,
+ base::Bind(&MultiDemuxerStreamAdaptersTest::OnTestTimeout,
+ base::Unretained(this)),
base::TimeDelta::FromSeconds(5));
media_task_runner_factory_ = new BalancedMediaTaskRunnerFactory(
@@ -89,18 +90,20 @@ void MultiDemuxerStreamAdaptersTest::Start() {
for (auto& stream : demuxer_streams_) {
coded_frame_providers_.push_back(make_scoped_ptr(
new DemuxerStreamAdapter(base::ThreadTaskRunnerHandle::Get(),
- media_task_runner_factory_, stream)));
+ media_task_runner_factory_,
+ stream)));
}
running_stream_count_ = coded_frame_providers_.size();
// read each stream
for (auto& code_frame_provider : coded_frame_providers_) {
auto read_cb = base::Bind(&MultiDemuxerStreamAdaptersTest::OnNewFrame,
- base::Unretained(this), code_frame_provider);
+ base::Unretained(this),
+ code_frame_provider);
- base::Closure task =
- base::Bind(&CodedFrameProvider::Read,
- base::Unretained(code_frame_provider), read_cb);
+ base::Closure task = base::Bind(&CodedFrameProvider::Read,
+ base::Unretained(code_frame_provider),
+ read_cb);
base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, task);
}
@@ -124,7 +127,8 @@ void MultiDemuxerStreamAdaptersTest::OnNewFrame(
frame_received_count_++;
auto read_cb = base::Bind(&MultiDemuxerStreamAdaptersTest::OnNewFrame,
- base::Unretained(this), frame_provider);
+ base::Unretained(this),
+ frame_provider);
frame_provider->Read(read_cb);
}
diff --git a/chromecast/renderer/media/video_pipeline_proxy.cc b/chromecast/renderer/media/video_pipeline_proxy.cc
index bcef083..b5931b1 100644
--- a/chromecast/renderer/media/video_pipeline_proxy.cc
+++ b/chromecast/renderer/media/video_pipeline_proxy.cc
@@ -17,6 +17,7 @@
#include "chromecast/media/cma/base/coded_frame_provider.h"
#include "chromecast/media/cma/ipc/media_message_fifo.h"
#include "chromecast/media/cma/ipc_streamer/av_streamer_proxy.h"
+#include "chromecast/media/cma/pipeline/video_pipeline_client.h"
#include "chromecast/renderer/media/cma_message_filter_proxy.h"
#include "chromecast/renderer/media/media_channel_proxy.h"
#include "media/base/bind_to_current_loop.h"
@@ -213,8 +214,7 @@ VideoPipelineProxy::~VideoPipelineProxy() {
base::Bind(&VideoPipelineProxyInternal::Release, base::Passed(&proxy_)));
}
-void VideoPipelineProxy::SetClient(
- const VideoPipelineClient& video_client) {
+void VideoPipelineProxy::SetClient(const VideoPipelineClient& video_client) {
DCHECK(thread_checker_.CalledOnValidThread());
base::Closure pipe_read_cb =
::media::BindToCurrentLoop(
@@ -294,4 +294,4 @@ void VideoPipelineProxy::OnPipeRead() {
}
} // namespace media
-} // namespace chromecast
\ No newline at end of file
+} // namespace chromecast
diff --git a/chromecast/renderer/media/video_pipeline_proxy.h b/chromecast/renderer/media/video_pipeline_proxy.h
index e30be8c..1ec76c3 100644
--- a/chromecast/renderer/media/video_pipeline_proxy.h
+++ b/chromecast/renderer/media/video_pipeline_proxy.h
@@ -12,7 +12,6 @@
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
-#include "chromecast/media/cma/pipeline/video_pipeline.h"
#include "media/base/pipeline_status.h"
namespace base {
@@ -26,17 +25,17 @@ class VideoDecoderConfig;
namespace chromecast {
namespace media {
-struct AvPipelineClient;
class AvStreamerProxy;
class CodedFrameProvider;
-class VideoPipelineProxyInternal;
class MediaChannelProxy;
+struct VideoPipelineClient;
+class VideoPipelineProxyInternal;
-class VideoPipelineProxy : public VideoPipeline {
+class VideoPipelineProxy {
public:
VideoPipelineProxy(scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
scoped_refptr<MediaChannelProxy> media_channel_proxy);
- ~VideoPipelineProxy() override;
+ ~VideoPipelineProxy();
void Initialize(const std::vector<::media::VideoDecoderConfig>& configs,
scoped_ptr<CodedFrameProvider> frame_provider,
@@ -45,8 +44,7 @@ class VideoPipelineProxy : public VideoPipeline {
void Flush(const base::Closure& done_cb);
void Stop();
- // VideoPipeline implementation.
- void SetClient(const VideoPipelineClient& video_client) override;
+ void SetClient(const VideoPipelineClient& video_client);
private:
base::ThreadChecker thread_checker_;