author     scherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-02-16 02:03:16 +0000
committer  scherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2011-02-16 02:03:16 +0000
commit     4c51bc66e6496234d5a3a7cbdfb1cf07cca2167e (patch)
tree       03e28346b7752cec02e91ba925694548b49b8184 /media
parent     9cadfb34adaf42ad30e398b24ab06e5bc587e57a (diff)
Implement WebKit media metrics in Chromium.

This implements the Chromium side of the WebKit media statistics feature. A follow-up CL (in WebKit) will wire the two sides together.

Patch by sjl@chromium.org: http://codereview.chromium.org/6246091/

BUG=71255
TEST=media_unittests

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@75050 0039d316-1c4b-4281-b951-d872f2087c98
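For orientation, the flow this patch introduces is: decoders and the video renderer report small per-decode deltas through a StatisticsCallback, PipelineImpl::OnUpdateStatistics() accumulates them under the pipeline lock, and clients read a snapshot via Pipeline::GetStatistics(). The sketch below is illustrative only: StatisticsAggregator is a hypothetical name, and standard C++ types (std::mutex, plain struct initializers) stand in for Chromium's base::AutoLock and Callback1 machinery.

// Minimal standalone sketch of the aggregation side; not the Chromium classes.
#include <cstdint>
#include <mutex>

struct PipelineStatistics {
  uint32_t audio_bytes_decoded = 0;
  uint32_t video_bytes_decoded = 0;
  uint32_t video_frames_decoded = 0;
  uint32_t video_frames_dropped = 0;
};

class StatisticsAggregator {
 public:
  // Called from any thread with the delta for a single decode/render step,
  // mirroring what PipelineImpl::OnUpdateStatistics() does in the patch.
  void OnUpdateStatistics(const PipelineStatistics& delta) {
    std::lock_guard<std::mutex> lock(lock_);
    totals_.audio_bytes_decoded += delta.audio_bytes_decoded;
    totals_.video_bytes_decoded += delta.video_bytes_decoded;
    totals_.video_frames_decoded += delta.video_frames_decoded;
    totals_.video_frames_dropped += delta.video_frames_dropped;
  }

  // Returns a snapshot of the running totals, as Pipeline::GetStatistics() does.
  PipelineStatistics GetStatistics() const {
    std::lock_guard<std::mutex> lock(lock_);
    return totals_;
  }

 private:
  mutable std::mutex lock_;
  PipelineStatistics totals_;
};

A producer on any thread hands OnUpdateStatistics() a delta, while the UI/WebKit side periodically polls GetStatistics() for the running totals.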
Diffstat (limited to 'media')
-rw-r--r--  media/base/filters.h | 16
-rw-r--r--  media/base/mock_filters.cc | 6
-rw-r--r--  media/base/mock_filters.h | 26
-rw-r--r--  media/base/pipeline.h | 17
-rw-r--r--  media/base/pipeline_impl.cc | 24
-rw-r--r--  media/base/pipeline_impl.h | 7
-rw-r--r--  media/base/pipeline_impl_unittest.cc | 14
-rw-r--r--  media/filters/decoder_base.h | 14
-rw-r--r--  media/filters/decoder_base_unittest.cc | 13
-rw-r--r--  media/filters/ffmpeg_audio_decoder.cc | 14
-rw-r--r--  media/filters/ffmpeg_video_decoder.cc | 11
-rw-r--r--  media/filters/ffmpeg_video_decoder.h | 7
-rw-r--r--  media/filters/ffmpeg_video_decoder_unittest.cc | 52
-rw-r--r--  media/filters/omx_video_decoder.cc | 10
-rw-r--r--  media/filters/omx_video_decoder.h | 8
-rw-r--r--  media/filters/video_renderer_base.cc | 21
-rw-r--r--  media/filters/video_renderer_base.h | 5
-rw-r--r--  media/filters/video_renderer_base_unittest.cc | 15
-rw-r--r--  media/tools/omx_test/omx_test.cc | 4
-rw-r--r--  media/video/ffmpeg_video_decode_engine.cc | 12
-rw-r--r--  media/video/ffmpeg_video_decode_engine_unittest.cc | 12
-rw-r--r--  media/video/mft_h264_decode_engine.cc | 22
-rw-r--r--  media/video/mft_h264_decode_engine.h | 4
-rw-r--r--  media/video/mft_h264_decode_engine_unittest.cc | 3
-rw-r--r--  media/video/omx_video_decode_engine.cc | 21
-rw-r--r--  media/video/video_decode_engine.h | 5
26 files changed, 275 insertions(+), 88 deletions(-)
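The recurring change in the diff below is that VideoDecoder, AudioDecoder and VideoRenderer Initialize() methods gain a StatisticsCallback parameter, and each decode path fires a delta (for example audio_bytes_decoded or video_bytes_decoded set to the packet size). A rough producer-side sketch follows, with std::function standing in for Chromium's Callback1 and SketchVideoDecoder being a hypothetical class, not a filter from this patch.

// Hedged producer-side sketch; the real filters keep the callback in a
// scoped_ptr<StatisticsCallback> member named statistics_callback_.
#include <cstddef>
#include <cstdint>
#include <functional>
#include <utility>

struct PipelineStatistics {
  uint32_t audio_bytes_decoded = 0;
  uint32_t video_bytes_decoded = 0;
  uint32_t video_frames_decoded = 0;
  uint32_t video_frames_dropped = 0;
};

using StatisticsCallback = std::function<void(const PipelineStatistics&)>;

class SketchVideoDecoder {
 public:
  // In the patch, the callback arrives as a third Initialize() argument.
  void Initialize(StatisticsCallback stats_callback) {
    stats_callback_ = std::move(stats_callback);
  }

  // After decoding one packet, report the byte count as a delta so the
  // pipeline can accumulate running totals.
  void DecodePacket(const uint8_t* /*data*/, size_t size) {
    PipelineStatistics delta;
    delta.video_bytes_decoded = static_cast<uint32_t>(size);
    if (stats_callback_)
      stats_callback_(delta);
  }

 private:
  StatisticsCallback stats_callback_;
};

In the actual patch, the FFmpeg, OMX and MFT decode engines set video_bytes_decoded from the input buffer size and pass the struct through ConsumeVideoFrame(), while VideoRendererBase reports video_frames_decoded and video_frames_dropped.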
diff --git a/media/base/filters.h b/media/base/filters.h
index 84c466d..06f44c2 100644
--- a/media/base/filters.h
+++ b/media/base/filters.h
@@ -41,9 +41,14 @@ class DemuxerStream;
class Filter;
class FilterHost;
+struct PipelineStatistics;
+
// Used for completing asynchronous methods.
typedef Callback0::Type FilterCallback;
+// Used for updating pipeline statistics.
+typedef Callback1<const PipelineStatistics&>::Type StatisticsCallback;
+
class Filter : public base::RefCountedThreadSafe<Filter> {
public:
Filter();
@@ -193,7 +198,9 @@ class VideoDecoder : public Filter {
// Initialize a VideoDecoder with the given DemuxerStream, executing the
// callback upon completion.
- virtual void Initialize(DemuxerStream* stream, FilterCallback* callback) = 0;
+ // stats_callback is used to update global pipeline statistics.
+ virtual void Initialize(DemuxerStream* stream, FilterCallback* callback,
+ StatisticsCallback* stats_callback) = 0;
// |set_fill_buffer_done_callback| install permanent callback from downstream
// filter (i.e. Renderer). The callback is used to deliver video frames at
@@ -235,7 +242,9 @@ class AudioDecoder : public Filter {
// Initialize a AudioDecoder with the given DemuxerStream, executing the
// callback upon completion.
- virtual void Initialize(DemuxerStream* stream, FilterCallback* callback) = 0;
+ // stats_callback is used to update global pipeline statistics.
+ virtual void Initialize(DemuxerStream* stream, FilterCallback* callback,
+ StatisticsCallback* stats_callback) = 0;
// |set_fill_buffer_done_callback| install permanent callback from downstream
// filter (i.e. Renderer). The callback is used to deliver buffers at
@@ -272,7 +281,8 @@ class VideoRenderer : public Filter {
// Initialize a VideoRenderer with the given VideoDecoder, executing the
// callback upon completion.
- virtual void Initialize(VideoDecoder* decoder, FilterCallback* callback) = 0;
+ virtual void Initialize(VideoDecoder* decoder, FilterCallback* callback,
+ StatisticsCallback* stats_callback) = 0;
// Returns true if this filter has received and processed an end-of-stream
// buffer.
diff --git a/media/base/mock_filters.cc b/media/base/mock_filters.cc
index 8aa5458..ebfa90c 100644
--- a/media/base/mock_filters.cc
+++ b/media/base/mock_filters.cc
@@ -65,6 +65,12 @@ void RunFilterCallback(::testing::Unused, FilterCallback* callback) {
delete callback;
}
+void RunFilterCallback3(::testing::Unused, FilterCallback* callback,
+ ::testing::Unused) {
+ callback->Run();
+ delete callback;
+}
+
void DestroyFilterCallback(::testing::Unused, FilterCallback* callback) {
delete callback;
}
diff --git a/media/base/mock_filters.h b/media/base/mock_filters.h
index 3439597..22eba74 100644
--- a/media/base/mock_filters.h
+++ b/media/base/mock_filters.h
@@ -17,6 +17,7 @@
#include "media/base/filters.h"
#include "media/base/filter_collection.h"
+#include "media/base/pipeline.h"
#include "media/base/video_frame.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -143,8 +144,9 @@ class MockVideoDecoder : public VideoDecoder {
MOCK_METHOD0(OnAudioRendererDisabled, void());
// VideoDecoder implementation.
- MOCK_METHOD2(Initialize, void(DemuxerStream* stream,
- FilterCallback* callback));
+ MOCK_METHOD3(Initialize, void(DemuxerStream* stream,
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback));
MOCK_METHOD0(media_format, const MediaFormat&());
MOCK_METHOD1(ProduceVideoFrame, void(scoped_refptr<VideoFrame>));
MOCK_METHOD0(ProvidesBuffer, bool());
@@ -172,8 +174,9 @@ class MockAudioDecoder : public AudioDecoder {
MOCK_METHOD0(OnAudioRendererDisabled, void());
// AudioDecoder implementation.
- MOCK_METHOD2(Initialize, void(DemuxerStream* stream,
- FilterCallback* callback));
+ MOCK_METHOD3(Initialize, void(DemuxerStream* stream,
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback));
MOCK_METHOD0(media_format, const MediaFormat&());
MOCK_METHOD1(ProduceAudioSamples, void(scoped_refptr<Buffer>));
@@ -200,8 +203,9 @@ class MockVideoRenderer : public VideoRenderer {
MOCK_METHOD0(OnAudioRendererDisabled, void());
// VideoRenderer implementation.
- MOCK_METHOD2(Initialize, void(VideoDecoder* decoder,
- FilterCallback* callback));
+ MOCK_METHOD3(Initialize, void(VideoDecoder* decoder,
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback));
MOCK_METHOD0(HasEnded, bool());
MOCK_METHOD1(ConsumeVideoFrame, void(scoped_refptr<VideoFrame> frame));
@@ -267,10 +271,12 @@ class MockFilterCollection {
DISALLOW_COPY_AND_ASSIGN(MockFilterCollection);
};
-// Helper gmock function that immediately executes and destroys the
+// Helper gmock functions that immediately executes and destroys the
// FilterCallback on behalf of the provided filter. Can be used when mocking
// the Initialize() and Seek() methods.
void RunFilterCallback(::testing::Unused, FilterCallback* callback);
+void RunFilterCallback3(::testing::Unused, FilterCallback* callback,
+ ::testing::Unused);
// Helper gmock function that immediately destroys the FilterCallback on behalf
// of the provided filter. Can be used when mocking the Initialize() and Seek()
@@ -311,6 +317,12 @@ ACTION_P(DisableAudioRenderer, filter) {
filter->host()->DisableAudioRenderer();
}
+// Helper mock statistics callback.
+class MockStatisticsCallback {
+ public:
+ MOCK_METHOD1(OnStatistics, void(const media::PipelineStatistics& statistics));
+};
+
} // namespace media
#endif // MEDIA_BASE_MOCK_FILTERS_H_
diff --git a/media/base/pipeline.h b/media/base/pipeline.h
index 61a1110..0b87d29 100644
--- a/media/base/pipeline.h
+++ b/media/base/pipeline.h
@@ -42,6 +42,20 @@ enum PipelineError {
DEMUXER_ERROR_COULD_NOT_CREATE_THREAD,
};
+struct PipelineStatistics {
+ PipelineStatistics() :
+ audio_bytes_decoded(0),
+ video_bytes_decoded(0),
+ video_frames_decoded(0),
+ video_frames_dropped(0) {
+ }
+
+ uint32 audio_bytes_decoded; // Should be uint64?
+ uint32 video_bytes_decoded; // Should be uint64?
+ uint32 video_frames_decoded;
+ uint32 video_frames_dropped;
+};
+
class FilterCollection;
// Client-provided callbacks for various pipeline operations. Clients should
@@ -172,6 +186,9 @@ class Pipeline : public base::RefCountedThreadSafe<Pipeline> {
// operating correctly, this will return OK.
virtual PipelineError GetError() const = 0;
+ // Gets the current pipeline statistics.
+ virtual PipelineStatistics GetStatistics() const = 0;
+
protected:
// Only allow ourselves to be deleted by reference counting.
friend class base::RefCountedThreadSafe<Pipeline>;
diff --git a/media/base/pipeline_impl.cc b/media/base/pipeline_impl.cc
index a0fda64..76a07cd 100644
--- a/media/base/pipeline_impl.cc
+++ b/media/base/pipeline_impl.cc
@@ -274,6 +274,11 @@ PipelineError PipelineImpl::GetError() const {
return error_;
}
+PipelineStatistics PipelineImpl::GetStatistics() const {
+ base::AutoLock auto_lock(lock_);
+ return statistics_;
+}
+
void PipelineImpl::SetCurrentReadPosition(int64 offset) {
base::AutoLock auto_lock(lock_);
@@ -530,6 +535,15 @@ void PipelineImpl::OnTeardownStateTransition() {
NewRunnableMethod(this, &PipelineImpl::TeardownStateTransitionTask));
}
+// Called from any thread.
+void PipelineImpl::OnUpdateStatistics(const PipelineStatistics& stats) {
+ base::AutoLock auto_lock(lock_);
+ statistics_.audio_bytes_decoded += stats.audio_bytes_decoded;
+ statistics_.video_bytes_decoded += stats.video_bytes_decoded;
+ statistics_.video_frames_decoded += stats.video_frames_decoded;
+ statistics_.video_frames_dropped += stats.video_frames_dropped;
+}
+
void PipelineImpl::StartTask(FilterCollection* filter_collection,
const std::string& url,
PipelineCallback* start_callback) {
@@ -1074,7 +1088,8 @@ bool PipelineImpl::InitializeAudioDecoder(
pipeline_init_state_->audio_decoder_ = audio_decoder;
audio_decoder->Initialize(
stream,
- NewCallback(this, &PipelineImpl::OnFilterInitialize));
+ NewCallback(this, &PipelineImpl::OnFilterInitialize),
+ NewCallback(this, &PipelineImpl::OnUpdateStatistics));
return true;
}
@@ -1103,7 +1118,8 @@ bool PipelineImpl::InitializeVideoDecoder(
pipeline_init_state_->video_decoder_ = video_decoder;
video_decoder->Initialize(
stream,
- NewCallback(this, &PipelineImpl::OnFilterInitialize));
+ NewCallback(this, &PipelineImpl::OnFilterInitialize),
+ NewCallback(this, &PipelineImpl::OnUpdateStatistics));
return true;
}
@@ -1147,7 +1163,9 @@ bool PipelineImpl::InitializeVideoRenderer(
return false;
video_renderer_->Initialize(
- decoder, NewCallback(this, &PipelineImpl::OnFilterInitialize));
+ decoder,
+ NewCallback(this, &PipelineImpl::OnFilterInitialize),
+ NewCallback(this, &PipelineImpl::OnUpdateStatistics));
return true;
}
diff --git a/media/base/pipeline_impl.h b/media/base/pipeline_impl.h
index 3611416..128e350 100644
--- a/media/base/pipeline_impl.h
+++ b/media/base/pipeline_impl.h
@@ -92,6 +92,7 @@ class PipelineImpl : public Pipeline, public FilterHost {
virtual bool IsStreaming() const;
virtual bool IsLoaded() const;
virtual PipelineError GetError() const;
+ virtual PipelineStatistics GetStatistics() const;
private:
// Pipeline states, as described above.
@@ -186,6 +187,9 @@ class PipelineImpl : public Pipeline, public FilterHost {
// Callback executed by filters when completing teardown operations.
void OnTeardownStateTransition();
+ // Callback executed by filters to update statistics.
+ void OnUpdateStatistics(const PipelineStatistics& stats);
+
// The following "task" methods correspond to the public methods, but these
// methods are run as the result of posting a task to the PipelineInternal's
// message loop.
@@ -405,6 +409,9 @@ class PipelineImpl : public Pipeline, public FilterHost {
class PipelineInitState;
scoped_ptr<PipelineInitState> pipeline_init_state_;
+ // Statistics.
+ PipelineStatistics statistics_;
+
FRIEND_TEST_ALL_PREFIXES(PipelineImplTest, GetBufferedTime);
FRIEND_TEST_ALL_PREFIXES(PipelineImplTest, AudioStreamShorterThanVideo);
diff --git a/media/base/pipeline_impl_unittest.cc b/media/base/pipeline_impl_unittest.cc
index 54e4dbf..45caef8 100644
--- a/media/base/pipeline_impl_unittest.cc
+++ b/media/base/pipeline_impl_unittest.cc
@@ -148,8 +148,9 @@ class PipelineImplTest : public ::testing::Test {
// Sets up expectations to allow the video decoder to initialize.
void InitializeVideoDecoder(MockDemuxerStream* stream) {
- EXPECT_CALL(*mocks_->video_decoder(), Initialize(stream, NotNull()))
- .WillOnce(Invoke(&RunFilterCallback));
+ EXPECT_CALL(*mocks_->video_decoder(),
+ Initialize(stream, NotNull(), NotNull()))
+ .WillOnce(Invoke(&RunFilterCallback3));
EXPECT_CALL(*mocks_->video_decoder(), SetPlaybackRate(0.0f));
EXPECT_CALL(*mocks_->video_decoder(), Seek(base::TimeDelta(), NotNull()))
.WillOnce(Invoke(&RunFilterCallback));
@@ -159,8 +160,9 @@ class PipelineImplTest : public ::testing::Test {
// Sets up expectations to allow the audio decoder to initialize.
void InitializeAudioDecoder(MockDemuxerStream* stream) {
- EXPECT_CALL(*mocks_->audio_decoder(), Initialize(stream, NotNull()))
- .WillOnce(Invoke(&RunFilterCallback));
+ EXPECT_CALL(*mocks_->audio_decoder(),
+ Initialize(stream, NotNull(), NotNull()))
+ .WillOnce(Invoke(&RunFilterCallback3));
EXPECT_CALL(*mocks_->audio_decoder(), SetPlaybackRate(0.0f));
EXPECT_CALL(*mocks_->audio_decoder(), Seek(base::TimeDelta(), NotNull()))
.WillOnce(Invoke(&RunFilterCallback));
@@ -171,8 +173,8 @@ class PipelineImplTest : public ::testing::Test {
// Sets up expectations to allow the video renderer to initialize.
void InitializeVideoRenderer() {
EXPECT_CALL(*mocks_->video_renderer(),
- Initialize(mocks_->video_decoder(), NotNull()))
- .WillOnce(Invoke(&RunFilterCallback));
+ Initialize(mocks_->video_decoder(), NotNull(), NotNull()))
+ .WillOnce(Invoke(&RunFilterCallback3));
EXPECT_CALL(*mocks_->video_renderer(), SetPlaybackRate(0.0f));
EXPECT_CALL(*mocks_->video_renderer(), Seek(base::TimeDelta(), NotNull()))
.WillOnce(Invoke(&RunFilterCallback));
diff --git a/media/filters/decoder_base.h b/media/filters/decoder_base.h
index 7ddd16e..2d660c9 100644
--- a/media/filters/decoder_base.h
+++ b/media/filters/decoder_base.h
@@ -42,7 +42,9 @@ class DecoderBase : public Decoder {
// Decoder implementation.
virtual void Initialize(DemuxerStream* demuxer_stream,
- FilterCallback* callback) {
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback) {
+ statistics_callback_.reset(stats_callback);
message_loop_->PostTask(
FROM_HERE,
NewRunnableMethod(this,
@@ -118,7 +120,9 @@ class DecoderBase : public Decoder {
MediaFormat media_format_;
- void OnDecodeComplete() {
+ void OnDecodeComplete(const PipelineStatistics& statistics) {
+ statistics_callback_->Run(statistics);
+
// Attempt to fulfill a pending read callback and schedule additional reads
// if necessary.
bool fulfilled = FulfillPendingRead();
@@ -192,7 +196,8 @@ class DecoderBase : public Decoder {
}
}
- void InitializeTask(DemuxerStream* demuxer_stream, FilterCallback* callback) {
+ void InitializeTask(DemuxerStream* demuxer_stream,
+ FilterCallback* callback) {
DCHECK_EQ(MessageLoop::current(), message_loop_);
CHECK(kUninitialized == state_);
CHECK(!demuxer_stream_);
@@ -307,6 +312,9 @@ class DecoderBase : public Decoder {
};
State state_;
+ // Callback to update pipeline statistics.
+ scoped_ptr<StatisticsCallback> statistics_callback_;
+
DISALLOW_COPY_AND_ASSIGN(DecoderBase);
};
diff --git a/media/filters/decoder_base_unittest.cc b/media/filters/decoder_base_unittest.cc
index 661ba4e..72e6e26 100644
--- a/media/filters/decoder_base_unittest.cc
+++ b/media/filters/decoder_base_unittest.cc
@@ -7,6 +7,7 @@
#include "media/base/mock_callback.h"
#include "media/base/mock_filters.h"
#include "media/base/mock_task.h"
+#include "media/base/pipeline.h"
#include "media/filters/decoder_base.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -103,7 +104,7 @@ ACTION(CompleteDemuxRequest) {
delete arg0;
}
-// Test the flow control of decoder base by the following sequnce of actions:
+// Test the flow control of decoder base by the following sequence of actions:
// - Read() -> DecoderStream
// \ Read() -> DemuxerStream
// - Read() -> DecoderBase
@@ -123,11 +124,14 @@ TEST(DecoderBaseTest, FlowControl) {
decoder->set_consume_audio_samples_callback(
NewCallback(&read_callback, &MockDecoderCallback::OnReadComplete));
scoped_refptr<MockDemuxerStream> demuxer_stream(new MockDemuxerStream());
+ MockStatisticsCallback stats_callback_object;
- // Initailize.
+ // Initialize.
EXPECT_CALL(*decoder, DoInitialize(NotNull(), NotNull(), NotNull()))
.WillOnce(Initialize());
- decoder->Initialize(demuxer_stream.get(), NewExpectedCallback());
+ decoder->Initialize(demuxer_stream.get(), NewExpectedCallback(),
+ NewCallback(&stats_callback_object,
+ &MockStatisticsCallback::OnStatistics));
message_loop.RunAllPending();
// Read.
@@ -145,9 +149,10 @@ TEST(DecoderBaseTest, FlowControl) {
// Fulfill the decode request.
EXPECT_CALL(read_callback, OnReadComplete(_)).Times(2);
+ PipelineStatistics statistics;
for (size_t i = 0; i < decode_requests.size(); ++i) {
decoder->EnqueueResult(new MockDecoderOutput());
- decoder->OnDecodeComplete();
+ decoder->OnDecodeComplete(statistics);
}
decode_requests.clear();
message_loop.RunAllPending();
diff --git a/media/filters/ffmpeg_audio_decoder.cc b/media/filters/ffmpeg_audio_decoder.cc
index 030149a..254222d 100644
--- a/media/filters/ffmpeg_audio_decoder.cc
+++ b/media/filters/ffmpeg_audio_decoder.cc
@@ -148,6 +148,8 @@ static void ConvertAudioF32ToS32(void* buffer, int buffer_size) {
}
void FFmpegAudioDecoder::DoDecode(Buffer* input) {
+ PipelineStatistics statistics;
+
// FFmpeg tends to seek Ogg audio streams in the middle of nowhere, giving us
// a whole bunch of AV_NOPTS_VALUE packets. Discard them until we find
// something valid. Refer to http://crbug.com/49709
@@ -155,7 +157,7 @@ void FFmpegAudioDecoder::DoDecode(Buffer* input) {
if (input->GetTimestamp() == kNoTimestamp &&
estimated_next_timestamp_ == kNoTimestamp &&
!input->IsEndOfStream()) {
- DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete();
+ DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete(statistics);
return;
}
@@ -165,6 +167,8 @@ void FFmpegAudioDecoder::DoDecode(Buffer* input) {
packet.data = const_cast<uint8*>(input->GetData());
packet.size = input->GetDataSize();
+ statistics.audio_bytes_decoded = input->GetDataSize();
+
int16_t* output_buffer = reinterpret_cast<int16_t*>(output_buffer_.get());
int output_buffer_size = kOutputBufferSize;
int result = avcodec_decode_audio3(codec_context_,
@@ -185,7 +189,7 @@ void FFmpegAudioDecoder::DoDecode(Buffer* input) {
<< input->GetTimestamp().InMicroseconds() << " us, duration: "
<< input->GetDuration().InMicroseconds() << " us, packet size: "
<< input->GetDataSize() << " bytes";
- DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete();
+ DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete(statistics);
return;
}
@@ -216,7 +220,7 @@ void FFmpegAudioDecoder::DoDecode(Buffer* input) {
}
EnqueueResult(result_buffer);
- DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete();
+ DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete(statistics);
return;
}
@@ -227,7 +231,7 @@ void FFmpegAudioDecoder::DoDecode(Buffer* input) {
input->GetTimestamp() != kNoTimestamp &&
input->GetDuration() != kNoTimestamp) {
estimated_next_timestamp_ = input->GetTimestamp() + input->GetDuration();
- DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete();
+ DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete(statistics);
return;
}
@@ -241,7 +245,7 @@ void FFmpegAudioDecoder::DoDecode(Buffer* input) {
result_buffer->SetDuration(input->GetDuration());
EnqueueResult(result_buffer);
}
- DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete();
+ DecoderBase<AudioDecoder, Buffer>::OnDecodeComplete(statistics);
}
base::TimeDelta FFmpegAudioDecoder::CalculateDuration(size_t size) {
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 13b26b2..d4dca2f 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -37,14 +37,15 @@ FFmpegVideoDecoder::~FFmpegVideoDecoder() {
}
void FFmpegVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
- FilterCallback* callback) {
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback) {
if (MessageLoop::current() != message_loop_) {
message_loop_->PostTask(
FROM_HERE,
NewRunnableMethod(this,
&FFmpegVideoDecoder::Initialize,
make_scoped_refptr(demuxer_stream),
- callback));
+ callback, stats_callback));
return;
}
@@ -54,6 +55,7 @@ void FFmpegVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
demuxer_stream_ = demuxer_stream;
initialize_callback_.reset(callback);
+ statistics_callback_.reset(stats_callback);
// Get the AVStream by querying for the provider interface.
AVStreamProvider* av_stream_provider;
@@ -331,10 +333,13 @@ void FFmpegVideoDecoder::ProduceVideoFrame(
}
void FFmpegVideoDecoder::ConsumeVideoFrame(
- scoped_refptr<VideoFrame> video_frame) {
+ scoped_refptr<VideoFrame> video_frame,
+ const PipelineStatistics& statistics) {
DCHECK_EQ(MessageLoop::current(), message_loop_);
DCHECK_NE(state_, kStopped);
+ statistics_callback_->Run(statistics);
+
if (video_frame.get()) {
if (kPausing == state_ || kFlushing == state_) {
frame_queue_flushed_.push_back(video_frame);
diff --git a/media/filters/ffmpeg_video_decoder.h b/media/filters/ffmpeg_video_decoder.h
index 82e5a8c..4e6e132 100644
--- a/media/filters/ffmpeg_video_decoder.h
+++ b/media/filters/ffmpeg_video_decoder.h
@@ -37,7 +37,8 @@ class FFmpegVideoDecoder : public VideoDecoder,
// Decoder implementation.
virtual void Initialize(DemuxerStream* demuxer_stream,
- FilterCallback* callback);
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback);
virtual const MediaFormat& media_format();
virtual void ProduceVideoFrame(scoped_refptr<VideoFrame> video_frame);
virtual bool ProvidesBuffer();
@@ -51,7 +52,8 @@ class FFmpegVideoDecoder : public VideoDecoder,
virtual void OnError();
virtual void OnFormatChange(VideoStreamInfo stream_info);
virtual void ProduceVideoSample(scoped_refptr<Buffer> buffer);
- virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame);
+ virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics);
friend class DecoderPrivateMock;
friend class FFmpegVideoDecoderTest;
@@ -126,6 +128,7 @@ class FFmpegVideoDecoder : public VideoDecoder,
scoped_ptr<FilterCallback> uninitialize_callback_;
scoped_ptr<FilterCallback> flush_callback_;
scoped_ptr<FilterCallback> seek_callback_;
+ scoped_ptr<StatisticsCallback> statistics_callback_;
// Hold video frames when flush happens.
std::deque<scoped_refptr<VideoFrame> > frame_queue_flushed_;
diff --git a/media/filters/ffmpeg_video_decoder_unittest.cc b/media/filters/ffmpeg_video_decoder_unittest.cc
index 128e531..da87170 100644
--- a/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -80,8 +80,9 @@ class DecoderPrivateMock : public FFmpegVideoDecoder {
void ProduceVideoSample(scoped_refptr<Buffer> buffer) {
FFmpegVideoDecoder::ProduceVideoSample(buffer);
}
- void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
- FFmpegVideoDecoder::ConsumeVideoFrame(frame);
+ void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics) {
+ FFmpegVideoDecoder::ConsumeVideoFrame(frame, statistics);
}
void OnReadComplete(Buffer* buffer) {
FFmpegVideoDecoder::OnReadComplete(buffer);
@@ -118,6 +119,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
static const FFmpegVideoDecoder::TimeTuple kTestPts1;
static const FFmpegVideoDecoder::TimeTuple kTestPts2;
static const FFmpegVideoDecoder::TimeTuple kTestPts3;
+ static const PipelineStatistics kStatistics;
FFmpegVideoDecoderTest() {
MediaFormat media_format;
@@ -186,9 +188,16 @@ class FFmpegVideoDecoderTest : public testing::Test {
EXPECT_CALL(*engine_, Initialize(_, _, _, _))
.WillOnce(EngineInitialize(engine_, true));
- decoder_->Initialize(demuxer_, NewExpectedCallback());
+ decoder_->Initialize(demuxer_,
+ NewExpectedCallback(), NewStatisticsCallback());
message_loop_.RunAllPending();
}
+
+ StatisticsCallback* NewStatisticsCallback() {
+ return NewCallback(&stats_callback_object_,
+ &MockStatisticsCallback::OnStatistics);
+ }
+
// Fixture members.
MockVideoDecodeEngine* engine_; // Owned by |decoder_|.
scoped_refptr<DecoderPrivateMock> decoder_;
@@ -196,6 +205,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
scoped_refptr<StrictMock<MockFFmpegDemuxerStream> > demuxer_;
scoped_refptr<DataBuffer> buffer_;
scoped_refptr<DataBuffer> end_of_stream_buffer_;
+ MockStatisticsCallback stats_callback_object_;
StrictMock<MockFilterHost> host_;
MessageLoop message_loop_;
@@ -223,13 +233,17 @@ const FFmpegVideoDecoder::TimeTuple FFmpegVideoDecoderTest::kTestPts3 =
{ base::TimeDelta::FromMicroseconds(789),
base::TimeDelta::FromMicroseconds(60) };
+const PipelineStatistics FFmpegVideoDecoderTest::kStatistics;
+
TEST_F(FFmpegVideoDecoderTest, Initialize_QueryInterfaceFails) {
// Test QueryInterface returning NULL.
EXPECT_CALL(*demuxer_, QueryInterface(AVStreamProvider::interface_id()))
.WillOnce(ReturnNull());
EXPECT_CALL(host_, SetError(PIPELINE_ERROR_DECODE));
- decoder_->Initialize(demuxer_, NewExpectedCallback());
+ decoder_->Initialize(demuxer_,
+ NewExpectedCallback(), NewStatisticsCallback());
+
message_loop_.RunAllPending();
}
@@ -246,7 +260,8 @@ TEST_F(FFmpegVideoDecoderTest, Initialize_EngineFails) {
EXPECT_CALL(host_, SetError(PIPELINE_ERROR_DECODE));
- decoder_->Initialize(demuxer_, NewExpectedCallback());
+ decoder_->Initialize(demuxer_,
+ NewExpectedCallback(), NewStatisticsCallback());
message_loop_.RunAllPending();
}
@@ -351,15 +366,15 @@ ACTION_P3(ReturnFromDemux, decoder, buffer, time_tuple) {
decoder->OnReadComplete(buffer);
}
-ACTION_P3(DecodeComplete, decoder, video_frame, time_tuple) {
+ACTION_P4(DecodeComplete, decoder, video_frame, time_tuple, statistics) {
video_frame->SetTimestamp(time_tuple.timestamp);
video_frame->SetDuration(time_tuple.duration);
- decoder->ConsumeVideoFrame(video_frame);
+ decoder->ConsumeVideoFrame(video_frame, statistics);
}
-ACTION_P2(DecodeNotComplete, decoder, buffer) {
+ACTION_P3(DecodeNotComplete, decoder, buffer, statistics) {
scoped_refptr<VideoFrame> null_frame;
if (buffer->IsEndOfStream()) // We had started flushing.
- decoder->ConsumeVideoFrame(null_frame);
+ decoder->ConsumeVideoFrame(null_frame, statistics);
else
decoder->ProduceVideoSample(buffer);
}
@@ -406,14 +421,21 @@ TEST_F(FFmpegVideoDecoderTest, DoDecode_TestStateTransition) {
.WillOnce(ReturnFromDemux(decoder_.get(),
end_of_stream_buffer_, kTestPts3));
EXPECT_CALL(*engine_, ConsumeVideoSample(_))
- .WillOnce(DecodeNotComplete(decoder_.get(), buffer_))
- .WillOnce(DecodeComplete(decoder_.get(), video_frame_, kTestPts1))
- .WillOnce(DecodeNotComplete(decoder_.get(), buffer_))
- .WillOnce(DecodeComplete(decoder_.get(), video_frame_, kTestPts2))
- .WillOnce(DecodeComplete(decoder_.get(), video_frame_, kTestPts3))
- .WillOnce(DecodeNotComplete(decoder_.get(), end_of_stream_buffer_));
+ .WillOnce(DecodeNotComplete(decoder_.get(), buffer_, kStatistics))
+ .WillOnce(DecodeComplete(decoder_.get(),
+ video_frame_, kTestPts1, kStatistics))
+ .WillOnce(DecodeNotComplete(decoder_.get(),
+ buffer_, kStatistics))
+ .WillOnce(DecodeComplete(decoder_.get(),
+ video_frame_, kTestPts2, kStatistics))
+ .WillOnce(DecodeComplete(decoder_.get(),
+ video_frame_, kTestPts3, kStatistics))
+ .WillOnce(DecodeNotComplete(decoder_.get(),
+ end_of_stream_buffer_, kStatistics));
EXPECT_CALL(*renderer_.get(), ConsumeVideoFrame(_))
.Times(4);
+ EXPECT_CALL(stats_callback_object_, OnStatistics(_))
+ .Times(4);
// First request from renderer: at first round decode engine did not produce
// any frame. Decoder will issue another read from demuxer. at second round
diff --git a/media/filters/omx_video_decoder.cc b/media/filters/omx_video_decoder.cc
index 3a31bf7..bc8617b 100644
--- a/media/filters/omx_video_decoder.cc
+++ b/media/filters/omx_video_decoder.cc
@@ -31,14 +31,15 @@ OmxVideoDecoder::~OmxVideoDecoder() {
}
void OmxVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
- FilterCallback* callback) {
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback) {
if (MessageLoop::current() != message_loop_) {
message_loop_->PostTask(
FROM_HERE,
NewRunnableMethod(this,
&OmxVideoDecoder::Initialize,
make_scoped_refptr(demuxer_stream),
- callback));
+ callback, stats_callback));
return;
}
@@ -47,6 +48,7 @@ void OmxVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
DCHECK(!initialize_callback_.get());
initialize_callback_.reset(callback);
+ statistics_callback_.reset(stats_callback);
demuxer_stream_ = demuxer_stream;
// We require bit stream converter for openmax hardware decoder.
@@ -204,8 +206,10 @@ void OmxVideoDecoder::ProduceVideoSample(scoped_refptr<Buffer> buffer) {
demuxer_stream_->Read(NewCallback(this, &OmxVideoDecoder::DemuxCompleteTask));
}
-void OmxVideoDecoder::ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
+void OmxVideoDecoder::ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics) {
DCHECK_EQ(message_loop_, MessageLoop::current());
+ statistics_callback_->Run(statistics);
VideoFrameReady(frame);
}
diff --git a/media/filters/omx_video_decoder.h b/media/filters/omx_video_decoder.h
index ddec99f..7c6a118 100644
--- a/media/filters/omx_video_decoder.h
+++ b/media/filters/omx_video_decoder.h
@@ -28,7 +28,9 @@ class OmxVideoDecoder : public VideoDecoder,
virtual ~OmxVideoDecoder();
// Filter implementations.
- virtual void Initialize(DemuxerStream* stream, FilterCallback* callback);
+ virtual void Initialize(DemuxerStream* stream,
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback);
virtual void Stop(FilterCallback* callback);
virtual void Flush(FilterCallback* callback);
virtual void Seek(base::TimeDelta time, FilterCallback* callback);
@@ -45,7 +47,8 @@ class OmxVideoDecoder : public VideoDecoder,
virtual void OnError();
virtual void OnFormatChange(VideoStreamInfo stream_info);
virtual void ProduceVideoSample(scoped_refptr<Buffer> buffer);
- virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame);
+ virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics);
// TODO(hclam): This is very ugly that we keep reference instead of
// scoped_refptr.
@@ -65,6 +68,7 @@ class OmxVideoDecoder : public VideoDecoder,
scoped_ptr<FilterCallback> uninitialize_callback_;
scoped_ptr<FilterCallback> flush_callback_;
scoped_ptr<FilterCallback> seek_callback_;
+ scoped_ptr<StatisticsCallback> statistics_callback_;
VideoCodecInfo info_;
diff --git a/media/filters/video_renderer_base.cc b/media/filters/video_renderer_base.cc
index 16b1fbf..857b9f8 100644
--- a/media/filters/video_renderer_base.cc
+++ b/media/filters/video_renderer_base.cc
@@ -157,14 +157,18 @@ void VideoRendererBase::Seek(base::TimeDelta time, FilterCallback* callback) {
}
void VideoRendererBase::Initialize(VideoDecoder* decoder,
- FilterCallback* callback) {
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback) {
base::AutoLock auto_lock(lock_);
DCHECK(decoder);
DCHECK(callback);
+ DCHECK(stats_callback);
DCHECK_EQ(kUninitialized, state_);
decoder_ = decoder;
AutoCallbackRunner done_runner(callback);
+ statistics_callback_.reset(stats_callback);
+
decoder_->set_consume_video_frame_callback(
NewCallback(this, &VideoRendererBase::ConsumeVideoFrame));
// Notify the pipeline of the video dimensions.
@@ -219,7 +223,17 @@ void VideoRendererBase::ThreadMain() {
base::PlatformThread::SetName("CrVideoRenderer");
base::TimeDelta remaining_time;
+ uint32 frames_dropped = 0;
+
for (;;) {
+ if (frames_dropped > 0) {
+ PipelineStatistics statistics;
+ statistics.video_frames_dropped = frames_dropped;
+ statistics_callback_->Run(statistics);
+
+ frames_dropped = 0;
+ }
+
base::AutoLock auto_lock(lock_);
const base::TimeDelta kIdleTimeDelta =
@@ -310,6 +324,7 @@ void VideoRendererBase::ThreadMain() {
// which is the first frame in the queue.
timeout_frame = frames_queue_ready_.front();
frames_queue_ready_.pop_front();
+ ++frames_dropped;
}
}
if (timeout_frame.get()) {
@@ -375,6 +390,10 @@ void VideoRendererBase::PutCurrentFrame(scoped_refptr<VideoFrame> frame) {
}
void VideoRendererBase::ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
+ PipelineStatistics statistics;
+ statistics.video_frames_decoded = 1;
+ statistics_callback_->Run(statistics);
+
base::AutoLock auto_lock(lock_);
// Decoder could reach seek state before our Seek() get called.
diff --git a/media/filters/video_renderer_base.h b/media/filters/video_renderer_base.h
index 1e1c4fe..303f798 100644
--- a/media/filters/video_renderer_base.h
+++ b/media/filters/video_renderer_base.h
@@ -53,7 +53,9 @@ class VideoRendererBase : public VideoRenderer,
virtual void Seek(base::TimeDelta time, FilterCallback* callback);
// VideoRenderer implementation.
- virtual void Initialize(VideoDecoder* decoder, FilterCallback* callback);
+ virtual void Initialize(VideoDecoder* decoder,
+ FilterCallback* callback,
+ StatisticsCallback* stats_callback);
virtual bool HasEnded();
// PlatformThread::Delegate implementation.
@@ -208,6 +210,7 @@ class VideoRendererBase : public VideoRenderer,
// Filter callbacks.
scoped_ptr<FilterCallback> flush_callback_;
scoped_ptr<FilterCallback> seek_callback_;
+ scoped_ptr<StatisticsCallback> statistics_callback_;
base::TimeDelta seek_timestamp_;
diff --git a/media/filters/video_renderer_base_unittest.cc b/media/filters/video_renderer_base_unittest.cc
index c15f8c3..5bef6ea 100644
--- a/media/filters/video_renderer_base_unittest.cc
+++ b/media/filters/video_renderer_base_unittest.cc
@@ -102,7 +102,8 @@ class VideoRendererBaseTest : public ::testing::Test {
.WillOnce(Return(true));
// Initialize, we shouldn't have any reads.
- renderer_->Initialize(decoder_, NewExpectedCallback());
+ renderer_->Initialize(decoder_,
+ NewExpectedCallback(), NewStatisticsCallback());
EXPECT_EQ(0u, read_queue_.size());
// Now seek to trigger prerolling.
@@ -157,11 +158,17 @@ class VideoRendererBaseTest : public ::testing::Test {
static const size_t kHeight;
static const int64 kDuration;
+ StatisticsCallback* NewStatisticsCallback() {
+ return NewCallback(&stats_callback_object_,
+ &MockStatisticsCallback::OnStatistics);
+ }
+
// Fixture members.
scoped_refptr<MockVideoRendererBase> renderer_;
scoped_refptr<MockVideoDecoder> decoder_;
StrictMock<MockFilterHost> host_;
MediaFormat decoder_media_format_;
+ MockStatisticsCallback stats_callback_object_;
// Receives all the buffers that renderer had provided to |decoder_|.
std::deque<scoped_refptr<VideoFrame> > read_queue_;
@@ -199,7 +206,8 @@ TEST_F(VideoRendererBaseTest, Initialize_BadMediaFormat) {
EXPECT_CALL(host_, SetError(PIPELINE_ERROR_INITIALIZATION_FAILED));
// Initialize, we expect to have no reads.
- renderer_->Initialize(bad_decoder, NewExpectedCallback());
+ renderer_->Initialize(bad_decoder,
+ NewExpectedCallback(), NewStatisticsCallback());
EXPECT_EQ(0u, read_queue_.size());
}
@@ -218,7 +226,8 @@ TEST_F(VideoRendererBaseTest, Initialize_Failed) {
EXPECT_CALL(host_, SetError(PIPELINE_ERROR_INITIALIZATION_FAILED));
// Initialize, we expect to have no reads.
- renderer_->Initialize(decoder_, NewExpectedCallback());
+ renderer_->Initialize(decoder_,
+ NewExpectedCallback(), NewStatisticsCallback());
EXPECT_EQ(0u, read_queue_.size());
}
diff --git a/media/tools/omx_test/omx_test.cc b/media/tools/omx_test/omx_test.cc
index 505f060..24cf4cc 100644
--- a/media/tools/omx_test/omx_test.cc
+++ b/media/tools/omx_test/omx_test.cc
@@ -38,6 +38,7 @@ using media::OmxConfigurator;
using media::OmxDecoderConfigurator;
using media::OmxEncoderConfigurator;
using media::OmxVideoDecodeEngine;
+using media::PipelineStatistics;
using media::VideoFrame;
using media::YuvFileReader;
@@ -131,7 +132,8 @@ class TestApp : public base::RefCountedThreadSafe<TestApp>,
FeedInputBuffer();
}
- virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
+ virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics) {
// This callback is received when the decoder has completed a decoding
// task and given us some output data. The frame is owned by the decoder.
if (stopped_ || error_)
diff --git a/media/video/ffmpeg_video_decode_engine.cc b/media/video/ffmpeg_video_decode_engine.cc
index 3b299068..786be41 100644
--- a/media/video/ffmpeg_video_decode_engine.cc
+++ b/media/video/ffmpeg_video_decode_engine.cc
@@ -11,6 +11,7 @@
#include "media/base/callback.h"
#include "media/base/limits.h"
#include "media/base/media_switches.h"
+#include "media/base/pipeline.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_util.h"
#include "media/filters/ffmpeg_demuxer.h"
@@ -201,6 +202,9 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
packet.data = const_cast<uint8*>(buffer->GetData());
packet.size = buffer->GetDataSize();
+ PipelineStatistics statistics;
+ statistics.video_bytes_decoded = buffer->GetDataSize();
+
// Let FFmpeg handle presentation timestamp reordering.
codec_context_->reordered_opaque = buffer->GetTimestamp().InMicroseconds();
@@ -221,7 +225,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
<< buffer->GetDuration().InMicroseconds() << " us, packet size: "
<< buffer->GetDataSize() << " bytes";
// TODO(jiesun): call event_handler_->OnError() instead.
- event_handler_->ConsumeVideoFrame(video_frame);
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
return;
}
@@ -232,7 +236,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
// drained, we mark the flag. Otherwise we read from demuxer again.
if (frame_decoded == 0) {
if (buffer->IsEndOfStream()) { // We had started flushing.
- event_handler_->ConsumeVideoFrame(video_frame);
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
output_eos_reached_ = true;
} else {
ReadInput();
@@ -247,7 +251,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
!av_frame_->data[VideoFrame::kUPlane] ||
!av_frame_->data[VideoFrame::kVPlane]) {
// TODO(jiesun): call event_handler_->OnError() instead.
- event_handler_->ConsumeVideoFrame(video_frame);
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
return;
}
@@ -296,7 +300,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) {
video_frame->SetDuration(duration);
pending_output_buffers_--;
- event_handler_->ConsumeVideoFrame(video_frame);
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
}
void FFmpegVideoDecodeEngine::Uninitialize() {
diff --git a/media/video/ffmpeg_video_decode_engine_unittest.cc b/media/video/ffmpeg_video_decode_engine_unittest.cc
index 5f8a866..666e522 100644
--- a/media/video/ffmpeg_video_decode_engine_unittest.cc
+++ b/media/video/ffmpeg_video_decode_engine_unittest.cc
@@ -7,6 +7,7 @@
#include "media/base/data_buffer.h"
#include "media/base/mock_ffmpeg.h"
#include "media/base/mock_task.h"
+#include "media/base/pipeline.h"
#include "media/video/ffmpeg_video_decode_engine.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -117,7 +118,7 @@ class FFmpegVideoDecodeEngineTest : public testing::Test,
EXPECT_CALL(*this, ProduceVideoSample(_))
.WillOnce(DemuxComplete(test_engine_.get(), buffer_));
- EXPECT_CALL(*this, ConsumeVideoFrame(_))
+ EXPECT_CALL(*this, ConsumeVideoFrame(_, _))
.WillOnce(DecodeComplete(this));
test_engine_->ProduceVideoFrame(video_frame_);
}
@@ -130,8 +131,9 @@ class FFmpegVideoDecodeEngineTest : public testing::Test,
}
public:
- MOCK_METHOD1(ConsumeVideoFrame,
- void(scoped_refptr<VideoFrame> video_frame));
+ MOCK_METHOD2(ConsumeVideoFrame,
+ void(scoped_refptr<VideoFrame> video_frame,
+ const PipelineStatistics& statistics));
MOCK_METHOD1(ProduceVideoSample,
void(scoped_refptr<Buffer> buffer));
MOCK_METHOD1(OnInitializeComplete,
@@ -262,7 +264,7 @@ TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_0ByteFrame) {
EXPECT_CALL(*this, ProduceVideoSample(_))
.WillOnce(DemuxComplete(test_engine_.get(), buffer_))
.WillOnce(DemuxComplete(test_engine_.get(), buffer_));
- EXPECT_CALL(*this, ConsumeVideoFrame(_))
+ EXPECT_CALL(*this, ConsumeVideoFrame(_, _))
.WillOnce(DecodeComplete(this));
test_engine_->ProduceVideoFrame(video_frame_);
@@ -280,7 +282,7 @@ TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_DecodeError) {
EXPECT_CALL(*this, ProduceVideoSample(_))
.WillOnce(DemuxComplete(test_engine_.get(), buffer_));
- EXPECT_CALL(*this, ConsumeVideoFrame(_))
+ EXPECT_CALL(*this, ConsumeVideoFrame(_, _))
.WillOnce(DecodeComplete(this));
test_engine_->ProduceVideoFrame(video_frame_);
diff --git a/media/video/mft_h264_decode_engine.cc b/media/video/mft_h264_decode_engine.cc
index 5166b1f..340043a 100644
--- a/media/video/mft_h264_decode_engine.cc
+++ b/media/video/mft_h264_decode_engine.cc
@@ -16,6 +16,7 @@
#include "base/time.h"
#include "base/message_loop.h"
#include "media/base/limits.h"
+#include "media/base/pipeline.h"
#include "media/video/video_decode_context.h"
#pragma comment(lib, "dxva2.lib")
@@ -240,8 +241,9 @@ void MftH264DecodeEngine::Seek() {
// TODO(hclam): Seriously the logic in VideoRendererBase is flawed that we
// have to perform the following hack to get playback going.
+ PipelineStatistics statistics;
for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) {
- event_handler_->ConsumeVideoFrame(output_frames_[0]);
+ event_handler_->ConsumeVideoFrame(output_frames_[0], statistics);
}
// Seek not implemented.
@@ -253,6 +255,7 @@ void MftH264DecodeEngine::ConsumeVideoSample(scoped_refptr<Buffer> buffer) {
LOG(ERROR) << "ConsumeVideoSample: invalid state";
}
ScopedComPtr<IMFSample> sample;
+ PipelineStatistics statistics;
if (!buffer->IsEndOfStream()) {
sample.Attach(
CreateInputSample(buffer->GetData(),
@@ -268,6 +271,8 @@ void MftH264DecodeEngine::ConsumeVideoSample(scoped_refptr<Buffer> buffer) {
event_handler_->OnError();
}
}
+
+ statistics.video_bytes_decoded = buffer->GetDataSize();
} else {
if (state_ != MftH264DecodeEngine::kEosDrain) {
// End of stream, send drain messages.
@@ -280,7 +285,7 @@ void MftH264DecodeEngine::ConsumeVideoSample(scoped_refptr<Buffer> buffer) {
}
}
}
- DoDecode();
+ DoDecode(statistics);
}
void MftH264DecodeEngine::ProduceVideoFrame(scoped_refptr<VideoFrame> frame) {
@@ -534,7 +539,7 @@ bool MftH264DecodeEngine::GetStreamsInfoAndBufferReqs() {
return true;
}
-bool MftH264DecodeEngine::DoDecode() {
+bool MftH264DecodeEngine::DoDecode(const PipelineStatistics& statistics) {
if (state_ != kNormal && state_ != kEosDrain) {
LOG(ERROR) << "DoDecode: not in normal or drain state";
return false;
@@ -589,7 +594,7 @@ bool MftH264DecodeEngine::DoDecode() {
// No more output from the decoder. Notify EOS and stop playback.
scoped_refptr<VideoFrame> frame;
VideoFrame::CreateEmptyFrame(&frame);
- event_handler_->ConsumeVideoFrame(frame);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
state_ = MftH264DecodeEngine::kStopped;
return false;
}
@@ -653,7 +658,7 @@ bool MftH264DecodeEngine::DoDecode() {
context_->ConvertToVideoFrame(
surface.get(), output_frames_[0],
NewRunnableMethod(this, &MftH264DecodeEngine::OnUploadVideoFrameDone,
- surface, output_frames_[0]));
+ surface, output_frames_[0], statistics));
return true;
}
// TODO(hclam): Remove this branch.
@@ -678,15 +683,16 @@ bool MftH264DecodeEngine::DoDecode() {
memcpy(dst_y, src_y, current_length);
CHECK(SUCCEEDED(output_buffer->Unlock()));
- event_handler_->ConsumeVideoFrame(frame);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
return true;
}
void MftH264DecodeEngine::OnUploadVideoFrameDone(
ScopedComPtr<IDirect3DSurface9, &IID_IDirect3DSurface9> surface,
- scoped_refptr<media::VideoFrame> frame) {
+ scoped_refptr<media::VideoFrame> frame,
+ PipelineStatistics statistics) {
// After this method is exited the reference to surface is released.
- event_handler_->ConsumeVideoFrame(frame);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
}
} // namespace media
diff --git a/media/video/mft_h264_decode_engine.h b/media/video/mft_h264_decode_engine.h
index a131c2f..d543631 100644
--- a/media/video/mft_h264_decode_engine.h
+++ b/media/video/mft_h264_decode_engine.h
@@ -74,11 +74,11 @@ class MftH264DecodeEngine : public media::VideoDecodeEngine {
bool SetDecodeEngineOutputMediaType(const GUID subtype);
bool SendMFTMessage(MFT_MESSAGE_TYPE msg);
bool GetStreamsInfoAndBufferReqs();
- bool DoDecode();
+ bool DoDecode(const PipelineStatistics& statistics);
void OnAllocFramesDone();
void OnUploadVideoFrameDone(
ScopedComPtr<IDirect3DSurface9, &IID_IDirect3DSurface9> surface,
- scoped_refptr<media::VideoFrame> frame);
+ scoped_refptr<media::VideoFrame> frame, PipelineStatistics statistics);
bool use_dxva_;
ScopedComPtr<IMFTransform> decode_engine_;
diff --git a/media/video/mft_h264_decode_engine_unittest.cc b/media/video/mft_h264_decode_engine_unittest.cc
index fcf7d69..b1cf33b 100644
--- a/media/video/mft_h264_decode_engine_unittest.cc
+++ b/media/video/mft_h264_decode_engine_unittest.cc
@@ -98,7 +98,8 @@ class SimpleMftH264DecodeEngineHandler
decoder_->ConsumeVideoSample(input);
}
}
- virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) {
+ virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics) {
fill_buffer_callback_count_++;
current_frame_ = frame;
}
diff --git a/media/video/omx_video_decode_engine.cc b/media/video/omx_video_decode_engine.cc
index 9ca3089..df3f9cf 100644
--- a/media/video/omx_video_decode_engine.cc
+++ b/media/video/omx_video_decode_engine.cc
@@ -19,6 +19,7 @@
#include "base/message_loop.h"
#include "base/string_util.h"
#include "media/base/buffers.h"
+#include "media/base/pipeline.h"
namespace media {
@@ -325,10 +326,13 @@ void OmxVideoDecodeEngine::FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer) {
// provides the buffer allocator.
if (kClientFlushing == client_state_ && !uses_egl_image_) return;
+ PipelineStatistics statistics;
+ statistics.video_bytes_decoded = buffer->nFilledLen;
+
frame->SetTimestamp(base::TimeDelta::FromMicroseconds(buffer->nTimeStamp));
frame->SetDuration(frame->GetTimestamp() - last_pts_);
last_pts_ = frame->GetTimestamp();
- event_handler_->ConsumeVideoFrame(frame);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
output_pending_request_--;
}
@@ -607,22 +611,26 @@ void OmxVideoDecodeEngine::ProduceVideoFrame(
DCHECK(video_frame.get() && !video_frame->IsEndOfStream());
output_pending_request_++;
+ PipelineStatistics statistics;
+
if (!CanAcceptOutput()) {
if (uses_egl_image_) { // return it to owner.
output_pending_request_--;
- event_handler_->ConsumeVideoFrame(video_frame);
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
}
return;
}
OMX_BUFFERHEADERTYPE* omx_buffer = FindOmxBuffer(video_frame);
if (omx_buffer) {
+ statistics.video_bytes_decoded = omx_buffer->nFilledLen;
+
if (kClientRunning == client_state_) {
SendOutputBufferToComponent(omx_buffer);
} else if (kClientFlushing == client_state_) {
if (uses_egl_image_) { // return it to owner.
output_pending_request_--;
- event_handler_->ConsumeVideoFrame(video_frame);
+ event_handler_->ConsumeVideoFrame(video_frame, statistics);
}
if (InputPortFlushed() && OutputPortFlushed())
ComponentFlushDone();
@@ -1220,11 +1228,14 @@ void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
return;
}
+ PipelineStatistics statistics;
+ statistics.video_bytes_decoded = buffer->nFilledLen;
+
if (!CanAcceptOutput()) {
if (uses_egl_image_) {
scoped_refptr<VideoFrame> frame;
frame = static_cast<VideoFrame*>(buffer->pAppPrivate);
- event_handler_->ConsumeVideoFrame(frame);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
output_pending_request_--;
}
return;
@@ -1244,7 +1255,7 @@ void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) {
// Singal end of stream.
scoped_refptr<VideoFrame> frame;
VideoFrame::CreateEmptyFrame(&frame);
- event_handler_->ConsumeVideoFrame(frame);
+ event_handler_->ConsumeVideoFrame(frame, statistics);
}
if (client_state_ == kClientFlushing &&
diff --git a/media/video/video_decode_engine.h b/media/video/video_decode_engine.h
index 4182215..57b76a2 100644
--- a/media/video/video_decode_engine.h
+++ b/media/video/video_decode_engine.h
@@ -15,6 +15,8 @@ namespace media {
class Buffer;
class VideoDecodeContext;
+struct PipelineStatistics;
+
enum VideoCodec {
kCodecH264,
kCodecVC1,
@@ -109,7 +111,8 @@ class VideoDecodeEngine {
// In the case of flushing and video frame is provided externally, this
// method is called to return the video frame object to the owner.
// The content of the video frame may be invalid.
- virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) = 0;
+ virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame,
+ const PipelineStatistics& statistics) = 0;
};
virtual ~VideoDecodeEngine() {}