diff options
author | scherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2011-02-16 02:03:16 +0000 |
---|---|---|
committer | scherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2011-02-16 02:03:16 +0000 |
commit | 4c51bc66e6496234d5a3a7cbdfb1cf07cca2167e (patch) | |
tree | 03e28346b7752cec02e91ba925694548b49b8184 /media/video | |
parent | 9cadfb34adaf42ad30e398b24ab06e5bc587e57a (diff) | |
download | chromium_src-4c51bc66e6496234d5a3a7cbdfb1cf07cca2167e.zip chromium_src-4c51bc66e6496234d5a3a7cbdfb1cf07cca2167e.tar.gz chromium_src-4c51bc66e6496234d5a3a7cbdfb1cf07cca2167e.tar.bz2 |
Implement webkit media metrics in chromium.
This implements the Chromium side of the WebKit media statistics
feature.
A follow-up CL (in WebKit) will wire the two sides together.
Patch by sjl@chromium.org:
http://codereview.chromium.org/6246091/
BUG=71255
TEST=media_unittests
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@75050 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media/video')
-rw-r--r-- | media/video/ffmpeg_video_decode_engine.cc | 12 | ||||
-rw-r--r-- | media/video/ffmpeg_video_decode_engine_unittest.cc | 12 | ||||
-rw-r--r-- | media/video/mft_h264_decode_engine.cc | 22 | ||||
-rw-r--r-- | media/video/mft_h264_decode_engine.h | 4 | ||||
-rw-r--r-- | media/video/mft_h264_decode_engine_unittest.cc | 3 | ||||
-rw-r--r-- | media/video/omx_video_decode_engine.cc | 21 | ||||
-rw-r--r-- | media/video/video_decode_engine.h | 5 |
7 files changed, 53 insertions, 26 deletions
diff --git a/media/video/ffmpeg_video_decode_engine.cc b/media/video/ffmpeg_video_decode_engine.cc index 3b299068..786be41 100644 --- a/media/video/ffmpeg_video_decode_engine.cc +++ b/media/video/ffmpeg_video_decode_engine.cc @@ -11,6 +11,7 @@ #include "media/base/callback.h" #include "media/base/limits.h" #include "media/base/media_switches.h" +#include "media/base/pipeline.h" #include "media/ffmpeg/ffmpeg_common.h" #include "media/ffmpeg/ffmpeg_util.h" #include "media/filters/ffmpeg_demuxer.h" @@ -201,6 +202,9 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) { packet.data = const_cast<uint8*>(buffer->GetData()); packet.size = buffer->GetDataSize(); + PipelineStatistics statistics; + statistics.video_bytes_decoded = buffer->GetDataSize(); + // Let FFmpeg handle presentation timestamp reordering. codec_context_->reordered_opaque = buffer->GetTimestamp().InMicroseconds(); @@ -221,7 +225,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) { << buffer->GetDuration().InMicroseconds() << " us, packet size: " << buffer->GetDataSize() << " bytes"; // TODO(jiesun): call event_handler_->OnError() instead. - event_handler_->ConsumeVideoFrame(video_frame); + event_handler_->ConsumeVideoFrame(video_frame, statistics); return; } @@ -232,7 +236,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) { // drained, we mark the flag. Otherwise we read from demuxer again. if (frame_decoded == 0) { if (buffer->IsEndOfStream()) { // We had started flushing. - event_handler_->ConsumeVideoFrame(video_frame); + event_handler_->ConsumeVideoFrame(video_frame, statistics); output_eos_reached_ = true; } else { ReadInput(); @@ -247,7 +251,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) { !av_frame_->data[VideoFrame::kUPlane] || !av_frame_->data[VideoFrame::kVPlane]) { // TODO(jiesun): call event_handler_->OnError() instead. 
- event_handler_->ConsumeVideoFrame(video_frame); + event_handler_->ConsumeVideoFrame(video_frame, statistics); return; } @@ -296,7 +300,7 @@ void FFmpegVideoDecodeEngine::DecodeFrame(scoped_refptr<Buffer> buffer) { video_frame->SetDuration(duration); pending_output_buffers_--; - event_handler_->ConsumeVideoFrame(video_frame); + event_handler_->ConsumeVideoFrame(video_frame, statistics); } void FFmpegVideoDecodeEngine::Uninitialize() { diff --git a/media/video/ffmpeg_video_decode_engine_unittest.cc b/media/video/ffmpeg_video_decode_engine_unittest.cc index 5f8a866..666e522 100644 --- a/media/video/ffmpeg_video_decode_engine_unittest.cc +++ b/media/video/ffmpeg_video_decode_engine_unittest.cc @@ -7,6 +7,7 @@ #include "media/base/data_buffer.h" #include "media/base/mock_ffmpeg.h" #include "media/base/mock_task.h" +#include "media/base/pipeline.h" #include "media/video/ffmpeg_video_decode_engine.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" @@ -117,7 +118,7 @@ class FFmpegVideoDecodeEngineTest : public testing::Test, EXPECT_CALL(*this, ProduceVideoSample(_)) .WillOnce(DemuxComplete(test_engine_.get(), buffer_)); - EXPECT_CALL(*this, ConsumeVideoFrame(_)) + EXPECT_CALL(*this, ConsumeVideoFrame(_, _)) .WillOnce(DecodeComplete(this)); test_engine_->ProduceVideoFrame(video_frame_); } @@ -130,8 +131,9 @@ class FFmpegVideoDecodeEngineTest : public testing::Test, } public: - MOCK_METHOD1(ConsumeVideoFrame, - void(scoped_refptr<VideoFrame> video_frame)); + MOCK_METHOD2(ConsumeVideoFrame, + void(scoped_refptr<VideoFrame> video_frame, + const PipelineStatistics& statistics)); MOCK_METHOD1(ProduceVideoSample, void(scoped_refptr<Buffer> buffer)); MOCK_METHOD1(OnInitializeComplete, @@ -262,7 +264,7 @@ TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_0ByteFrame) { EXPECT_CALL(*this, ProduceVideoSample(_)) .WillOnce(DemuxComplete(test_engine_.get(), buffer_)) .WillOnce(DemuxComplete(test_engine_.get(), buffer_)); - EXPECT_CALL(*this, 
ConsumeVideoFrame(_)) + EXPECT_CALL(*this, ConsumeVideoFrame(_, _)) .WillOnce(DecodeComplete(this)); test_engine_->ProduceVideoFrame(video_frame_); @@ -280,7 +282,7 @@ TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_DecodeError) { EXPECT_CALL(*this, ProduceVideoSample(_)) .WillOnce(DemuxComplete(test_engine_.get(), buffer_)); - EXPECT_CALL(*this, ConsumeVideoFrame(_)) + EXPECT_CALL(*this, ConsumeVideoFrame(_, _)) .WillOnce(DecodeComplete(this)); test_engine_->ProduceVideoFrame(video_frame_); diff --git a/media/video/mft_h264_decode_engine.cc b/media/video/mft_h264_decode_engine.cc index 5166b1f..340043a 100644 --- a/media/video/mft_h264_decode_engine.cc +++ b/media/video/mft_h264_decode_engine.cc @@ -16,6 +16,7 @@ #include "base/time.h" #include "base/message_loop.h" #include "media/base/limits.h" +#include "media/base/pipeline.h" #include "media/video/video_decode_context.h" #pragma comment(lib, "dxva2.lib") @@ -240,8 +241,9 @@ void MftH264DecodeEngine::Seek() { // TODO(hclam): Seriously the logic in VideoRendererBase is flawed that we // have to perform the following hack to get playback going. + PipelineStatistics statistics; for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) { - event_handler_->ConsumeVideoFrame(output_frames_[0]); + event_handler_->ConsumeVideoFrame(output_frames_[0], statistics); } // Seek not implemented. @@ -253,6 +255,7 @@ void MftH264DecodeEngine::ConsumeVideoSample(scoped_refptr<Buffer> buffer) { LOG(ERROR) << "ConsumeVideoSample: invalid state"; } ScopedComPtr<IMFSample> sample; + PipelineStatistics statistics; if (!buffer->IsEndOfStream()) { sample.Attach( CreateInputSample(buffer->GetData(), @@ -268,6 +271,8 @@ void MftH264DecodeEngine::ConsumeVideoSample(scoped_refptr<Buffer> buffer) { event_handler_->OnError(); } } + + statistics.video_bytes_decoded = buffer->GetDataSize(); } else { if (state_ != MftH264DecodeEngine::kEosDrain) { // End of stream, send drain messages. 
@@ -280,7 +285,7 @@ void MftH264DecodeEngine::ConsumeVideoSample(scoped_refptr<Buffer> buffer) { } } } - DoDecode(); + DoDecode(statistics); } void MftH264DecodeEngine::ProduceVideoFrame(scoped_refptr<VideoFrame> frame) { @@ -534,7 +539,7 @@ bool MftH264DecodeEngine::GetStreamsInfoAndBufferReqs() { return true; } -bool MftH264DecodeEngine::DoDecode() { +bool MftH264DecodeEngine::DoDecode(const PipelineStatistics& statistics) { if (state_ != kNormal && state_ != kEosDrain) { LOG(ERROR) << "DoDecode: not in normal or drain state"; return false; @@ -589,7 +594,7 @@ bool MftH264DecodeEngine::DoDecode() { // No more output from the decoder. Notify EOS and stop playback. scoped_refptr<VideoFrame> frame; VideoFrame::CreateEmptyFrame(&frame); - event_handler_->ConsumeVideoFrame(frame); + event_handler_->ConsumeVideoFrame(frame, statistics); state_ = MftH264DecodeEngine::kStopped; return false; } @@ -653,7 +658,7 @@ bool MftH264DecodeEngine::DoDecode() { context_->ConvertToVideoFrame( surface.get(), output_frames_[0], NewRunnableMethod(this, &MftH264DecodeEngine::OnUploadVideoFrameDone, - surface, output_frames_[0])); + surface, output_frames_[0], statistics)); return true; } // TODO(hclam): Remove this branch. @@ -678,15 +683,16 @@ bool MftH264DecodeEngine::DoDecode() { memcpy(dst_y, src_y, current_length); CHECK(SUCCEEDED(output_buffer->Unlock())); - event_handler_->ConsumeVideoFrame(frame); + event_handler_->ConsumeVideoFrame(frame, statistics); return true; } void MftH264DecodeEngine::OnUploadVideoFrameDone( ScopedComPtr<IDirect3DSurface9, &IID_IDirect3DSurface9> surface, - scoped_refptr<media::VideoFrame> frame) { + scoped_refptr<media::VideoFrame> frame, + PipelineStatistics statistics) { // After this method is exited the reference to surface is released. 
- event_handler_->ConsumeVideoFrame(frame); + event_handler_->ConsumeVideoFrame(frame, statistics); } } // namespace media diff --git a/media/video/mft_h264_decode_engine.h b/media/video/mft_h264_decode_engine.h index a131c2f..d543631 100644 --- a/media/video/mft_h264_decode_engine.h +++ b/media/video/mft_h264_decode_engine.h @@ -74,11 +74,11 @@ class MftH264DecodeEngine : public media::VideoDecodeEngine { bool SetDecodeEngineOutputMediaType(const GUID subtype); bool SendMFTMessage(MFT_MESSAGE_TYPE msg); bool GetStreamsInfoAndBufferReqs(); - bool DoDecode(); + bool DoDecode(const PipelineStatistics& statistics); void OnAllocFramesDone(); void OnUploadVideoFrameDone( ScopedComPtr<IDirect3DSurface9, &IID_IDirect3DSurface9> surface, - scoped_refptr<media::VideoFrame> frame); + scoped_refptr<media::VideoFrame> frame, PipelineStatistics statistics); bool use_dxva_; ScopedComPtr<IMFTransform> decode_engine_; diff --git a/media/video/mft_h264_decode_engine_unittest.cc b/media/video/mft_h264_decode_engine_unittest.cc index fcf7d69..b1cf33b 100644 --- a/media/video/mft_h264_decode_engine_unittest.cc +++ b/media/video/mft_h264_decode_engine_unittest.cc @@ -98,7 +98,8 @@ class SimpleMftH264DecodeEngineHandler decoder_->ConsumeVideoSample(input); } } - virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) { + virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame, + const PipelineStatistics& statistics) { fill_buffer_callback_count_++; current_frame_ = frame; } diff --git a/media/video/omx_video_decode_engine.cc b/media/video/omx_video_decode_engine.cc index 9ca3089..df3f9cf 100644 --- a/media/video/omx_video_decode_engine.cc +++ b/media/video/omx_video_decode_engine.cc @@ -19,6 +19,7 @@ #include "base/message_loop.h" #include "base/string_util.h" #include "media/base/buffers.h" +#include "media/base/pipeline.h" namespace media { @@ -325,10 +326,13 @@ void OmxVideoDecodeEngine::FinishFillBuffer(OMX_BUFFERHEADERTYPE* buffer) { // provides the buffer allocator. 
if (kClientFlushing == client_state_ && !uses_egl_image_) return; + PipelineStatistics statistics; + statistics.video_bytes_decoded = buffer->nFilledLen; + frame->SetTimestamp(base::TimeDelta::FromMicroseconds(buffer->nTimeStamp)); frame->SetDuration(frame->GetTimestamp() - last_pts_); last_pts_ = frame->GetTimestamp(); - event_handler_->ConsumeVideoFrame(frame); + event_handler_->ConsumeVideoFrame(frame, statistics); output_pending_request_--; } @@ -607,22 +611,26 @@ void OmxVideoDecodeEngine::ProduceVideoFrame( DCHECK(video_frame.get() && !video_frame->IsEndOfStream()); output_pending_request_++; + PipelineStatistics statistics; + if (!CanAcceptOutput()) { if (uses_egl_image_) { // return it to owner. output_pending_request_--; - event_handler_->ConsumeVideoFrame(video_frame); + event_handler_->ConsumeVideoFrame(video_frame, statistics); } return; } OMX_BUFFERHEADERTYPE* omx_buffer = FindOmxBuffer(video_frame); if (omx_buffer) { + statistics.video_bytes_decoded = omx_buffer->nFilledLen; + if (kClientRunning == client_state_) { SendOutputBufferToComponent(omx_buffer); } else if (kClientFlushing == client_state_) { if (uses_egl_image_) { // return it to owner. output_pending_request_--; - event_handler_->ConsumeVideoFrame(video_frame); + event_handler_->ConsumeVideoFrame(video_frame, statistics); } if (InputPortFlushed() && OutputPortFlushed()) ComponentFlushDone(); @@ -1220,11 +1228,14 @@ void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) { return; } + PipelineStatistics statistics; + statistics.video_bytes_decoded = buffer->nFilledLen; + if (!CanAcceptOutput()) { if (uses_egl_image_) { scoped_refptr<VideoFrame> frame; frame = static_cast<VideoFrame*>(buffer->pAppPrivate); - event_handler_->ConsumeVideoFrame(frame); + event_handler_->ConsumeVideoFrame(frame, statistics); output_pending_request_--; } return; @@ -1244,7 +1255,7 @@ void OmxVideoDecodeEngine::FillBufferDoneTask(OMX_BUFFERHEADERTYPE* buffer) { // Singal end of stream. 
scoped_refptr<VideoFrame> frame; VideoFrame::CreateEmptyFrame(&frame); - event_handler_->ConsumeVideoFrame(frame); + event_handler_->ConsumeVideoFrame(frame, statistics); } if (client_state_ == kClientFlushing && diff --git a/media/video/video_decode_engine.h b/media/video/video_decode_engine.h index 4182215..57b76a2 100644 --- a/media/video/video_decode_engine.h +++ b/media/video/video_decode_engine.h @@ -15,6 +15,8 @@ namespace media { class Buffer; class VideoDecodeContext; +struct PipelineStatistics; + enum VideoCodec { kCodecH264, kCodecVC1, @@ -109,7 +111,8 @@ class VideoDecodeEngine { // In the case of flushing and video frame is provided externally, this // method is called to return the video frame object to the owner. // The content of the video frame may be invalid. - virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame) = 0; + virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame, + const PipelineStatistics& statistics) = 0; }; virtual ~VideoDecodeEngine() {} |